Example #1
// GetArticleWithSentencesAndMeter fetches the article identified by uuid,
// finds rhyme/meter matches for its sentences against the given meter, and
// bundles the article, the meter, the matches, and the syllabi's known
// unknowns into an ArticleWithSentencesAndMeter.
func GetArticleWithSentencesAndMeter(uuid string, meter string, syllabi *rhyme.Syllabi) *ArticleWithSentencesAndMeter {
	aws := getArticleWithSentences(uuid)
	rams := FindRhymeAndMetersInSentences(aws.Sentences, meter, syllabi)

	// sort.Sort(rhyme.RhymeAndMeters(*rams))

	awsam := ArticleWithSentencesAndMeter{
		aws,
		meter,
		rams,
		syllabi.KnownUnknowns(),
	}

	return &awsam
}
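A minimal call sketch, assuming the enclosing package and a *rhyme.Syllabi constructed elsewhere; the UUID below is a placeholder, not a real article. The returned struct carries the article, the meter, the matching RhymeAndMeters, and the words the syllable dictionary could not resolve (KnownUnknowns).

func exampleArticleWithMeter(syllabi *rhyme.Syllabi) *ArticleWithSentencesAndMeter {
	// Placeholder UUID; any article UUID known to getArticleWithSentences would do.
	uuid := "00000000-0000-0000-0000-000000000000"
	return GetArticleWithSentencesAndMeter(uuid, rhyme.DefaultMeter, syllabi)
}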
Example #2
// FindRhymeAndMetersInSentences scans each sentence for rhyme/meter matches
// against the given meter (falling back to rhyme.DefaultMeter when meter is
// empty) and returns only the matches whose EmphasisRegexpMatch2 is non-empty.
func FindRhymeAndMetersInSentences(sentences *[]string, meter string, syllabi *rhyme.Syllabi) *[]*rhyme.RhymeAndMeter {
	rams := []*rhyme.RhymeAndMeter{}

	if meter == "" {
		meter = rhyme.DefaultMeter
	}

	emphasisRegexp, emphasisRegexpSecondary := rhyme.ConvertToEmphasisPointsStringRegexp(meter)

	for _, s := range *(sentences) {
		syllabiRams := syllabi.RhymeAndMetersOfPhrase(s, emphasisRegexp, emphasisRegexpSecondary)

		for _, ram := range *syllabiRams {
			if ram.EmphasisRegexpMatch2 != "" {
				rams = append(rams, ram)
			}
		}
	}

	return &rams
}
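The helper can also be exercised on its own; a sketch assuming the enclosing package plus an fmt import, with made-up sentences:

func exampleFindRams(syllabi *rhyme.Syllabi) {
	sentences := []string{
		"The quality of mercy is not strained.",
		"It droppeth as the gentle rain from heaven.",
	}
	rams := FindRhymeAndMetersInSentences(&sentences, rhyme.DefaultMeter, syllabi)
	for _, ram := range *rams {
		// EmphasisRegexpMatch2 is guaranteed non-empty by the filter above.
		fmt.Println(ram.EmphasisRegexpMatch2)
	}
}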
Example #3
// GetDetails gathers articles matching the given ontology name/value pair,
// groups their matched phrases by final syllable (splitting off bad endings),
// collects secondary matches per article URL, and returns the assembled
// Details along with a flag reporting whether any secondary matches (haikus)
// were found.
func GetDetails(syllabi *rhyme.Syllabi, ontologyName string, ontologyValue string, meter string, maxArticles int, maxMillis int) (*Details, bool) {

	// Clamp maxArticles to the range [1, maxMaxArticles].
	if maxArticles < 1 {
		maxArticles = 1
	} else if maxArticles > maxMaxArticles {
		maxArticles = maxMaxArticles
	}

	articles, matchedPhrasesWithUrl := article.GetArticlesByOntologyWithSentencesAndMeter(ontologyName, ontologyValue, meter, syllabi, maxArticles, maxMillis)

	// Accumulators: matched phrases bucketed by final syllable (good and bad
	// endings kept apart), flat lists of secondary matches, and an index of
	// secondary matches keyed by article URL.
	finalSyllablesMap := &map[string][]*(article.MatchedPhraseWithUrl){}
	badFinalSyllablesMap := &map[string][]*(article.MatchedPhraseWithUrl){}
	secondaryMatchedPhrasesWithUrl := []*(article.MatchedPhraseWithUrl){}
	badSecondaryMatchedPhrasesWithUrl := []*(article.MatchedPhraseWithUrl){}

	secondaryMatchedPhrasesWithUrlByUrl := map[string]*[]*(article.MatchedPhraseWithUrl){}

	// For each matched phrase, record any secondary match (in the flat good/bad
	// lists and the per-URL index), then bucket the phrase itself by its final
	// syllable.
	for _, mpwu := range *matchedPhrasesWithUrl {

		if mpwu.MatchesOnMeter.SecondaryMatch != nil {
			// fwwiem := mpwu.MatchesOnMeter.SecondaryMatch.FinalWordWordInEachMatch

			// The secondary match counts as a bad end if any of its final words is flagged.
			isBadEnd := false
			for _, w := range *(mpwu.MatchesOnMeter.SecondaryMatch.FinalWordWordInEachMatch) {
				if w.IsBadEnd {
					isBadEnd = true
					break
				}
			}
			// isBadEnd := (*fwwiem)[len(*fwwiem)-1].IsBadEnd

			if isBadEnd {
				badSecondaryMatchedPhrasesWithUrl = append(badSecondaryMatchedPhrasesWithUrl, mpwu)
			} else {
				secondaryMatchedPhrasesWithUrl = append(secondaryMatchedPhrasesWithUrl, mpwu)
				url := *mpwu.Url
				if _, ok := secondaryMatchedPhrasesWithUrlByUrl[url]; !ok {
					secondaryMatchedPhrasesWithUrlByUrl[url] = &[]*(article.MatchedPhraseWithUrl){}
				}

				listOfSMPWU := *(secondaryMatchedPhrasesWithUrlByUrl[url])
				listOfSMPWU = append(listOfSMPWU, mpwu)
				secondaryMatchedPhrasesWithUrlByUrl[url] = &listOfSMPWU
			}
		}

		fsAZ := mpwu.MatchesOnMeter.FinalDuringSyllableAZ
		// The phrase goes into the bad bucket if its final word is missing or
		// flagged as a bad end.
		isBadEnd := (mpwu.MatchesOnMeter.FinalDuringWordWord == nil) || mpwu.MatchesOnMeter.FinalDuringWordWord.IsBadEnd

		fsMap := finalSyllablesMap
		if isBadEnd {
			fsMap = badFinalSyllablesMap
		}

		if _, ok := (*fsMap)[fsAZ]; !ok {
			(*fsMap)[fsAZ] = []*(article.MatchedPhraseWithUrl){}
		}

		(*fsMap)[fsAZ] = append((*fsMap)[fsAZ], mpwu)
	}

	// processFSMapIntoSortedMPWUs orders the final-syllable buckets via the
	// FSandCounts sort, then flattens them into a single list, marking the
	// first phrase of each bucket.
	processFSMapIntoSortedMPWUs := func(fsMap *map[string][]*(article.MatchedPhraseWithUrl)) *[]*MatchedPhraseWithUrlWithFirst {
		fsCounts := []*FSandCount{}

		for fs, list := range *fsMap {
			fsCounts = append(fsCounts, &FSandCount{fs, len(list)})
		}

		sort.Sort(FSandCounts(fsCounts))

		sortedMpwus := []*MatchedPhraseWithUrlWithFirst{}

		for _, fsc := range fsCounts {
			fsList := (*fsMap)[fsc.FinalSyllable]
			for i, mpwu := range fsList {
				isFirst := (i == 0)
				mpwuf := &MatchedPhraseWithUrlWithFirst{
					mpwu,
					isFirst,
				}
				sortedMpwus = append(sortedMpwus, mpwuf)
			}
		}
		return &sortedMpwus
	}

	sortedMpwus := processFSMapIntoSortedMPWUs(finalSyllablesMap)
	sortedBadMpwus := processFSMapIntoSortedMPWUs(badFinalSyllablesMap)

	listOfArticleAndMPWUs := []*(ArticleAndMPWUs){}

	if len(secondaryMatchedPhrasesWithUrlByUrl) > 0 {
		// Pair each article with its secondary matches, keyed by the article's
		// SiteUrl.
		for _, a := range *articles {
			url := a.SiteUrl
			if mpwus, ok := secondaryMatchedPhrasesWithUrlByUrl[url]; ok {
				articleAndMPWUs := ArticleAndMPWUs{
					Article: a,
					MPWUs:   mpwus,
				}
				listOfArticleAndMPWUs = append(listOfArticleAndMPWUs, &articleAndMPWUs)
			}
		}
	}

	details := Details{
		OntologyName:                      ontologyName,
		OntologyValue:                     ontologyValue,
		Meter:                             meter,
		Articles:                          articles,
		MatchedPhrasesWithUrl:             sortedMpwus,
		BadMatchedPhrasesWithUrl:          sortedBadMpwus,
		KnownUnknowns:                     syllabi.KnownUnknowns(),
		MaxArticles:                       maxArticles,
		NumArticles:                       len(*articles),
		SecondaryMatchedPhrasesWithUrl:    &secondaryMatchedPhrasesWithUrl,
		BadSecondaryMatchedPhrasesWithUrl: &badSecondaryMatchedPhrasesWithUrl,
		MaxMillis:                         maxMillis,
		SecondaryMatchedPhrasesWithUrlArticlesAndMPWUs: &listOfArticleAndMPWUs,
	}

	containsHaikus := (len(secondaryMatchedPhrasesWithUrl) > 0)

	return &details, containsHaikus
}
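For illustration, a sketch of driving GetDetails end to end; the ontology name/value pair is hypothetical, and fmt is assumed to be imported alongside the enclosing package's other dependencies:

func exampleGetDetails(syllabi *rhyme.Syllabi) {
	// "people" / "Jane Doe" is a made-up ontology query; maxArticles is
	// clamped to at most maxMaxArticles inside GetDetails.
	details, containsHaikus := GetDetails(syllabi, "people", "Jane Doe", rhyme.DefaultMeter, 10, 3000)
	fmt.Println(details.NumArticles, len(*details.MatchedPhrasesWithUrl), containsHaikus)
}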