// ExampleCompose demonstrates combining, replacing, and removing parts of
// language tags with Compose, and how the Default canonicalization (and the
// All CanonType) affects the result. The // Output: comment is verified by
// go test, so statement order and output text are load-bearing.
func ExampleCompose() {
	nl, _ := language.ParseBase("nl")
	us, _ := language.ParseRegion("US")
	de := language.Make("de-1901-u-co-phonebk")
	jp := language.Make("ja-JP")
	fi := language.Make("fi-x-ing")
	u, _ := language.ParseExtension("u-nu-arabic")
	x, _ := language.ParseExtension("x-piglatin")

	// Combine a base language and region.
	fmt.Println(language.Compose(nl, us))

	// Combine a base language and extension.
	fmt.Println(language.Compose(nl, x))

	// Replace the region.
	fmt.Println(language.Compose(jp, us))

	// Combine several tags.
	fmt.Println(language.Compose(us, nl, u))

	// Replace the base language of a tag.
	fmt.Println(language.Compose(de, nl))
	fmt.Println(language.Compose(de, nl, u))

	// Remove the base language.
	fmt.Println(language.Compose(de, language.Base{}))

	// Remove all variants.
	fmt.Println(language.Compose(de, []language.Variant{}))

	// Remove all extensions.
	fmt.Println(language.Compose(de, []language.Extension{}))
	fmt.Println(language.Compose(fi, []language.Extension{}))

	// Remove all variants and extensions.
	fmt.Println(language.Compose(de.Raw()))

	// An error is gobbled or returned if non-nil.
	fmt.Println(language.Compose(language.ParseRegion("ZA")))
	fmt.Println(language.Compose(language.ParseRegion("HH")))

	// Compose uses the same Default canonicalization as Make.
	fmt.Println(language.Compose(language.Raw.Parse("en-Latn-UK")))

	// Call compose on a different CanonType for different results.
	fmt.Println(language.All.Compose(language.Raw.Parse("en-Latn-UK")))

	// Output:
	// nl-US <nil>
	// nl-x-piglatin <nil>
	// ja-US <nil>
	// nl-US-u-nu-arabic <nil>
	// nl-1901-u-co-phonebk <nil>
	// nl-1901-u-nu-arabic <nil>
	// und-1901-u-co-phonebk <nil>
	// de-u-co-phonebk <nil>
	// de-1901 <nil>
	// fi <nil>
	// de <nil>
	// und-ZA <nil>
	// und language: subtag "HH" is well-formed but unknown
	// en-Latn-GB <nil>
	// en-GB <nil>
}
// ExampleTag_Region shows that Region infers the most likely region for a tag
// without an explicit one, reporting a Low confidence for the guess.
func ExampleTag_Region() {
	ru := language.Make("ru")
	en := language.Make("en")
	fmt.Println(ru.Region())
	fmt.Println(en.Region())

	// Output:
	// RU Low
	// US Low
}
// ExampleTag_Base shows how Base picks the most likely base language for
// underspecified ("und") tags, together with a confidence level.
func ExampleTag_Base() {
	fmt.Println(language.Make("und").Base())
	fmt.Println(language.Make("und-US").Base())
	fmt.Println(language.Make("und-NL").Base())
	fmt.Println(language.Make("und-419").Base()) // Latin America
	fmt.Println(language.Make("und-ZZ").Base())

	// Output:
	// en Low
	// en High
	// nl High
	// es Low
	// en Low
}
// ExampleTags prints the English display names of several Dutch language tags,
// including regional and script variants.
func ExampleTags() {
	n := display.Tags(language.English)
	fmt.Println(n.Name(language.Make("nl")))
	fmt.Println(n.Name(language.Make("nl-BE")))
	fmt.Println(n.Name(language.Make("nl-CW")))
	fmt.Println(n.Name(language.Make("nl-Arab")))
	fmt.Println(n.Name(language.Make("nl-Cyrl-RU")))

	// Output:
	// Dutch
	// Flemish
	// Dutch (Curaçao)
	// Dutch (Arabic)
	// Dutch (Cyrillic, Russia)
}
//Load load locales from filesystem func (p *I18n) Load(dir string) error { if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { p.Logger.Debugf("Find locale file %s", path) if err != nil { return err } if info.Mode().IsRegular() { ss := strings.Split(info.Name(), ".") if len(ss) != 3 { return fmt.Errorf("Ingnore locale file %s", info.Name()) } code := ss[0] lang := language.Make(ss[1]) fd, err := os.Open(path) if err != nil { return err } defer fd.Close() sc := bufio.NewScanner(fd) for sc.Scan() { line := sc.Text() idx := strings.Index(line, "=") if idx <= 0 || line[0] == '#' { continue } p.set(&lang, strings.TrimSpace(code+"."+line[0:idx]), strings.TrimSpace(line[idx+1:len(line)])) } } return nil }); err != nil { return err } for lang := range p.Locales { lng := language.Make(lang) ks, err := p.Provider.Keys(&lng) if err != nil { return err } for _, k := range ks { p.Locales[lang][k] = p.Provider.Get(&lng, k) } p.Logger.Debugf("Find locale %s, %d items.", lang, len(p.Locales[lang])) } return nil }
// ExampleParent demonstrates the CLDR parent-locale relationship that
// Tag.Parent implements.
func ExampleParent() {
	p := func(tag string) {
		fmt.Printf("parent(%v): %v\n", tag, language.Make(tag).Parent())
	}
	p("zh-CN")

	// Australian English inherits from World English.
	p("en-AU")

	// If the tag has a different maximized script from its parent, a tag with
	// this maximized script is inserted. This allows different language tags
	// which have the same base language and script in common to inherit from
	// a common set of settings.
	p("zh-HK")

	// If the maximized script of the parent is not identical, CLDR will skip
	// inheriting from it, as it means there will not be many entries in common
	// and inheriting from it is nonsensical.
	p("zh-Hant")

	// The parent of a tag with variants and extensions is the tag with all
	// variants and extensions removed.
	p("de-1994-u-co-phonebk")

	// Remove default script.
	p("de-Latn-LU")

	// Output:
	// parent(zh-CN): zh
	// parent(en-AU): en-001
	// parent(zh-HK): zh-Hant
	// parent(zh-Hant): und
	// parent(de-1994-u-co-phonebk): de
	// parent(de-Latn-LU): de
}
// parseMain parses XML files in the main directory of the CLDR core.zip file.
// It collects the non-draft exemplar character sets per locale into the
// package-level mainLocales / localeChars tables.
func parseMain() {
	d := &cldr.Decoder{}
	d.SetDirFilter("main")
	d.SetSectionFilter("characters")
	data := decodeCLDR(d)
	for _, loc := range data.Locales() {
		x := data.RawLDML(loc)
		if skipLang(x.Identity.Language.Type) {
			continue
		}
		if x.Characters != nil {
			// Re-decode the locale via LDML (presumably with inheritance
			// resolved). NOTE(review): the error is ignored here — confirm
			// that failure cannot leave x nil.
			x, _ = data.LDML(loc)
			// Canonicalize the locale identifier used as the map key.
			loc = language.Make(loc).String()
			for _, ec := range x.Characters.ExemplarCharacters {
				// Skip draft entries.
				if ec.Draft != "" {
					continue
				}
				// First sighting of this locale: register it.
				if _, ok := localeChars[loc]; !ok {
					mainLocales = append(mainLocales, loc)
					localeChars[loc] = make(charSets)
				}
				localeChars[loc][ec.Type] = parseCharacters(ec.Data())
			}
		}
	}
}
// ExampleCanonType compares the four canonicalization modes (Default, BCP47,
// Macro, All) on the same inputs.
func ExampleCanonType() {
	p := func(id string) {
		fmt.Printf("Default(%s) -> %s\n", id, language.Make(id))
		fmt.Printf("BCP47(%s) -> %s\n", id, language.BCP47.Make(id))
		fmt.Printf("Macro(%s) -> %s\n", id, language.Macro.Make(id))
		fmt.Printf("All(%s) -> %s\n", id, language.All.Make(id))
	}
	p("en-Latn")
	p("sh")
	p("zh-cmn")
	p("bjd")
	p("iw-Latn-fonipa-u-cu-usd")

	// Output:
	// Default(en-Latn) -> en-Latn
	// BCP47(en-Latn) -> en
	// Macro(en-Latn) -> en-Latn
	// All(en-Latn) -> en
	// Default(sh) -> sr-Latn
	// BCP47(sh) -> sh
	// Macro(sh) -> sh
	// All(sh) -> sr-Latn
	// Default(zh-cmn) -> cmn
	// BCP47(zh-cmn) -> cmn
	// Macro(zh-cmn) -> zh
	// All(zh-cmn) -> zh
	// Default(bjd) -> drl
	// BCP47(bjd) -> drl
	// Macro(bjd) -> bjd
	// All(bjd) -> drl
	// Default(iw-Latn-fonipa-u-cu-usd) -> he-Latn-fonipa-u-cu-usd
	// BCP47(iw-Latn-fonipa-u-cu-usd) -> he-Latn-fonipa-u-cu-usd
	// Macro(iw-Latn-fonipa-u-cu-usd) -> iw-Latn-fonipa-u-cu-usd
	// All(iw-Latn-fonipa-u-cu-usd) -> he-Latn-fonipa-u-cu-usd
}
func (stop *StopWords) stopWordsCount(lang string, text string) wordStats { if text == "" { return wordStats{} } ws := wordStats{} stopWords := set.New() text = strings.ToLower(text) tokenizer := NewMultilangTokenizer(language.Make(lang)) items := tokenizer.Tokenize(text) stops := stop.cachedStopWords[lang] if stops != nil { for _, item := range items { if stops.Has(item) { stopWords.Add(item) } } } ws.stopWordCount = stopWords.Size() ws.wordCount = len(items) ws.stopWords = stopWords return ws }
func TestTokenizer(t *testing.T) { japanese := language.Make("en") tokenizer := NewMultilangTokenizer(japanese) tokens := tokenizer.Tokenize("Language and Locale Matching in Go") if !reflect.DeepEqual(tokens, []string{"Language", "and", "Locale", "Matching", "in", "Go"}) { t.Fatalf("cannot tokenize english string. tokens %v", tokens) } }
func TestJapaneseTokenizer(t *testing.T) { japanese := language.Make("ja") tokenizer := NewMultilangTokenizer(japanese) tokens := tokenizer.Tokenize("すもももももももものうち") if !reflect.DeepEqual(tokens, []string{"すもも", "も", "もも", "も", "もも", "の", "うち"}) { t.Fatalf("cannot tokenize japanese string. tokens %v", tokens) } }
// ExampleTag_Script shows that Script returns the most likely script with a
// confidence that is Exact only when the script was specified explicitly.
func ExampleTag_Script() {
	en := language.Make("en")
	sr := language.Make("sr")
	// Underscored local name mirrors the tag literal; not idiomatic Go naming.
	sr_Latn := language.Make("sr_Latn")
	fmt.Println(en.Script())
	fmt.Println(sr.Script())

	// Was a script explicitly specified?
	_, c := sr.Script()
	fmt.Println(c == language.Exact)
	_, c = sr_Latn.Script()
	fmt.Println(c == language.Exact)

	// Output:
	// Latn High
	// Cyrl Low
	// false
	// true
}
// Supported returns the list of languages for which collating differs from its parent. func Supported() []language.Tag { ids := strings.Split(availableLocales, ",") tags := make([]language.Tag, len(ids)) for i, s := range ids { tags[i] = language.Make(s) } return tags }
func checkLang(srcStr, targStr string) tree.Bool { srcLang := language.Make(srcStr) srcRegion, srcRegionConf := srcLang.Region() targLang := language.Make(targStr) targRegion, targRegionConf := targLang.Region() if srcRegionConf == language.Exact && targRegionConf != language.Exact { return tree.Bool(false) } if srcRegion != targRegion && srcRegionConf == language.Exact && targRegionConf == language.Exact { return tree.Bool(false) } _, _, conf := language.NewMatcher([]language.Tag{srcLang}).Match(targLang) return tree.Bool(conf >= language.High) }
// getCollator returns a collate package Collator pointer. This can result in a // panic, so this function must recover from that if it happens. func getCollator(locale string) *collate.Collator { defer func() { recover() }() tag := language.Make(locale) if tag == language.Und { return nil } return collate.New(tag) }
// ExampleDictionary shows how to reduce the amount of data linked into your
// binary by only using the predefined Dictionary variables of the languages you
// wish to support.
func ExampleDictionary() {
	tags := []language.Tag{
		language.English,
		language.German,
		language.Japanese,
		language.Russian,
	}
	// dicts must parallel tags: the index returned by Match is used to select
	// the dictionary below.
	dicts := []*display.Dictionary{
		display.English,
		display.German,
		display.Japanese,
		display.Russian,
	}

	m := language.NewMatcher(tags)

	getDict := func(t language.Tag) *display.Dictionary {
		_, i, confidence := m.Match(t)
		// Skip this check if you want to support a fall-back language, which
		// will be the first one passed to NewMatcher.
		if confidence == language.No {
			return nil
		}
		return dicts[i]
	}

	// The matcher will match Swiss German to German.
	n := getDict(language.Make("gsw")).Languages()
	fmt.Println(n.Name(language.German))
	fmt.Println(n.Name(language.Make("de-CH")))
	fmt.Println(n.Name(language.Make("gsw")))

	// Output:
	// Deutsch
	// Schweizer Hochdeutsch
	// Schweizerdeutsch
}
// ExampleTag_ComprehensibleTo illustrates the confidence levels returned for
// mutual comprehensibility between language tags.
func ExampleTag_ComprehensibleTo() {
	// Various levels of comprehensibility.
	fmt.Println(language.English.ComprehensibleTo(language.English))
	fmt.Println(language.BritishEnglish.ComprehensibleTo(language.AmericanEnglish))

	// An explicit Und results in no match.
	fmt.Println(language.Und.ComprehensibleTo(language.English))

	fmt.Println("----")

	// There is usually no mutual comprehensibility between different scripts.
	fmt.Println(language.English.ComprehensibleTo(language.Make("en-Dsrt")))

	// One exception is for Traditional versus Simplified Chinese, albeit with
	// a low confidence.
	fmt.Println(language.SimplifiedChinese.ComprehensibleTo(language.TraditionalChinese))

	fmt.Println("----")

	// A Swiss German speaker will often understand High German.
	fmt.Println(language.Make("de").ComprehensibleTo(language.Make("gsw")))

	// The converse is not generally the case.
	fmt.Println(language.Make("gsw").ComprehensibleTo(language.Make("de")))

	// Output:
	// Exact
	// High
	// No
	// ----
	// No
	// Low
	// ----
	// High
	// No
}
// parseCollation parses XML files in the collation directory of the CLDR core.zip file.
// For each locale it selects the requested collation types (or the locale's
// default), derives a language tag (with a "co" key for non-default types),
// and feeds each collation to the builder's tailoring.
func parseCollation(b *build.Builder) {
	d := &cldr.Decoder{}
	d.SetDirFilter("collation")
	data := decodeCLDR(d)
	for _, loc := range data.Locales() {
		x, err := data.LDML(loc)
		failOnError(err)
		if skipLang(x.Identity.Language.Type) {
			continue
		}
		cs := x.Collations.Collation
		sl := cldr.MakeSlice(&cs)
		// types is presumably a command-line flag selecting collation types;
		// with no explicit selection, fall back to the locale's default.
		if len(types.s) == 0 {
			sl.SelectAnyOf("type", x.Collations.Default())
		} else if !types.all {
			sl.SelectAnyOf("type", types.s...)
		}
		sl.SelectOnePerGroup("alt", altInclude())
		for _, c := range cs {
			id, err := language.Parse(loc)
			if err != nil {
				// NOTE(review): for "en-US-posix" the locale is skipped with a
				// diagnostic, while every other unparsable locale silently
				// falls back to the posix variant tag — confirm this branch is
				// not inverted.
				if loc == "en-US-posix" {
					fmt.Fprintf(os.Stderr, "invalid locale: %q", err.Error())
					continue
				}
				id = language.Make("en-US-u-va-posix")
			}
			// Support both old- and new-style defaults.
			d := c.Type
			if x.Collations.DefaultCollation == nil {
				d = x.Collations.Default()
			} else {
				d = x.Collations.DefaultCollation.Data()
			}
			// We assume tables are being built either for search or collation,
			// but not both. For search the default is always "search".
			if d != c.Type && c.Type != "search" {
				id, err = id.SetTypeForKey("co", c.Type)
				failOnError(err)
			}
			t := b.Tailoring(id)
			c.Process(processor{t})
		}
	}
}
// ExampleParseAcceptLanguage parses HTTP Accept-Language headers and feeds the
// resulting tag lists to a Matcher.
func ExampleParseAcceptLanguage() {
	// Tags are reordered based on their q rating. A missing q value means 1.0.
	fmt.Println(language.ParseAcceptLanguage(" nn;q=0.3, en-gb;q=0.8, en,"))

	m := language.NewMatcher([]language.Tag{language.Norwegian, language.Make("en-AU")})

	t, _, _ := language.ParseAcceptLanguage("da, en-gb;q=0.8, en;q=0.7")
	fmt.Println(m.Match(t...))

	// Danish is pretty close to Norwegian.
	t, _, _ = language.ParseAcceptLanguage(" da, nl")
	fmt.Println(m.Match(t...))

	// Output:
	// [en en-GB nn] [1 0.8 0.3] <nil>
	// en-AU 1 High
	// no 0 High
}
//Read all .ini files in dir, where the filenames are BCP 47 tags //Use the language matcher to get the best match for the locale preference func InitI18n(locale, dir string) { pref := language.Make(locale) // falls back to en-US on parse error files, err := ioutil.ReadDir(dir) if err != nil { log.Fatal(err) } serverLangs := make([]language.Tag, 1) serverLangs[0] = language.AmericanEnglish // en-US fallback for _, file := range files { if filepath.Ext(file.Name()) == ".ini" { name := strings.TrimSuffix(file.Name(), ".ini") tag, err := language.Parse(name) if err == nil { serverLangs = append(serverLangs, tag) } } } matcher := language.NewMatcher(serverLangs) tag, _, _ := matcher.Match(pref) fname := filepath.Join(dir, tag.String()+".ini") conf, err := configparser.Read(fname) if err != nil { log.Fatal("cannot read translation file for", tag.String(), err) } formats, err := conf.Section("formats") if err != nil { log.Fatal("Cannot read formats sections in translations for", tag.String(), err) } translations, err := conf.Section("strings") if err != nil { log.Fatal("Cannot read strings sections in translations for", tag.String(), err) } i18nProvider = &i18n{ translation_dir: dir, formats: formats.Options(), translations: translations.Options(), locale: tag, } }
// init populates the generator's character sets for the locale id: each set is
// taken from the locale itself or, if undefined there, from the nearest parent
// locale that defines it. Case rewriting is then applied and cumulative
// character indexes are computed.
func (g *phraseGenerator) init(id string) {
	ec := exemplarCharacters
	// Canonicalize the id before looking it up.
	loc := language.Make(id).String()
	// get sets for locale or parent locale if the set is not defined.
	for i := range g.sets {
		// Walk up the parent chain until a locale defines set i.
		for p, ok := loc, true; ok; p, ok = parent(p) {
			if set, ok := ec[p]; ok && set[i] != "" {
				g.sets[i].set = strings.Split(set[i], " ")
				break
			}
		}
	}
	r := newRewriter()
	// cases is presumably a command-line flag pointer — TODO confirm.
	r.addCases = *cases
	for i := range g.sets {
		g.sets[i].set = r.rewrite(g.sets[i].set)
	}
	// compute indexes: charIndex is the running total of characters up to and
	// including each set.
	for i, set := range g.sets {
		g.n += len(set.set)
		g.sets[i].charIndex = g.n
	}
}
//Match get language.Tag from string func Match(lng string) language.Tag { tag, _, _ := matcher.Match(language.Make(lng)) return tag }
func newGoCollator(loc string) (Collator, error) { c := &goCollator{c: collate.New(language.Make(loc))} return c, nil }
package main

import (
	"C"
	"golang.org/x/text/language"
	"log"
)

var (
	// mather (sic — presumably a typo for "matcher"; renaming would require
	// touching the truncated init below) matches a requested language list
	// against the supported set. The first tag, en, is the fallback.
	mather = language.NewMatcher([]language.Tag{language.Make("en"), language.Make("ja"), language.Make("zh-TW"), language.Make("zh-CN")})
)

// preferredLanguageFrom returns the best supported language for an HTTP
// Accept-Language header value, as a Go string pointer.
//
//export preferredLanguageFrom
func preferredLanguageFrom(httpAcceptLanguage *string) *string {
	tag, _, _ := language.ParseAcceptLanguage(*httpAcceptLanguage)
	t, _, _ := mather.Match(tag...)
	l := t.String()
	return &l
}

// preferredLanguageFromUseCString is the C-string variant of
// preferredLanguageFrom.
//
//export preferredLanguageFromUseCString
func preferredLanguageFromUseCString(cHttpAcceptLanguage *C.char) *C.char {
	httpAcceptLanguage := C.GoString(cHttpAcceptLanguage)
	tag, _, _ := language.ParseAcceptLanguage(httpAcceptLanguage)
	t, _, _ := mather.Match(tag...)
	// NOTE(review): C.CString allocates with C malloc; the C caller is
	// responsible for freeing the returned string.
	return C.CString(t.String())
}

func init() {
// Clean removes useless spaces and stop words from a byte slice.
// BCP 47 or ISO 639-1 language code (if unknown, we'll apply english filters).
// If cleanHTML is TRUE, remove HTML tags from content and unescape HTML entities.
func Clean(content []byte, langCode string, cleanHTML bool) []byte {
	//Remove HTML tags
	if cleanHTML {
		content = remTags.ReplaceAll(content, []byte(" "))
		content = []byte(html.UnescapeString(string(content)))
	}

	//Parse language. Make never fails; an unknown code yields "und", whose
	//Base resolves to "en" (with Low confidence), so the english list applies.
	tag := language.Make(langCode)
	base, _ := tag.Base()
	langCode = base.String()

	//Remove stop words by using a list of most frequent words
	switch langCode {
	case "ar":
		content = removeStopWords(content, arabic)
	case "bg":
		content = removeStopWords(content, bulgarian)
	case "cs":
		content = removeStopWords(content, czech)
	case "da":
		content = removeStopWords(content, danish)
	case "de":
		content = removeStopWords(content, german)
	case "el":
		content = removeStopWords(content, greek)
	case "en":
		content = removeStopWords(content, english)
	case "es":
		content = removeStopWords(content, spanish)
	case "fa":
		content = removeStopWords(content, persian)
	case "fr":
		content = removeStopWords(content, french)
	case "fi":
		content = removeStopWords(content, finnish)
	case "hu":
		content = removeStopWords(content, hungarian)
	case "it":
		content = removeStopWords(content, italian)
	case "ja":
		content = removeStopWords(content, japanese)
	case "lv":
		content = removeStopWords(content, latvian)
	case "nl":
		content = removeStopWords(content, dutch)
	case "no":
		content = removeStopWords(content, norwegian)
	case "pl":
		content = removeStopWords(content, polish)
	case "pt":
		content = removeStopWords(content, portuguese)
	case "ro":
		content = removeStopWords(content, romanian)
	case "ru":
		content = removeStopWords(content, russian)
	case "sk":
		content = removeStopWords(content, slovak)
	case "sv":
		content = removeStopWords(content, swedish)
	case "th":
		content = removeStopWords(content, thai)
	case "tr":
		content = removeStopWords(content, turkish)
	}

	//Remove duplicated space characters
	content = oneSpace.ReplaceAll(content, []byte(" "))
	return content
}
// main generates index.go for the language package: a list of special tags and
// a map from compact core tags (base/script/region key) to small indices, for
// every CLDR locale that has data attached.
func main() {
	gen.Init()

	// Read the CLDR zip file.
	r := gen.OpenCLDRCoreZip()
	defer r.Close()

	d := &cldr.Decoder{}
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatalf("DecodeZip: %v", err)
	}

	w := gen.NewCodeWriter()
	defer func() {
		buf := &bytes.Buffer{}

		if _, err = w.WriteGo(buf, "language"); err != nil {
			log.Fatalf("Error formatting file index.go: %v", err)
		}

		// Since we're generating a table for our own package we need to rewrite
		// doing the equivalent of go fmt -r 'language.b -> b'. Using
		// bytes.Replace will do.
		out := bytes.Replace(buf.Bytes(), []byte("language."), nil, -1)
		if err := ioutil.WriteFile("index.go", out, 0600); err != nil {
			log.Fatalf("Could not create file index.go: %v", err)
		}
	}()

	// Collect every locale that carries any LDML data section.
	m := map[language.Tag]bool{}
	for _, lang := range data.Locales() {
		if x := data.RawLDML(lang); false ||
			x.LocaleDisplayNames != nil ||
			x.Characters != nil ||
			x.Delimiters != nil ||
			x.Measurement != nil ||
			x.Dates != nil ||
			x.Numbers != nil ||
			x.Units != nil ||
			x.ListPatterns != nil ||
			x.Collations != nil ||
			x.Segmentations != nil ||
			x.Rbnf != nil ||
			x.Annotations != nil ||
			x.Metadata != nil {
			// TODO: support POSIX natively, albeit non-standard.
			tag := language.Make(strings.Replace(lang, "_POSIX", "-u-va-posix", 1))
			m[tag] = true
		}
	}

	// Partition into core tags (no variants/extensions) and special tags.
	var core, special []language.Tag
	for t := range m {
		if x := t.Extensions(); len(x) != 0 && fmt.Sprint(x) != "[u-va-posix]" {
			log.Fatalf("Unexpected extension %v in %v", x, t)
		}
		if len(t.Variants()) == 0 && len(t.Extensions()) == 0 {
			core = append(core, t)
		} else {
			special = append(special, t)
		}
	}

	w.WriteComment(`
	NumCompactTags is the number of common tags. The maximum tag is
	NumCompactTags-1.`)
	w.WriteConst("NumCompactTags", len(core)+len(special))

	sort.Sort(byAlpha(special))
	w.WriteVar("specialTags", special)

	type coreKey struct {
		base   language.Base
		script language.Script
		region language.Region
	}
	w.WriteType(coreKey{})

	// TODO: order by frequency?
	sort.Sort(byAlpha(core))

	// Size computations are just an estimate.
	w.Size += int(reflect.TypeOf(map[coreKey]uint16{}).Size())
	w.Size += len(core) * int(reflect.TypeOf(coreKey{}).Size()+2) // 2 is for uint16

	fmt.Fprintln(w, "var coreTags = map[coreKey]uint16{")
	fmt.Fprintln(w, "coreKey{}: 0, // und")
	i := len(special) + 1 // Und and special tags already written.
	for _, t := range core {
		if t == language.Und {
			continue
		}
		fmt.Fprint(w.Hash, t, i)
		b, s, r := t.Raw()
		// Render the key literal and strip the package qualifiers, matching
		// the "language." rewrite done for the rest of the file.
		key := fmt.Sprintf("%#v", coreKey{b, s, r})
		key = strings.Replace(key[len("main."):], "language.", "", -1)
		fmt.Fprintf(w, "%s: %d, // %s\n", key, i, t)
		i++
	}
	fmt.Fprintln(w, "}")
}
// main generates index.go for the language package: a list of special tags and
// a map from packed uint32 core-tag keys to small indices, covering every CLDR
// locale plus the locales referenced by plural rules.
func main() {
	gen.Init()

	// Read the CLDR zip file.
	r := gen.OpenCLDRCoreZip()
	defer r.Close()

	d := &cldr.Decoder{}
	data, err := d.DecodeZip(r)
	if err != nil {
		log.Fatalf("DecodeZip: %v", err)
	}

	w := gen.NewCodeWriter()
	defer func() {
		buf := &bytes.Buffer{}

		if _, err = w.WriteGo(buf, "language"); err != nil {
			log.Fatalf("Error formatting file index.go: %v", err)
		}

		// Since we're generating a table for our own package we need to rewrite
		// doing the equivalent of go fmt -r 'language.b -> b'. Using
		// bytes.Replace will do.
		out := bytes.Replace(buf.Bytes(), []byte("language."), nil, -1)
		if err := ioutil.WriteFile("index.go", out, 0600); err != nil {
			log.Fatalf("Could not create file index.go: %v", err)
		}
	}()

	m := map[language.Tag]bool{}
	for _, lang := range data.Locales() {
		// We include all locales unconditionally to be consistent with en_US.
		// We want en_US, even though it has no data associated with it.

		// TODO: put any of the languages for which no data exists at the end
		// of the index. This allows all components based on ICU to use that
		// as the cutoff point.
		// if x := data.RawLDML(lang); false ||
		// x.LocaleDisplayNames != nil ||
		// x.Characters != nil ||
		// x.Delimiters != nil ||
		// x.Measurement != nil ||
		// x.Dates != nil ||
		// x.Numbers != nil ||
		// x.Units != nil ||
		// x.ListPatterns != nil ||
		// x.Collations != nil ||
		// x.Segmentations != nil ||
		// x.Rbnf != nil ||
		// x.Annotations != nil ||
		// x.Metadata != nil {

		// TODO: support POSIX natively, albeit non-standard.
		tag := language.Make(strings.Replace(lang, "_POSIX", "-u-va-posix", 1))
		m[tag] = true
		// }
	}

	// Include locales for plural rules, which uses a different structure.
	for _, plurals := range data.Supplemental().Plurals {
		for _, rules := range plurals.PluralRules {
			for _, lang := range strings.Split(rules.Locales, " ") {
				m[language.Make(lang)] = true
			}
		}
	}

	// Partition into core tags (no variants/extensions) and special tags.
	var core, special []language.Tag
	for t := range m {
		if x := t.Extensions(); len(x) != 0 && fmt.Sprint(x) != "[u-va-posix]" {
			log.Fatalf("Unexpected extension %v in %v", x, t)
		}
		if len(t.Variants()) == 0 && len(t.Extensions()) == 0 {
			core = append(core, t)
		} else {
			special = append(special, t)
		}
	}

	w.WriteComment(`
	NumCompactTags is the number of common tags. The maximum tag is
	NumCompactTags-1.`)
	w.WriteConst("NumCompactTags", len(core)+len(special))

	sort.Sort(byAlpha(special))
	w.WriteVar("specialTags", special)

	// TODO: order by frequency?
	sort.Sort(byAlpha(core))

	// Size computations are just an estimate.
	w.Size += int(reflect.TypeOf(map[uint32]uint16{}).Size())
	w.Size += len(core) * 6 // size of uint32 and uint16

	fmt.Fprintln(w)
	fmt.Fprintln(w, "var coreTags = map[uint32]uint16{")
	fmt.Fprintln(w, "0x0: 0, // und")
	i := len(special) + 1 // Und and special tags already written.
	for _, t := range core {
		if t == language.Und {
			continue
		}
		fmt.Fprint(w.Hash, t, i)
		b, s, r := t.Raw()
		// Key is base/script/region indices packed as fixed-width hex digits.
		fmt.Fprintf(w, "0x%s%s%s: %d, // %s\n",
			getIndex(b, 3), // 3 is enough as it is guaranteed to be a compact number
			getIndex(s, 2),
			getIndex(r, 3),
			i, t)
		i++
	}
	fmt.Fprintln(w, "}")
}
// ExampleMatcher_bestMatch gives some examples of getting the best match of
// a set of tags to any of the tags of given set.
func ExampleMatcher() {
	// This is the set of tags from which we want to pick the best match. These
	// can be, for example, the supported languages for some package.
	tags := []language.Tag{
		language.English,
		language.BritishEnglish,
		language.French,
		language.Afrikaans,
		language.BrazilianPortuguese,
		language.EuropeanPortuguese,
		language.Croatian,
		language.SimplifiedChinese,
		language.Raw.Make("iw-IL"),
		language.Raw.Make("iw"),
		language.Raw.Make("he"),
	}
	m := language.NewMatcher(tags)

	// A simple match.
	fmt.Println(m.Match(language.Make("fr")))

	// Australian English is closer to British than American English.
	fmt.Println(m.Match(language.Make("en-AU")))

	// Default to the first tag passed to the Matcher if there is no match.
	fmt.Println(m.Match(language.Make("ar")))

	// Get the default tag.
	fmt.Println(m.Match())

	fmt.Println("----")

	// Croatian speakers will likely understand Serbian written in Latin script.
	fmt.Println(m.Match(language.Make("sr-Latn")))

	// We match SimplifiedChinese, but with Low confidence.
	fmt.Println(m.Match(language.TraditionalChinese))

	// Serbian in Latin script is a closer match to Croatian than Traditional
	// Chinese to Simplified Chinese.
	fmt.Println(m.Match(language.TraditionalChinese, language.Make("sr-Latn")))

	fmt.Println("----")

	// In case a multiple variants of a language are available, the most spoken
	// variant is typically returned.
	fmt.Println(m.Match(language.Portuguese))

	// Pick the first value passed to Match in case of a tie.
	fmt.Println(m.Match(language.Dutch, language.Make("fr-BE"), language.Make("af-NA")))
	fmt.Println(m.Match(language.Dutch, language.Make("af-NA"), language.Make("fr-BE")))

	fmt.Println("----")

	// If a Matcher is initialized with a language and it's deprecated version,
	// it will distinguish between them.
	fmt.Println(m.Match(language.Raw.Make("iw")))

	// However, for non-exact matches, it will treat deprecated versions as
	// equivalent and consider other factors first.
	fmt.Println(m.Match(language.Raw.Make("he-IL")))

	// Output:
	// fr 2 Exact
	// en-GB 1 High
	// en 0 No
	// en 0 No
	// ----
	// hr 6 High
	// zh-Hans 7 Low
	// hr 6 High
	// ----
	// pt-BR 4 High
	// fr 2 High
	// af 3 High
	// ----
	// iw 9 Exact
	// iw-IL 8 Exact
}
//Ts translate by lang func (p *I18n) Ts(lng string, code string, args ...interface{}) string { l := language.Make(lng) return p.T(&l, code, args...) }
// +build OMIT

package main

import (
	"fmt"
	"golang.org/x/text/language"
	"golang.org/x/text/language/display"
)

// userPrefs lists the user's preferred languages, most preferred first.
var userPrefs = []language.Tag{
	language.Make("gsw"), // Swiss German
	language.Make("fr"),  // French
}

// serverLangs lists the languages the server supports; the first entry is the
// fallback returned when nothing matches.
var serverLangs = []language.Tag{
	language.AmericanEnglish, // en-US fallback
	language.German,          // de
}

var matcher = language.NewMatcher(serverLangs)

func main() {
	tag, index, confidence := matcher.Match(userPrefs...)
	fmt.Printf("best match: %s (%s) index=%d confidence=%v\n",
		display.English.Tags().Name(tag),
		display.Self.Name(tag),
		index, confidence)
	// best match: German (Deutsch) index=1 confidence=High