// GetMeanings extracts the per-dictionary meanings from the Lingvo Live
// response articles by scraping the embedded article HTML.
// One Meaning is produced per article; Meaning.Text is the first collected
// value and Meaning.All holds every non-empty value found.
func (t *LingvoLiveTranslatorResponseFull) GetMeanings() []IParticularMeaning {
	meanings := []IParticularMeaning{}
	for _, v := range t.Articles {
		meaning := &Meaning{Dictionary: v.Dictionary}
		// NOTE(review): the goquery parse error is discarded; a malformed
		// BodyHtml silently yields an empty meaning — confirm intended.
		doc, _ := goquery.NewDocumentFromReader(strings.NewReader(v.BodyHtml))
		table := doc.Find(".article .article-body .article-body-items")
		table.Find(".article-body-items").Each(func(i int, s *goquery.Selection) {
			// Skip part-of-speech-only items that carry no article text.
			if s.Find(".paragraph-marker-top-level").Text() == "" {
				if s.Find(".parts-of-speech").Text() != "" && len(s.Find(".article-text").Nodes) == 0 {
					return
				}
			}
			value := s.Find(".article-text-wrap .article-text").Text()
			if value == "" { // maybe comment
				value = s.Find(".article-text-wrap .comment").Text()
			}
			// NOTE(review): TrimLeft treats "<-s, ->" as a CHARACTER SET
			// {'<','-','s',',',' ','>'}, not a prefix; it also eats leading
			// 's', ',' etc. from real values — confirm this is intended.
			value = strings.TrimLeft(value, "<-s, ->")
			value = strings.TrimLeft(value, ";")
			value = strings.TrimSpace(value)
			if "" != value {
				meaning.All = append(meaning.All, value)
			}
			// The first collected value doubles as the headline text.
			if len(meaning.All) > 0 && meaning.Text == "" {
				meaning.Text = meaning.All[0]
			}
		})
		meanings = append(meanings, meaning)
	}
	return meanings
}
// TestEndToEndMkdirNoParentTimestamp verifies that creating a subdirectory
// updates the cached change time of its parent directory's attributes.
func TestEndToEndMkdirNoParentTimestamp(t *testing.T) {
	tc := NewTestCase(t)
	defer tc.Clean()
	tc.RunSuccess(WorkRequest{
		Argv: []string{"mkdir", "-p", tc.wd + "/dir"},
	})
	// Attribute cache keys are stored without the leading slash.
	rootless := strings.TrimLeft(tc.wd, "/")
	beforeTime := tc.master.attributes.Get(rootless + "/dir").ChangeTime()
	var after *attr.FileAttr
	// Keep creating subdirectories (10ms apart) until one lands on a
	// strictly later change time than the recorded baseline.
	for i := 0; ; i++ {
		time.Sleep(10e6)
		subdir := fmt.Sprintf(tc.wd+"/dir/subdir%d", i)
		tc.RunSuccess(WorkRequest{
			Argv: []string{"mkdir", subdir},
		})
		after = tc.master.attributes.Get(strings.TrimLeft(subdir, "/"))
		if !after.ChangeTime().Equal(beforeTime) {
			break
		}
	}
	// The parent's ctime must have moved along with the child creation.
	afterDir := tc.master.attributes.Get(rootless + "/dir")
	if afterDir.ChangeTime().Equal(beforeTime) {
		t.Errorf("Forgot to update parent timestamps")
	}
}
// TrimLeft returns a slice of the string s with all leading Unicode code
// points contained in cutset removed.
func TrimLeft(s string, cutset string) string {
	// NOTE(review): the prints below look like inline documentation
	// examples, but they execute on every call — confirm intended.
	fmt.Printf("[%q]\n", strings.TrimLeft(" !!! Achtung! Achtung! !!! ", "! "))
	// ["Achtung! Achtung! !!! "]
	fmt.Printf("[%q]\n", strings.TrimLeft(" !!! Achtung! Achtung! !!! @@@ ", "!@"))
	// [" !!! Achtung! Achtung! !!! @@@ "]
	fmt.Printf("[%q]\n", strings.TrimLeft(" !!! Achtung! Achtung! !!! ", ""))
	// [" !!! Achtung! Achtung! !!! "]
	fmt.Printf("[%q]\n", strings.TrimLeft(" !!! Achtung! Achtung! !!! ", " "))
	// ["!!! Achtung! Achtung! !!! "]
	return strings.TrimLeft(s, cutset)
}
// The request message must like: //{ // "Event":0, // "FileName":"+/path/to/file", or "FileName":"-/path/to/file" // +/- means add or remove watch // .... //} func (em *Distributer) PullRequest() (map[string]string, error) { str, err := em.Read() if err != nil { logging.Debug("Pull request:", err) return nil, err } m, err := router.ParseMessage(str) if err != nil { logging.Debug("Pull request:", err) return nil, err } if m.Event != 0 { return nil, errors.New("Invalid request") } msg := make(map[string]string) if strings.HasPrefix(m.FileName, "+") { msg["ACTION"] = "ADD" msg["PATH"] = strings.TrimLeft(m.FileName, "+") return msg, nil } else if strings.HasPrefix(m.FileName, "-") { msg["ACTION"] = "REMOVE" msg["PATH"] = strings.TrimLeft(m.FileName, "-") return msg, nil } return nil, errors.New("Shouldn't come here") }
// goodOSArchConstraints reports whether every "+build" comment group that
// appears before the package clause is satisfied by the current goos/goarch.
// Constraint semantics: lines AND together, space-separated groups OR
// together, comma-separated constraints within a group AND together.
func goodOSArchConstraints(file *ast.File) (ok bool) {
	// Only comments before the package keyword can be build constraints.
	max := file.Package
	for _, comment := range file.Comments {
		if comment.Pos() >= max {
			break
		}
		if len(comment.List) == 0 {
			continue
		}
		// Detect a build-constraint comment block by its first line.
		line := comment.List[0].Text
		line = strings.TrimLeft(line, "/")
		line = strings.TrimSpace(line)
		if !strings.HasPrefix(line, "+build ") {
			continue
		}
		// Loop over lines == AND
		for _, cmt := range comment.List {
			line := cmt.Text
			line = strings.TrimLeft(line, "/")
			// [7:] skips the "+build " marker.
			line = strings.TrimSpace(line)[7:]
			satisfied := false
			// Loop over groups == OR
			for _, group := range strings.Split(line, " ") {
				gSatisfied := true
				// Loop over constraints == AND
				for _, constraint := range strings.Split(group, ",") {
					// Matching the current OS/arch never falsifies a group.
					if constraint == goos || constraint == goarch {
						continue
					}
					// Any other known OS/arch token means "not this target".
					if knownOS[constraint] || knownArch[constraint] {
						gSatisfied = false
					}
					if constraint == "ignore" {
						gSatisfied = false
					}
				}
				if gSatisfied {
					satisfied = true
				}
			}
			if !satisfied {
				return false
			}
		}
	}
	return true
}
func parseArgs(flags []*Flag, args []string) (map[string]*Flag, []string, error) { flagMap := map[string]*Flag{} for _, f := range flags { ks := strings.Split(f.Key, ",") for _, k := range ks { k = strings.TrimSpace(k) k = strings.TrimLeft(k, "-") flagMap[k] = f } } updatedArgs := []string{} for i := 0; i < len(args); i++ { k := args[i] if !isFlag(k) { updatedArgs = append(updatedArgs, k) continue } k = strings.TrimLeft(k, "-") f, ok := flagMap[k] if !ok { return nil, nil, errors.New("no flag found: " + k) } if i+1 == len(args) || isFlag(args[i+1]) { f.Value = ptr("") } else { f.Value = ptr(args[i+1]) i++ } } return flagMap, updatedArgs, nil }
func parseRecord(data string, config *Config) (string, string) { va := strings.Split(data, _SEPARATOR_SYMBOL) propName := va[0] propValue := strings.Join(va[1:], _SEPARATOR_SYMBOL) propName = strings.TrimLeft(propName, " ") propValue = strings.TrimLeft(propValue, " ") if strings.Contains(propValue, _MACRO_SYMBOL) { macroses := macroRE.FindAllStringSubmatch(propValue, -1) for _, macros := range macroses { macroFull := macros[0] macroSect := macros[1] macroProp := macros[2] macroVal := config.GetS(macroSect + _DELIMITER + macroProp) propValue = strings.Replace(propValue, macroFull, macroVal, -1) } } return propName, propValue }
func parseSeasonEp(name string) *showSeasonEp { for _, r := range regularShowRegexes { reu := types.RegexpUtil{Regex: r} m := reu.FindStringSubmatchMap(name) if len(m) > 0 { sse := &showSeasonEp{} if s, ok := m["season"]; ok { sse.Season = strings.TrimLeft(s, "0") } if s, ok := m["episode"]; ok { sse.Episode = strings.TrimLeft(s, "0") } if s, ok := m["lastepisode"]; ok { sse.LastEpisode = s } return sse } } m := seasonEpRegex7.FindStringSubmatch(name) if m != nil { return &showSeasonEp{ Season: m[3] + m[4], Episode: m[5] + "/" + m[6], Airdate: m[2], } } //TODO: add the rest return nil }
// ParseEnvFile reads a file with environment variables enumerated by lines // // ``Environment variable names used by the utilities in the Shell and // Utilities volume of IEEE Std 1003.1-2001 consist solely of uppercase // letters, digits, and the '_' (underscore) from the characters defined in // Portable Character Set and do not begin with a digit. *But*, other // characters may be permitted by an implementation; applications shall // tolerate the presence of such names.'' // -- http://pubs.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap08.html // // As of #16585, it's up to application inside docker to validate or not // environment variables, that's why we just strip leading whitespace and // nothing more. func ParseEnvFile(filename string) ([]string, error) { fh, err := os.Open(filename) if err != nil { return []string{}, err } defer fh.Close() lines := []string{} scanner := bufio.NewScanner(fh) for scanner.Scan() { // trim the line from all leading whitespace first line := strings.TrimLeft(scanner.Text(), whiteSpaces) // line is not empty, and not starting with '#' if len(line) > 0 && !strings.HasPrefix(line, "#") { data := strings.SplitN(line, "=", 2) // trim the front of a variable, but nothing else variable := strings.TrimLeft(data[0], whiteSpaces) if strings.ContainsAny(variable, whiteSpaces) { return []string{}, ErrBadEnvVariable{fmt.Sprintf("variable '%s' has white spaces", variable)} } if len(data) > 1 { // pass the value through, no trimming lines = append(lines, fmt.Sprintf("%s=%s", variable, data[1])) } else { // if only a pass-through variable is given, clean it up. lines = append(lines, fmt.Sprintf("%s=%s", strings.TrimSpace(line), os.Getenv(line))) } } } return lines, scanner.Err() }
// Run joins the lines covered by each selection into one line: internal
// line breaks collapse to a single space, leading whitespace of
// continuation lines is dropped, and the first line break after the
// selection is removed as well, pulling the following line up.
func (c *JoinCommand) Run(v *View, e *Edit) error {
	sel := v.Sel()
	for i := 0; i < sel.Len(); i++ {
		r := sel.Get(i)
		// Removing new line and trimming in the selection
		t := v.Buffer().Substr(r)
		// Normalize \r to \n so both line-ending styles split identically.
		// NOTE(review): "\r\n" becomes "\n\n" here, producing an empty
		// segment and hence an extra space in the join — confirm intended.
		t = strings.Replace(t, "\r", "\n", -1)
		slice := strings.Split(t, "\n")
		t = ""
		for j, s := range slice {
			if j == 0 {
				t += s
				continue
			}
			// Each continuation line contributes " " + its trimmed text.
			t += " " + strings.TrimLeft(s, " \t")
		}
		v.Replace(e, r, t)
		// Removing the first new line after selection
		liner := v.Buffer().FullLine(r.End())
		line := v.Buffer().Substr(liner)
		line = strings.Replace(line, "\n", "", -1)
		line = strings.Replace(line, "\r", "", -1)
		line = strings.TrimRight(line, " \t")
		// Trimming the line after
		nextline := liner.End() + 1
		nextliner := v.Buffer().FullLine(nextline)
		nline := v.Buffer().Substr(nextliner)
		if nline != "" {
			v.Replace(e, nextliner, " "+strings.TrimLeft(nline, " \t"))
		}
		// Replacements are ordered later-to-earlier so region offsets stay
		// valid while editing.
		v.Replace(e, liner, line)
	}
	return nil
}
func parseUpdateBody(body string) (title, text, colstr string) { body = strings.TrimLeft(body, "\n") v := strings.SplitN(body, "\n", 2) if len(v) <= 0 { return "", "", "" } title = v[0] if len(v) < 2 { return title, "", "" } if strings.HasPrefix(v[1], TEXT_COLS_SEPARATOR[1:]) { text = "" colstr = v[1][len(TEXT_COLS_SEPARATOR[1:]):] return } v = strings.SplitN(v[1], TEXT_COLS_SEPARATOR, 2) if len(v) <= 0 { return title, "", "" } text = strings.TrimLeft(strings.TrimRight(v[0], "\n"), "\n") colstr = "" if len(v) > 1 { colstr = v[1] } return }
func newRequest(method, url1 string, body io.Reader) (req *http.Request, err error) { var host string // url1 = "-H <Host> http://<ip>[:<port>]/<path>" // if strings.HasPrefix(url1, "-H") { url2 := strings.TrimLeft(url1[2:], " \t") pos := strings.Index(url2, " ") if pos <= 0 { return nil, ErrInvalidRequestURL } host = url2[:pos] url1 = strings.TrimLeft(url2[pos+1:], " \t") } req, err = http.NewRequest(method, url1, body) if err != nil { return } if host != "" { req.Host = host } return }
// update applies a batch of file-attribute changes to the worker's FUSE
// views: paths outside the writable root get their dentry invalidated,
// paths inside it are translated into union-FS updates.
func (fs *workerFS) update(attrs []*attr.FileAttr) {
	updates := map[string]*termitefs.Result{}
	for _, attr := range attrs {
		// Attribute paths arrive absolute; compare without leading slashes.
		path := strings.TrimLeft(attr.Path, "/")
		if !strings.HasPrefix(path, fs.fuseFS.writableRoot) {
			dir, name := filepath.Split(path)

			// As file contents are immutable, we must
			// invalidate the entry instead
			fs.fuseFS.rpcNodeFS.EntryNotify(filepath.Join(fs.id, dir), name)
			continue
		}
		// Rebase the path to be relative to the writable root.
		path = strings.TrimLeft(path[len(fs.fuseFS.writableRoot):], "/")
		if attr.Deletion() {
			// An empty Result marks a deletion for the union FS.
			updates[path] = &termitefs.Result{}
		} else {
			r := termitefs.Result{
				Original: "",
				Backing:  "",
				Link:     attr.Link,
				Attr:     &fuse.Attr{},
			}
			// Copy the attribute struct so the Result owns its own value.
			a := *attr.Attr
			r.Attr = &a
			updates[path] = &r
		}
	}
	fs.unionFs.Update(updates)
}
func (a *Api) policys(w http.ResponseWriter, r *http.Request) { w.Header().Set("content-type", "application/json") policies, err := a.store.List(pathPolicy) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } var data = map[string]string{} for _, policy := range policies { peer := strings.Split(policy.Key, "/") parts := strings.Split(peer[len(peer)-1], ":") pInfo, qInfo, err := a.parsePolicy(parts) if err != nil { log.Errorf(err.Error()) continue } key := strings.Join([]string{ strings.TrimLeft(pInfo.Name, "/"), strings.TrimLeft(qInfo.Name, "/")}, ":") data[key] = string(policy.Value) } if err := json.NewEncoder(w).Encode(data); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } }
// update applies a batch of file-attribute changes to this worker FUSE
// mount: paths outside the writable root are only notified (cache
// invalidation), paths inside it become union-FS updates.
func (me *workerFuseFs) update(attrs []*attr.FileAttr) {
	updates := map[string]*fs.Result{}
	for _, attr := range attrs {
		// Attribute paths arrive absolute; compare without leading slashes.
		path := strings.TrimLeft(attr.Path, "/")
		if !strings.HasPrefix(path, me.writableRoot) {
			me.rpcNodeFs.Notify(path)
			continue
		}
		// Rebase the path to be relative to the writable root.
		path = strings.TrimLeft(path[len(me.writableRoot):], "/")
		if attr.Deletion() {
			// An empty Result marks a deletion for the union FS.
			updates[path] = &fs.Result{}
		} else {
			r := fs.Result{
				Original: "",
				Backing:  "",
				Link:     attr.Link,
				Attr:     &fuse.Attr{},
			}
			// Copy the attribute struct so the Result owns its own value.
			a := *attr.Attr
			r.Attr = &a
			updates[path] = &r
		}
	}
	me.unionFs.Update(updates)
}
// ParsePreferHeader parses the LDP specific Prefer header func ParsePreferHeader(header string) *Preferheaders { ret := new(Preferheaders) for _, v := range strings.Split(header, ",") { item := new(preferheader) v = strings.TrimSpace(v) if strings.HasPrefix(v, "return=representation") { for _, s := range strings.Split(v, ";") { s = strings.TrimSpace(s) if strings.HasPrefix(s, "omit") { s = strings.TrimLeft(s, "omit=") s = strings.TrimLeft(s, "\"") s = strings.TrimRight(s, "\"") for _, u := range strings.Split(s, " ") { item.omit = append(item.omit, u) } } if strings.HasPrefix(s, "include") { s = strings.TrimLeft(s, "include=") s = strings.TrimLeft(s, "\"") s = strings.TrimRight(s, "\"") for _, u := range strings.Split(s, " ") { item.include = append(item.include, u) } } } ret.headers = append(ret.headers, item) } } return ret }
// relativePath returns logPath relative to path, with any leading slashes
// removed. When path is "." the logPath is only stripped of its leading
// slashes.
func relativePath(path string, logPath string) string {
	if path != "." {
		logPath = strings.TrimPrefix(logPath, path)
	}
	return strings.TrimLeft(logPath, "/")
}
// fromMetaElement extracts the charset value from the content of a meta
// element, e.g. "utf-8" from "text/html; charset=utf-8". The value may be
// bare (terminated by ';' or whitespace) or quoted with ' or ". It returns
// "" when no well-formed charset declaration is found.
func fromMetaElement(s string) string {
	const ws = " \t\n\f\r"
	for len(s) > 0 {
		i := strings.Index(s, "charset")
		if i < 0 {
			return ""
		}
		// Skip past "charset" and optional whitespace; without a following
		// '=' keep scanning the remainder for another occurrence.
		s = strings.TrimLeft(s[i+len("charset"):], ws)
		if !strings.HasPrefix(s, "=") {
			continue
		}
		s = strings.TrimLeft(s[1:], ws)
		if len(s) == 0 {
			return ""
		}
		// Quoted value: everything up to the matching quote.
		if q := s[0]; q == '"' || q == '\'' {
			rest := s[1:]
			end := strings.IndexRune(rest, rune(q))
			if end < 0 {
				return ""
			}
			return rest[:end]
		}
		// Bare value: terminated by ';', whitespace, or end of string.
		end := strings.IndexAny(s, ";"+ws)
		if end < 0 {
			end = len(s)
		}
		return s[:end]
	}
	return ""
}
// ServeHTTP implements the httpserver.Handler interface and serves requests, // setting headers on the response according to the configured rules. func (h Headers) ServeHTTP(w http.ResponseWriter, r *http.Request) (int, error) { replacer := httpserver.NewReplacer(r, nil, "") rww := &responseWriterWrapper{w: w} for _, rule := range h.Rules { if httpserver.Path(r.URL.Path).Matches(rule.Path) { for name := range rule.Headers { // One can either delete a header, add multiple values to a header, or simply // set a header. if strings.HasPrefix(name, "-") { rww.delHeader(strings.TrimLeft(name, "-")) } else if strings.HasPrefix(name, "+") { for _, value := range rule.Headers[name] { rww.Header().Add(strings.TrimLeft(name, "+"), replacer.Replace(value)) } } else { for _, value := range rule.Headers[name] { rww.Header().Set(name, replacer.Replace(value)) } } } } } return h.Next.ServeHTTP(rww, r) }
// Base handler for HTTP requests. func (*handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { path := r.URL.Path if strings.HasPrefix(path, "/css/") { path = strings.TrimLeft(path, "/css/") if css, ok := assets.Css[path]; ok { w.Header().Set("Content-Type", "text/css; charset=utf-8") w.Write([]byte(css)) } else { w.WriteHeader(http.StatusNotFound) } } else if path == "/favicon.ico" { } else { var file []byte path = strings.TrimLeft(path, "/") if path == "" { file = markdown.GetReadme() } else { file = markdown.GetFile(path) } data := data{ Title: "Knowledge Base", Index: template.HTML(string(markdown.GetIndex())), Content: template.HTML(string(file)), } render(w, data) } }
// ParseLinkHeader is a generic Link header parser func ParseLinkHeader(header string) *Linkheaders { ret := new(Linkheaders) for _, v := range strings.Split(header, ", ") { item := new(linkheader) for _, s := range strings.Split(v, ";") { s = strings.TrimSpace(s) if strings.HasPrefix(s, "<") && strings.HasSuffix(s, ">") { s = strings.TrimLeft(s, "<") s = strings.TrimRight(s, ">") item.uri = s } else if strings.Index(s, "rel=") >= 0 { s = strings.TrimLeft(s, "rel=") if strings.HasPrefix(s, "\"") || strings.HasPrefix(s, "'") { s = s[1:] } if strings.HasSuffix(s, "\"") || strings.HasSuffix(s, "'") { s = s[:len(s)-1] } item.rel = s } } ret.headers = append(ret.headers, item) } return ret }
// parse markdown file and convert to html func parseSourceFile(srcFilePath string) (*Post, error) { post := &Post{} post.Name = trimPath(srcFilePath) // date d, err := parseDate(post.Name) if err != nil { log.Warning(err) } post.Date = d // read file data, err := ioutil.ReadFile(srcFilePath) if err != nil { return nil, err } // parse title from first headline lines := strings.Split(string(data), "\n") for _, line := range lines { if s := strings.TrimLeft(line, " "); strings.HasPrefix(s, "#") { post.Title = strings.TrimLeft(strings.TrimLeft(s, "#"), " ") break } } // convert markdown to html content := strings.Join(lines, "\n") output := blackfriday.MarkdownCommon([]byte(content)) post.Content = string(output) return post, nil }
// Rename moves the object keySrc to keyDest inside the driver's bucket.
// When keySrc does not name a file, it is retried as a directory prefix:
// every object under keySrc+"/" is moved to the corresponding key under
// keyDest+"/".
func (driver *QiniuDriver) Rename(keySrc, keyDest string) error {
	fmt.Println("rename from", keySrc, keyDest) // NOTE(review): debug print left in
	// Object keys are stored without leading slashes.
	var from = strings.TrimLeft(keySrc, "/")
	var to = strings.TrimLeft(keyDest, "/")
	info, err := driver.client.Stat(nil, driver.bucket, from)
	if err != nil && strings.Contains(err.Error(), "no such file or directory") {
		// Not a file: treat the source as a directory prefix.
		from = strings.TrimLeft(keySrc, "/") + "/"
		to = strings.TrimLeft(keyDest, "/") + "/"
		info, err = driver.client.Stat(nil, driver.bucket, from)
		if err != nil {
			return err
		}
		// Move each listed entry to its rewritten key.
		// NOTE(review): listings longer than 1000 entries are truncated —
		// confirm the limit is acceptable here.
		entries, _, err := driver.client2.ListPrefix(nil, driver.bucket, from, "", 1000)
		if err != nil {
			return err
		}
		for _, entry := range entries {
			newKey := strings.Replace(entry.Key, from, to, 1)
			err = driver.client.Move(nil, driver.bucket, entry.Key, driver.bucket, newKey)
			if err != nil {
				return err
			}
		}
		return nil
	}
	if err != nil {
		fmt.Println(err)
		return err
	}
	fmt.Println(info, from, to)
	// Plain single-object move.
	return driver.client.Move(nil, driver.bucket, from, driver.bucket, to)
}
func (op *Parser) newOpt(flags []string, descr string, displayDest bool) *option { opt := &option{} opt.descr = descr opt.configflag, opt.shortflag, opt.longflag = op.computeFlags(flags, opt) opt.completer = op.nextCompleter required := op.nextRequired if required { if opt.configflag == "" { opt.requiredFlag = true } else { opt.requiredConfig = true } } if displayDest { if op.nextDest != "" { opt.dest = op.nextDest } else { if opt.longflag != "" { opt.dest = strings.ToUpper(strings.TrimLeft(opt.longflag, "-")) } else { opt.dest = strings.ToUpper(strings.TrimLeft(opt.shortflag, "-")) } } } op.options = append(op.options, opt) op.nextCompleter = nil op.nextDest = "" op.nextRequired = false return opt }
// isContainsColumn reports whether column appears in hasColumns, comparing
// names with any leading '-' (descending-sort marker) removed.
func isContainsColumn(hasColumns []string, column string) bool {
	want := strings.TrimLeft(column, "-")
	for _, c := range hasColumns {
		if strings.TrimLeft(c, "-") == want {
			return true
		}
	}
	return false
}
// linearizeLeftnotes folds multi-line left-margin notes into the line they
// annotate, mutating *input in place as it consumes continuation lines.
func linearizeLeftnotes(input *([]intermediates)) ([]intermediates, error) {
	//Left notes start at the first character of one line and continue
	//on succeeding lines (starting at the first character)
	//there will be a blank spaces at the first normal string
	output := []intermediates{}
	for ii, ll := range *input {
		switch ll.(type) {
		default:
			// Only string elements are supported.
			return []intermediates{}, errors.New("linearizeLeftnotes: non-string input")
		case string:
			ss := ll.(string)
			// A line starting with `dot` opens a left note.
			re := regexp.MustCompile("^" + dot + ".*$")
			if re.MatchString(ss) {
				sideNote := ""
				// Two spaces separate the note text from the body text.
				re2 := regexp.MustCompile("^(" + dot + ".*) {2}.*$")
				match := re2.FindStringSubmatch(ss)
				// NOTE(review): when the line matches re but not re2 it is
				// appended to NEITHER branch and silently dropped — confirm
				// intended.
				if match != nil {
					replace := regexp.MustCompile("^" + dot + ".* {2}(.*$)")
					sideNote = strings.Trim(match[1], " ")
					if ii != 0 {
						//Fix for blockquote
						output[ii-1] = strings.TrimLeft(output[ii-1].(string), " ")
					}
					// Consume continuation lines, accumulating sideNote and
					// rewriting *input in place.
					for jj := ii + 1; jj < len(*input); jj++ {
						//Fix for blockquote
						ss := (*input)[jj].(string)
						if len(ss) == 0 || ss[0:1] == " " {
							// Indented or empty line terminates the note.
							(*input)[jj] = strings.TrimLeft(ss, " ")
							break
						} else {
							rInner := regexp.MustCompile("^(.*) {2}(.*$)")
							mInner := rInner.FindStringSubmatch(ss)
							if mInner == nil {
								// Whole line belongs to the note.
								sideNote += " " + strings.Trim(ss, " ")
								(*input)[jj] = ""
							} else {
								sideNote += " " + strings.Trim(mInner[1], " ")
								//Fix for blockquote
								(*input)[jj] = strings.Trim(mInner[2], " ")
							}
						}
						// NOTE(review): debug print runs on every consumed
						// continuation line — confirm it should remain.
						fmt.Printf("SIDENOTE: %v\n", sideNote)
					}
					// Strip the note text from the current line and prepend
					// the accumulated side note.
					ss = replace.ReplaceAllString(ss, "$1")
					ss = strings.TrimLeft(ss, " ")
					sideNote = strings.Trim(sideNote, " ")
					ss = sideNote + " " + ss
					output = append(output, ss)
				}
			} else {
				output = append(output, ss)
			}
		}
	}
	return output, nil
}
// getStats aggregates subscription statistics from the key/value store.
//
// NOTE(review): this function body is CORRUPTED — the fragment
// `"user:"******"user"` below is the residue of an automated
// secret-scrubbing pass that replaced a span of code with `******`. The
// code as written does not compile (unbalanced braces, `id` unused, and the
// returned `chats`/`users` sets are never populated while the locals
// `userswithlinks`/`chatswithlinks` are). Recover the original body from
// version control before touching behavior.
//
// NOTE(review): `strings.TrimLeft(k, "rss:")` near the end uses cutset
// semantics — keys whose payload starts with 'r', 's', or ':' get extra
// characters stripped; `strings.TrimPrefix` is likely what was meant.
func getStats() (chats *set.Set, users *set.Set, rss *set.Set, used *set.Set, nperuser map[string]int, subscribed map[string]int, unused *set.Set) { chats = set.New() users = set.New() rss = set.New() used = set.New() userswithlinks := set.New() chatswithlinks := set.New() nperuser = map[string]int{} subscribed = map[string]int{} allv := loadFromDbPrefix("") for k := range allv { if strings.HasPrefix(k, "user") && len(strings.Split(k, ":")) == 2 { id := strings.TrimLeft(k, "user:"******"user") { uid := strings.Split(k, ":")[1] if i, e := strconv.Atoi(uid); e == nil { if i > 0 { userswithlinks.Add(i) } else { chatswithlinks.Add(i) } } vu, oku := nperuser[uid] if oku { nperuser[uid] = vu + 1 } else { nperuser[uid] = 1 } surl := strings.Join(strings.Split(k, ":")[2:], ":") used.Add(surl) v, ok := subscribed[surl] if ok { subscribed[surl] = v + 1 } else { subscribed[surl] = 1 } } if strings.HasPrefix(k, "rss") && len(strings.Split(k, ":")) == 3 { rss.Add(strings.TrimLeft(k, "rss:")) } } unused = set.Difference(rss, used).(*set.Set) return }
func (db *mysql) SqlType(c *core.Column) string { var res string switch t := c.SQLType.Name; t { case core.Bool: res = core.TinyInt c.Length = 1 case core.Serial: c.IsAutoIncrement = true c.IsPrimaryKey = true c.Nullable = false res = core.Int case core.BigSerial: c.IsAutoIncrement = true c.IsPrimaryKey = true c.Nullable = false res = core.BigInt case core.Bytea: res = core.Blob case core.TimeStampz: res = core.Char c.Length = 64 case core.Enum: //mysql enum res = core.Enum res += "(" opts := "" for v, _ := range c.EnumOptions { opts += fmt.Sprintf(",'%v'", v) } res += strings.TrimLeft(opts, ",") res += ")" case core.Set: //mysql set res = core.Set res += "(" opts := "" for v, _ := range c.SetOptions { opts += fmt.Sprintf(",'%v'", v) } res += strings.TrimLeft(opts, ",") res += ")" default: res = t } var hasLen1 bool = (c.Length > 0) var hasLen2 bool = (c.Length2 > 0) if res == core.BigInt && !hasLen1 && !hasLen2 { c.Length = 20 hasLen1 = true } if hasLen2 { res += "(" + strconv.Itoa(c.Length) + "," + strconv.Itoa(c.Length2) + ")" } else if hasLen1 { res += "(" + strconv.Itoa(c.Length) + ")" } return res }
func doCommand(command string) { if strings.HasPrefix(command, "/connect ") { addr := strings.TrimLeft(command, "/connect ") go dial(addr) } if strings.HasPrefix(command, "/nick ") { *selfNick = strings.TrimLeft(command, "/nick ") } }
func (h *Host) ExtractServerName(line string) { valid := strings.Contains(line, "ServerName") || strings.Contains(line, "ServerAlias") if valid && line[0] != '#' { line = strings.TrimLeft(line, "ServerName") line = strings.TrimLeft(line, "ServerAlias") line = strings.TrimSpace(line) h.ServerName = append(h.ServerName, line) } }