// ProcessConfig replaces references to environment variables in the given data.
// Supported variable syntax: $varname, ${varname}
func ProcessConfig(data []byte, e *env.Env, escapeChar rune) ([]byte, error) {
	var result []byte
	var sc scanner.Scanner
	sc.Init(bytes.NewReader(data))
DONE:
	for {
		switch ch := sc.Peek(); ch {
		default:
			result = append(result, byte(sc.Next()))
		case scanner.EOF:
			break DONE
		case escapeChar:
			curr, next := sc.Next(), sc.Peek()
			if next != '$' {
				result = append(result, byte(curr))
			}
			if next != scanner.EOF {
				result = append(result, byte(sc.Next()))
			}
		case '$':
			name, err := parseVariable(&sc)
			if err != nil {
				pos := sc.Pos()
				return result, fmt.Errorf(`parseError:%d:%d: %v %q`, pos.Line, pos.Offset, err, name)
			}
			result = append(result, e.Get(string(name))...)
		}
	}
	return result, nil
}
// Parse updates an inverted index with the file's words
func (index *InvertedIndex) Parse(filename string) {
	if index.alreadyParsed(filename) {
		return
	}
	file, err := os.Open(filename)
	if err != nil {
		cwd, _ := os.Getwd()
		fmt.Printf("The file named %s doesn't exist, the current directory is %s\n", filename, cwd)
		return
	}
	defer file.Close()

	var s scanner.Scanner
	s.Init(file)
	var token rune
	for token != scanner.EOF {
		token = s.Scan()
		tokenText := s.TokenText()
		_, found := index.indexMap[tokenText]
		pos := s.Pos()
		result := Result{File: filename, Line: pos.Line, Index: pos.Column - len(tokenText)}
		if !found {
			index.indexMap[tokenText] = []Result{result}
		} else {
			index.indexMap[tokenText] = append(index.indexMap[tokenText], result)
		}
	}
	index.files = append(index.files, filename)
}
func Example() {
	const src = `
// This is scanned code.
if a > 10 {
	someParsable = text
}`
	var s scanner.Scanner
	s.Init(strings.NewReader(src))
	var tok rune
	for tok != scanner.EOF {
		tok = s.Scan()
		fmt.Println("At position", s.Pos(), ":", s.TokenText())
	}

	// Output:
	// At position 3:4 : if
	// At position 3:6 : a
	// At position 3:8 : >
	// At position 3:11 : 10
	// At position 3:13 : {
	// At position 4:15 : someParsable
	// At position 4:17 : =
	// At position 4:22 : text
	// At position 5:3 : }
	// At position 5:3 : }
}
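The example above prints s.Pos() after every Scan, and several later snippets subtract len(s.TokenText()) from Pos().Offset to recover a token's start. A minimal, self-contained sketch (standard library only, not taken from any of the snippets here) contrasting the two position sources that text/scanner offers:

// Standalone sketch: Scan sets the Position field to the start of the token it
// returned, while Pos() reports the position immediately after that token.
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

func main() {
	var s scanner.Scanner
	s.Init(strings.NewReader("alpha beta"))
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		// s.Position: token start; s.Pos(): just past the token's end.
		fmt.Printf("token %q starts at %v, Pos() after it is %v\n",
			s.TokenText(), s.Position, s.Pos())
	}
}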
func parseText(s *scanner.Scanner, depth int) ([]ast, error) {
	var slice []ast
	for {
		switch s.Scan() {
		case '+', '-', '/', '%', '*', '=', '<', '>', '!':
			slice = append(slice, parseIdent(s.TokenText()))
		case scanner.Ident:
			ident := s.TokenText()
			// Periods are allowed in package names.
			for s.Peek() == '.' {
				s.Next()
				ident += "."
				if s.Scan() != scanner.Ident {
					return nil, stitchError{pos: s.Pos(), err: fmt.Errorf("bad ident name: %s", ident)}
				}
				ident += s.TokenText()
			}
			slice = append(slice, parseIdent(ident))
		case scanner.Float:
			x, _ := strconv.ParseFloat(s.TokenText(), 64)
			slice = append(slice, astFloat(x))
		case scanner.Int:
			x, _ := strconv.Atoi(s.TokenText())
			slice = append(slice, astInt(x))
		case scanner.String:
			str := strings.Trim(s.TokenText(), "\"")
			slice = append(slice, astString(str))
		case '(':
			// We need to save our position before recursing because the
			// scanner will have moved on by the time the recursive call
			// returns.
			pos := s.Pos()
			sexp, err := parseText(s, depth+1)
			if err != nil {
				return nil, err
			}
			slice = append(slice, astSexp{sexp: sexp, pos: pos})
		case ')':
			if depth == 0 {
				return nil, stitchError{s.Pos(), errUnbalancedParens}
			}
			return slice, nil
		case scanner.EOF:
			if depth != 0 {
				return nil, stitchError{s.Pos(), errUnbalancedParens}
			}
			return slice, nil
		default:
			return nil, stitchError{s.Pos(), fmt.Errorf("bad element: %s", s.TokenText())}
		}
	}
}
func scanBracketedKey(s *scanner.Scanner) (key string, err error) {
	s.Scan() // scan the '['
	key, err = scanKey(s)
	if err == nil {
		t := s.Scan()
		if t != ']' {
			err = fmt.Errorf("Unexpected token at %s. Expected ']', had %s", s.Pos(), scanner.TokenString(t))
		}
	}
	return
}
func StringToMap(value string) (map[string]string, error) {
	var tokenizer scanner.Scanner
	tokenizer.Init(strings.NewReader(value))
	tokenizer.Error = func(*scanner.Scanner, string) {}

	result := make(map[string]string)
	next := func() string {
		tokenizer.Scan()
		return tokenizer.TokenText()
	}

	var lvalue, rvalue, expression string
	for {
		lvalue = next()
		if lvalue == "" {
			return result, errors.New(fmt.Sprintf("Expected key at pos '%d' but found none; "+
				"map values should be 'key=value' separated by commas", tokenizer.Pos().Offset))
		}
		if lvalue == "{" {
			// Assume this is JSON format and attempt to un-marshal
			return JSONToMap(value)
		}

		expression = next()
		if expression != "=" {
			return result, errors.New(fmt.Sprintf("Expected '=' after '%s' but found '%s'; "+
				"map values should be 'key=value' separated by commas", lvalue, expression))
		}

		rvalue = next()
		if rvalue == "" {
			return result, errors.New(fmt.Sprintf("Expected value after '%s' but found none; "+
				"map values should be 'key=value' separated by commas", expression))
		}
		// TODO: Handle quoted strings and escaped double quotes
		result[lvalue] = rvalue

		// Are there anymore tokens?
		delimiter := next()
		if delimiter == "" {
			break
		}
		// Should be a comma next
		if delimiter != "," {
			return result, errors.New(fmt.Sprintf("Expected ',' after '%s' but found '%s'; "+
				"map values should be 'key=value' separated by commas", rvalue, delimiter))
		}
	}
	return result, nil
}
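StringToMap leans on text/scanner's default token classes: with the default Mode, an input such as "foo=bar,baz=2" comes back as identifier, '=', identifier-or-int, and ',' tokens, which is exactly what the next() closure above consumes. A small stdlib-only sketch of that tokenization; the sample input string is illustrative, not from the original code:

// Standalone sketch: how text/scanner splits a key=value list with its defaults.
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

func main() {
	var tokenizer scanner.Scanner
	tokenizer.Init(strings.NewReader("foo=bar,baz=2"))
	for tok := tokenizer.Scan(); tok != scanner.EOF; tok = tokenizer.Scan() {
		// TokenString names the token class; TokenText is the literal text.
		fmt.Printf("%-8s %q\n", scanner.TokenString(tok), tokenizer.TokenText())
	}
}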
func scanRune(s *scanner.Scanner, out chan Part, stop villa.Stop, tp int, exp rune) (toStop bool) {
	start := s.Pos()
	if r := s.Next(); r == scanner.EOF {
		return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
	} else if r != exp {
		return output(out, stop, TP_ERROR, start, s.Pos())
	}
	return output(out, stop, tp, start, s.Pos())
}
func scanWord(s *scanner.Scanner, out chan Part, stop villa.Stop, word []rune) (toStop bool) {
	start := s.Pos()
	for i := 0; i < len(word); i++ {
		if r := s.Next(); r == scanner.EOF {
			return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
		} else if r != word[i] {
			return output(out, stop, TP_ERROR, start, s.Pos())
		}
	}
	return output(out, stop, TP_KEYWORD, start, s.Pos())
}
func tokenize(expr string) []token {
	var s scanner.Scanner
	s.Init(strings.NewReader(expr))
	var tok rune
	var tokens []token
	for tok != scanner.EOF {
		tok = s.Scan()
		text := s.TokenText()
		if text == "" {
			return tokens
		} else {
			tokens = append(tokens, token{
				text:   text,
				offset: s.Pos().Offset,
			})
		}
	}
	panic("unreachable")
}
func funcFromFormula(form string) (string, error) {
	f, _, err := extractFormula(form)
	if err != nil {
		return "", err
	}
	var s scanner.Scanner
	s.Init(strings.NewReader(f))
	s.Error = func(s *scanner.Scanner, msg string) {
		err = fmt.Errorf("error parsing plural formula %s: %s", s.Pos(), msg)
	}
	s.Mode = scanner.ScanIdents | scanner.ScanInts
	s.Whitespace = 0
	tok := s.Scan()
	var code []string
	var buf bytes.Buffer
	for tok != scanner.EOF && err == nil {
		switch tok {
		case scanner.Ident, scanner.Int:
			buf.WriteString(s.TokenText())
		case '?':
			code = append(code, fmt.Sprintf("if %s {\n", buf.String()))
			buf.Reset()
		case ':':
			code = append(code, fmt.Sprintf("return %s\n}\n", buf.String()))
			buf.Reset()
		default:
			buf.WriteRune(tok)
		}
		tok = s.Scan()
	}
	if err != nil {
		return "", err
	}
	if len(code) == 0 && buf.Len() > 0 && buf.String() != "0" {
		code = append(code, fmt.Sprintf("if %s {\nreturn 1\n}\nreturn 0\n", buf.String()))
		buf.Reset()
	}
	if buf.Len() > 0 {
		code = append(code, fmt.Sprintf("\nreturn %s\n", buf.String()))
	}
	return strings.Join(code, ""), nil
}
// ReplaceGo is a specialized routine for correcting Golang source
// files. Currently only checks comments, not identifiers for
// spelling.
//
// Other items:
//   - check strings, but need to ignore
//     * import "statements" blocks
//     * import ( "blocks" )
//   - skip first comment (line 0) if build comment
//
func ReplaceGo(input string, debug bool) string {
	var s scanner.Scanner
	s.Init(strings.NewReader(input))
	s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
	lastPos := 0
	output := ""
	for {
		switch s.Scan() {
		case scanner.Comment:
			origComment := s.TokenText()
			var newComment string
			if debug {
				newComment = ReplaceDebug(origComment)
			} else {
				newComment = Replace(origComment)
			}
			if origComment != newComment {
				// s.Pos().Offset is the end of the current token
				// subtract len(origComment) to get the start of token
				offset := s.Pos().Offset
				output = output + input[lastPos:offset-len(origComment)] + newComment
				lastPos = offset
			}
		case scanner.EOF:
			// no changes, no copies
			if lastPos == 0 {
				return input
			}
			if lastPos >= len(input) {
				return output
			}
			return output + input[lastPos:]
		}
	}
}
func scanKeyword(s *scanner.Scanner, out chan Part, stop villa.Stop) (toStop bool) {
	start := s.Pos()
	switch s.Peek() {
	case scanner.EOF:
		s.Next()
		return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
	case 't':
		return scanWord(s, out, stop, []rune("true"))
	case 'f':
		return scanWord(s, out, stop, []rune("false"))
	case 'n':
		return scanWord(s, out, stop, []rune("null"))
	}
	s.Next()
	return output(out, stop, TP_ERROR, start, s.Pos())
}
func scanValue(s *scanner.Scanner, out chan Part, stop villa.Stop) (toStop bool) {
	start := s.Pos()
	switch s.Peek() {
	case scanner.EOF:
		return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
	case '"':
		return scanString(s, out, stop)
	case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		return scanNumber(s, out, stop)
	case 't', 'f', 'n':
		return scanKeyword(s, out, stop)
	case '{':
		return scanObject(s, out, stop)
	case '[':
		return scanArray(s, out, stop)
	}
	return output(out, stop, TP_ERROR, start, s.Pos())
}
func split(expr string) (keys []string, err error) {
	var msgs []string
	var s scanner.Scanner
	s.Init(strings.NewReader(expr))
	s.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanStrings
	s.Error = func(s *scanner.Scanner, msg string) {
		msgs = append(msgs, fmt.Sprintf("%s %s", s.Pos(), msg))
	}
	key := ""
	keys = []string{}
	for err == nil {
		t := s.Peek()
		// fmt.Printf(">>> %s: %s %s\n", s.Pos(), scanner.TokenString(t), s.TokenText())
		switch t {
		case '[':
			key, err = scanBracketedKey(&s)
		case '.':
			s.Scan()
			continue
		case scanner.EOF:
			goto end
		default:
			key, err = scanKey(&s)
		}
		if len(msgs) > 0 {
			err = errors.New(strings.Join(msgs, "\n"))
		}
		if err == nil {
			keys = append(keys, key)
		}
	}
end:
	return
}
func scanKey(s *scanner.Scanner) (key string, err error) {
	t := s.Scan()
	switch t {
	case scanner.Ident, scanner.Int, scanner.Float:
		key = s.TokenText()
	case scanner.String:
		key = strings.Trim(s.TokenText(), "\"")
	default:
		err = fmt.Errorf("Unexpected token at %s. Expected ident, number or string, had %s", s.Pos(), scanner.TokenString(t))
	}
	return
}
func (b *builder) bind(script map[string]func()) {
	var sc scanner.Scanner
	var modes []string
	for targets, handler := range script {
		sc.Init(bytes.NewBufferString(targets))
		err := func(msg string) {
			fmt.Fprintf(os.Stderr, "internal error in builder.bind: %s in %q at %v\n", msg, targets, sc.Pos())
		}
		expect := func(tok rune) {
			if sc.Scan() != tok {
				err(fmt.Sprintf("expected %q", string([]rune{tok})))
			}
		}
	L3:
		for {
			// Reading list of modes.
			modes = modes[:0]
			expect('(')
		L1:
			for {
				if sc.Scan() != scanner.Ident {
					err("identifier expected")
					return
				}
				modes = append(modes, sc.TokenText())
				switch sc.Scan() {
				case ',':
					// missing comma
				case ')':
					break L1
				default:
					err("expected ',' or ')'")
					return
				}
			}
			// Reading characters
		L2:
			for {
				var tok rune
				switch sc.Scan() {
				case scanner.Ident:
					if sc.TokenText() == "letter" {
						for _, m := range modes {
							b.tables[m].letter = handler
						}
						tok = sc.Scan()
					} else {
						err("unknown keyword")
						return
					}
				case scanner.Char:
					c1, _, _, _ := strconv.UnquoteChar(sc.TokenText()[1:], '\'')
					if tok = sc.Scan(); tok == '-' {
						if sc.Scan() == scanner.Char {
							c2, _, _, _ := strconv.UnquoteChar(sc.TokenText()[1:], '\'')
							for _, m := range modes {
								b.tables[m].bindRange(c1, c2, handler)
							}
							tok = sc.Scan()
						} else {
							err("expected character literal")
						}
					} else {
						for _, m := range modes {
							b.tables[m].bind(c1, handler)
						}
					}
				case scanner.String:
					s, _ := strconv.Unquote(sc.TokenText())
					tok = sc.Scan()
					for _, c := range []rune(s) {
						for _, m := range modes {
							b.tables[m].bind(c, handler)
						}
					}
				default:
					err("unexpected token")
					return
				}
				switch tok {
				case scanner.EOF:
					break L3
				case ';':
					break L2
				case ',':
					// missing comma
				default:
					err("expected ','")
					return
				}
			}
		}
	}
}
// reentrant read routine. Will be called recursively if a 'parents' key exists in the config file
func read(path string, c *Config) error {
	f, err := os.Open(path)
	if err != nil {
		if strings.Contains(err.Error(), "no such file") {
			// file doesn't exist, let's try some path magic
			if path == c.Path {
				return err
			}
			newpath := filepath.Join(filepath.Dir(c.Path), filepath.Base(path))
			f, err = os.Open(newpath)
			if err != nil {
				return err
			}
			log.Warn("could not find %v, but found a similar one at %v, using that instead", path, newpath)
		} else {
			return err
		}
	}
	defer f.Close()

	var s scanner.Scanner
	s.Init(f)
	tok := s.Scan()
	for tok != scanner.EOF {
		pos := s.Pos()
		if tok != scanner.Ident {
			err = fmt.Errorf("%s:%s malformed config: %s, expected identifier, got %s", path, pos, s.TokenText(), scanner.TokenString(tok))
			return err
		}
		k := s.TokenText()
		tok = s.Scan()
		if tok != '=' {
			err = fmt.Errorf("%s:%s malformed config: %s, expected '=', got %s", path, pos, s.TokenText(), scanner.TokenString(tok))
			return err
		}
		tok = s.Scan()
		if tok != scanner.String {
			err = fmt.Errorf("%s:%s malformed config %s, expected string, got %s", path, pos, s.TokenText(), scanner.TokenString(tok))
			return err
		}
		v := strings.Trim(s.TokenText(), "\"`")
		d := strings.Fields(v)
		switch k {
		case "parents":
			for _, i := range d {
				log.Infoln("reading config:", i)
				err = read(i, c)
				c.Parents = append(c.Parents, i)
				if err != nil {
					return err
				}
			}
		case "packages":
			c.Packages = append(c.Packages, d...)
		case "overlay":
			// trim any trailing "/"
			for i, j := range d {
				d[i] = strings.TrimRight(j, "/")
			}
			c.Overlays = append(c.Overlays, d...)
		case "postbuild":
			c.Postbuilds = append(c.Postbuilds, v)
		default:
			err = fmt.Errorf("invalid key %s: %v", k, d)
			return err
		}
		tok = s.Scan()
	}
	return nil
}
func scanNumber(s *scanner.Scanner, out chan Part, stop villa.Stop) (toStop bool) {
	start := s.Pos()
	if s.Peek() == '-' {
		s.Next()
	}
	if r := s.Next(); r == scanner.EOF {
		return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
	} else if !isDigit(r) {
		return output(out, stop, TP_ERROR, start, s.Pos())
	} else if r != '0' {
		// only a leading digit other than '0' may be followed by more integer digits
		for isDigit(s.Peek()) {
			s.Next()
		}
	}
	if s.Peek() == '.' {
		s.Next()
		if r := s.Next(); r == scanner.EOF {
			return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
		} else if !isDigit(r) {
			return output(out, stop, TP_ERROR, start, s.Pos())
		}
		for isDigit(s.Peek()) {
			s.Next()
		}
	}
	if s.Peek() == 'e' || s.Peek() == 'E' {
		s.Next()
		if s.Peek() == '+' || s.Peek() == '-' {
			s.Next()
		}
		if r := s.Next(); r == scanner.EOF {
			return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
		} else if !isDigit(r) {
			return output(out, stop, TP_ERROR, start, s.Pos())
		}
		for isDigit(s.Peek()) {
			s.Next()
		}
	}
	return output(out, stop, TP_NUMBER, start, s.Pos())
}
func parseErr(s *scanner.Scanner, msg string, params ...interface{}) error {
	p := s.Pos()
	text := fmt.Sprintf(msg, params...)
	return fmt.Errorf("%s[%d:%d] %s", p.Filename, p.Line, p.Column-1, text)
}
func unexpected(s *scanner.Scanner, tok rune) error {
	return fmt.Errorf("unexpected %s token at %s: %s", scanner.TokenString(tok), s.Pos(), s.TokenText())
}
func err(s *scanner.Scanner, msg string) {
	s.ErrorCount++
	fmt.Println("err: ", msg, " at: ", s.Pos())
}
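The err helper above bumps s.ErrorCount by hand for application-level problems; text/scanner itself increments ErrorCount and calls the Error hook for lexical problems. A stdlib-only sketch of that wiring, with an intentionally unterminated string literal as the assumed input; this illustrates the Error/ErrorCount API rather than any of the snippets above:

// Standalone sketch: a custom Error handler plus ErrorCount for lexical errors.
package main

import (
	"fmt"
	"strings"
	"text/scanner"
)

func main() {
	var s scanner.Scanner
	s.Init(strings.NewReader(`"unterminated`))
	s.Error = func(s *scanner.Scanner, msg string) {
		// Called by the scanner when it hits a lexical error.
		fmt.Println("scan error:", msg, "at:", s.Pos())
	}
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		_ = tok // tokens are not needed; we only want the error accounting
	}
	fmt.Println("total errors:", s.ErrorCount)
}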
func scanString(s *scanner.Scanner, out chan Part, stop villa.Stop) (toStop bool) {
	start := s.Pos()
	// start quote
	if r := s.Next(); r == scanner.EOF {
		return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
	} else if r != '"' {
		return output(out, stop, TP_ERROR, start, s.Pos())
	}
	// body
	for s.Peek() != '"' {
		if r := s.Next(); r == scanner.EOF {
			return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
		} else if r == '\\' {
			switch s.Next() {
			case scanner.EOF:
				return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
			case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
				// just ok
			case 'u':
				for i := 0; i < 4; i++ {
					r := s.Next()
					if r == scanner.EOF {
						return output(out, stop, TP_EOF_UNEXPECTED, start, s.Pos())
					}
					if !isHexadecimal(r) {
						return output(out, stop, TP_ERROR, start, s.Pos())
					}
				}
			default:
				return output(out, stop, TP_ERROR, start, s.Pos())
			}
		}
	}
	// end quote
	s.Next()
	return output(out, stop, TP_STRING, start, s.Pos())
}
func parsePo(r io.Reader, filename string) (*Po, error) {
	comment := false
	s := new(scanner.Scanner)
	var err error
	s.Init(r)
	s.Filename = filename
	s.Error = func(s *scanner.Scanner, msg string) {
		if !comment {
			err = fmt.Errorf("error parsing %s: %s", s.Pos(), msg)
		}
	}
	s.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanInts
	tok := s.Scan()
	po := &Po{Attrs: make(map[string]string)}
	var trans *Translation
	for tok != scanner.EOF && err == nil {
		if tok == '#' {
			// Skip until EOL
			comment = true
			s.Whitespace = whitespace
			for tok != '\n' && tok != scanner.EOF {
				tok = s.Scan()
			}
			s.Whitespace = scanner.GoWhitespace
			comment = false
			tok = s.Scan()
			continue
		}
		if tok != scanner.Ident {
			err = unexpected(s, tok)
			break
		}
		text := s.TokenText()
		switch text {
		case "msgctxt":
			if trans != nil {
				if len(trans.Translations) == 0 {
					err = unexpected(s, tok)
					break
				}
				po.addTranslation(trans)
			}
			trans = &Translation{Context: readString(s, &tok, &err)}
		case "msgid":
			if trans != nil {
				if len(trans.Translations) > 0 || trans.Singular != "" {
					po.addTranslation(trans)
				} else if trans.Context != "" {
					trans.Singular = readString(s, &tok, &err)
					break
				}
			}
			trans = &Translation{Singular: readString(s, &tok, &err)}
		case "msgid_plural":
			if trans == nil || trans.Plural != "" {
				err = unexpected(s, tok)
				break
			}
			trans.Plural = readString(s, &tok, &err)
		case "msgstr":
			str := readString(s, &tok, &err)
			if tok == '[' {
				tok = s.Scan()
				if tok != scanner.Int {
					err = unexpected(s, tok)
					break
				}
				ii, _ := strconv.Atoi(s.TokenText())
				if ii != len(trans.Translations) {
					err = unexpected(s, tok)
					break
				}
				if tok = s.Scan(); tok != ']' {
					err = unexpected(s, tok)
					break
				}
				str = readString(s, &tok, &err)
			}
			trans.Translations = append(trans.Translations, str)
		default:
			err = unexpected(s, tok)
		}
	}
	if trans != nil {
		po.addTranslation(trans)
	}
	if err != nil {
		return nil, err
	}
	for _, v := range po.Messages {
		if v.Context == "" && v.Singular == "" {
			if len(v.Translations) > 0 {
				meta := v.Translations[0]
				for _, line := range strings.Split(meta, "\n") {
					colon := strings.Index(line, ":")
					if colon > 0 {
						key := strings.TrimSpace(line[:colon])
						value := strings.TrimSpace(line[colon+1:])
						po.Attrs[key] = value
					}
				}
			}
			break
		}
	}
	return po, nil
}
// func ScanCode tbd; and maybe rename ScanCode, something like ScanLibFile
func ScanCode(filename string, LibDataIn0, LibDataIn1, LibDataIn2, LibDataIn3, LibDataIn4 []string, lmt int) (LibData0, LibData1, LibData2, LibData3, LibData4 []string) {
	//func ScanCode(filename string, LibDataIn [][]string, lmt int) (LibData [][]string) {
	//func ScanCode(filename string, LibData [][]string, lmt int) {
	//func ScanCode(filename string, LibData, lmt int) (codeLns int) {
	//' usage: ScanCode(filePathName, lmt)
	//' no longer a return value; lmt usually 0 for whole file
	//' now LibDataIn0 etc in and LibData0 etc out, so create and copy to
	lenLDI := len(LibDataIn0)
	LibData0 = make([]string, lenLDI)
	LibData1 = make([]string, lenLDI)
	LibData2 = make([]string, lenLDI)
	LibData3 = make([]string, lenLDI)
	LibData4 = make([]string, lenLDI)
	copy(LibData0, LibDataIn0)
	copy(LibData1, LibDataIn1)
	copy(LibData2, LibDataIn2)
	copy(LibData3, LibDataIn3)
	copy(LibData4, LibDataIn4)

	//' this section reads a file, obtains tokens via scanner.Scanner etc; fills array tok_Aiis
	tok_Aiis := make([]iis, 0)
	this_Aiis := make([]iis, 1)
	src, err := os.Open(filename)
	if err != nil {
		panic(err)
	}
	var s scanner.Scanner
	s.Init(src)
	tok := s.Scan()
	for tok != scanner.EOF {
		tokPos := s.Pos()
		PosLn, PosCol := 0, 0 //' for scope
		tokStr := s.TokenText()
		PosStr := tokPos.String() //' prep for save to array
		posAry := strings.Split(PosStr, ":")
		PosLnStr := posAry[0]
		PosColStr := posAry[1]
		_, err := fmt.Sscan(PosLnStr, &PosLn)
		if err != nil {
			fmt.Println("ScanCode.Sscan:", err)
			return
		}
		_, err = fmt.Sscan(PosColStr, &PosCol)
		if err != nil {
			fmt.Println("ScanCode.Sscan:", err)
			return
		}
		//fmt.Println("<diag.ScanCode.dmy> PosLn [", PosLn, "]; PosCol [", PosCol, "]; tokStr [", tokStr, "]")
		this_Aiis[0].Ln = PosLn
		this_Aiis[0].Col = PosCol
		this_Aiis[0].Tok = tokStr
		tok_Aiis = append(tok_Aiis, this_Aiis[0])
		if (PosLn > lmt) && (lmt > 0) {
			break
		}
		tok = s.Scan()
	}

	//' this section processes contents of tok_Aiis, using tokPttnSep & tokPttn
	ixLimit := len(tok_Aiis)
	step := 1
	ix := -1
	//' next 3 set up the rules for tokPttn; are generated by tokPttnSep from a condensed format
	relPos_Ai := []int{}       //' 0 for current position, 1 for next, -1 for previous
	relPosInv_Ai := []int{}    //' 0 for true if found, 1 for true if not found (ie 'inverted')
	relPosTok_As := []string{} //' token to compare to
	name := ""                 //' for scope
	for {
		ix += step
		if ix >= ixLimit {
			break
		}
		step = 1 //' restore default in case current token not recognized
		Ln := tok_Aiis[ix].Ln
		LnStr := fmt.Sprintf("%d", Ln)
		if (ix+1 < ixLimit) && (vb > 8) {
			fmt.Println("<diag.ScanCode.detail> step, ix, Ln, tok, next-tok:", step, ix, Ln, tok_Aiis[ix].Tok, tok_Aiis[ix+1].Tok)
		}
		//' Verbosity option: suppresses print unless vb is >= the param that follows it,
		//' but does not suppress addition to arrays

		//' pattern: named func
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|1|(")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			name = tok_Aiis[ix+1].Tok //' set outside the vb check so the appends below always record the name
			if vb >= 3 {
				fmt.Println(Ln, "func", name)
				//' LibData entries: file path-name, line# in file, type of entry, entity name, other info if any.
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "func")
			LibData3 = append(LibData3, name)
			LibData4 = append(LibData4, "")
			step = 1
			continue
		}

		//' pattern: unnamed parameterless inline func
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|0|(|2|0|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Println(Ln, "parameterless inline func")
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "func")
			LibData3 = append(LibData3, "")
			LibData4 = append(LibData4, "parameterless inline func")
			step = 3
			continue
		}

		//' pattern: unnamed parameterless inline func as a parameter of a func call; later realized may be a func 'contract'
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|0|(|-3|0|,|-1|0|,")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Println(Ln, "inline func as a parameter of a func call")
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "inline func as a parameter of a func call")
			LibData3 = append(LibData3, "")
			LibData4 = append(LibData4, "(possibly contract for returned func)")
			step = 2
			continue
		}

		//' pattern: method, 1 token in parens
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|0|(|3|0|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Printf("%d method %s of (%s)\n", Ln, tok_Aiis[ix+4].Tok, tok_Aiis[ix+2].Tok)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "method")
			LibData3 = append(LibData3, tok_Aiis[ix+4].Tok)
			LibData4 = append(LibData4, "of ("+tok_Aiis[ix+2].Tok+")")
			step = 4
			continue
		}

		//' pattern: method, 2 tokens in parens
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|0|(|4|0|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			//' special case, may be (t type) or (*type)
			spacer := " "
			if strings.EqualFold(tok_Aiis[ix+2].Tok, "*") {
				spacer = ""
			}
			if vb >= 2 {
				fmt.Printf("%d method %s of (%s%s%s)\n", Ln, tok_Aiis[ix+5].Tok, tok_Aiis[ix+2].Tok, spacer, tok_Aiis[ix+3].Tok)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "method")
			LibData3 = append(LibData3, tok_Aiis[ix+5].Tok)
			LibData4 = append(LibData4, fmt.Sprintf("of (%s%s%s)", tok_Aiis[ix+2].Tok, spacer, tok_Aiis[ix+3].Tok))
			step = 5
			continue
		}

		//' pattern: method, 3 tokens in parens
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func|1|0|(|5|0|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Printf("%d method %s of (%s %s%s)\n", Ln, tok_Aiis[ix+6].Tok, tok_Aiis[ix+2].Tok, tok_Aiis[ix+3].Tok, tok_Aiis[ix+4].Tok)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "method")
			LibData3 = append(LibData3, tok_Aiis[ix+6].Tok)
			LibData4 = append(LibData4, fmt.Sprintf("of (%s %s%s)", tok_Aiis[ix+2].Tok, tok_Aiis[ix+3].Tok, tok_Aiis[ix+4].Tok))
			step = 6
			continue
		}

		//' func and none of previous patterns match, call it an 'oddball' until figure out new pattern
		//' pattern: any func not yet handled
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|func")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 1 {
				fmt.Println(Ln, "oddball func not currently recognized by goLibrarian/ScanCode")
				contextPr(tok_Aiis, ix, ixLimit, -3, 6)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "oddball func ")
			LibData3 = append(LibData3, "")
			LibData4 = append(LibData4, "not currently recognized by goLibrarian/ScanCode")
			step = 1
			continue
		}

		//' pattern: grouped structs, list each struct name
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|type|1|0|(|3|0|struct")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			iz, err := tokSeekMirror(tok_Aiis, ix+1, ixLimit)
			if err == nil {
				//fmt.Println(Ln, ": start of grouped type ",
				//	"declaration set of structs; struct lines & names:")
				//' w/i those lines, want to print similar to regular struct print, eg
				//' when tok_Aiis[iy].Tok is struct:
				//' ("\t", tok_Aiis[iy].Ln, "struct ", tok_Aiis[iy-1].Tok)
				for iy := ix + 2; iy < iz; iy++ {
					if strings.EqualFold(tok_Aiis[iy].Tok, "struct") {
						if vb >= 1 {
							fmt.Println("\t", tok_Aiis[iy].Ln, "struct ", tok_Aiis[iy-1].Tok)
						} //' vb
						//' appends
						LibData0 = append(LibData0, filename)
						LibData1 = append(LibData1, LnStr)
						LibData2 = append(LibData2, "struct")
						LibData3 = append(LibData3, tok_Aiis[iy-1].Tok)
						LibData4 = append(LibData4, "(grouped)")
					} //' EqualFold
				} //' iy
			} else {
				if vb >= 1 {
					fmt.Println(Ln, "start of grouped type declaration set of structs (but failed to find end)")
					fmt.Println(err)
				} //' vb
				//' appends
				LibData0 = append(LibData0, filename)
				LibData1 = append(LibData1, LnStr)
				LibData2 = append(LibData2, "error, failed to find end of grouped type declaration")
				LibData3 = append(LibData3, "")
				LibData4 = append(LibData4, "(struct as part of that)")
			} //' err
			step = iz - ix
			continue
		} //' tokPttn

		//' pattern: grouped type, list as if each type defined separately
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|type|1|0|(")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			iz, err := tokSeekMirror(tok_Aiis, ix+1, ixLimit)
			//fmt.Println(Ln,
			//	"start of grouped type declaration set (exploring that content is TBD)")
			if err == nil {
				//fmt.Println("<diag.ScanCode.tokSeekMirror> found balancing token", tok_Aiis[iz].Tok, "on line", tok_Aiis[iz].Ln)
				//fmt.Println(Ln,
				//	"start of grouped type declaration set (found that it spans",
				//	Ln, "to", tok_Aiis[iz].Ln, ")")
				//' rough plan: (inline, not func) tokens ix+1 to iz-1, with prepended \n whenever
				//' .Ln chgs; also, simulate what individual type stmts would have looked like
				//fmt.Println(Ln, ": start of grouped type ",
				//	"declaration set; simulating individual type stmts:")
				if vb >= 1 {
					fmt.Println(Ln, ": start grouped type ")
				} //' vb
				lnOld := Ln
				iyOld := ix + 2
				for iy := ix + 2; iy < iz; iy++ {
					//' rethinking this for array appends, created tokRange to return a string
					//' containing the tokens in a range, so can find range of previous line
					//' then generate an append for it
					lnNew := tok_Aiis[iy].Ln
					if lnNew > lnOld {
						lnOld = lnNew
						//fmt.Print("\n\ttype ")
						tokStr := tokRange(tok_Aiis, ixLimit, iyOld, iy+1)
						if vb >= 1 {
							fmt.Println("\ttype ", tokStr)
						} //' vb
						//' appends
						LibData0 = append(LibData0, filename)
						LibData1 = append(LibData1, LnStr)
						LibData2 = append(LibData2, "type")
						LibData3 = append(LibData3, tokStr)
						LibData4 = append(LibData4, "(grouped)")
						iyOld = iy + 2
					} //' lnNew > lnOld
					//fmt.Print(" ", tok_Aiis[iy].Tok)
				}
			} else {
				if vb >= 1 {
					fmt.Println(Ln, "start of grouped type declaration set (but failed to find end)")
					fmt.Println(err)
				} //' vb
				//' appends
				LibData0 = append(LibData0, filename)
				LibData1 = append(LibData1, LnStr)
				LibData2 = append(LibData2, "error, failed to find end of grouped type declaration")
				LibData3 = append(LibData3, "")
				LibData4 = append(LibData4, "")
			} //' err
			step = iz - ix
			continue
		}

		//' pattern: 'type xx func' is a func contract; func used as parameter of this type must satisfy this signature
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|type|2|0|func")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Println(Ln, "type", tok_Aiis[ix+1].Tok, "is a func contract")
				//' func used as parameter of this type must satisfy this signature
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "type")
			LibData3 = append(LibData3, tok_Aiis[ix+1].Tok)
			LibData4 = append(LibData4, "is a func contract")
			step = 3
			continue
		}

		//' pattern: solo struct declaration
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|type|2|0|struct")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Println(Ln, "struct", tok_Aiis[ix+1].Tok)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "struct")
			LibData3 = append(LibData3, tok_Aiis[ix+1].Tok)
			LibData4 = append(LibData4, "")
			step = 1
			continue
		}

		//' pattern: solo type declaration
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|type|2|1|struct")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 2 {
				fmt.Println(Ln, "type", tok_Aiis[ix+1].Tok)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "type")
			LibData3 = append(LibData3, tok_Aiis[ix+1].Tok)
			LibData4 = append(LibData4, "")
			step = 2
			continue
		}

		//' pattern: parameterless inline func deferred until close
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|defer|1|0|func|2|0|(|3|0|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 1 {
				fmt.Println(Ln, "parameterless inline func deferred until close")
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "deferred")
			LibData3 = append(LibData3, "")
			LibData4 = append(LibData4, "parameterless inline func")
			step = 4
			continue
		}

		//' pattern: inline func (not parameterless) deferred until close
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|defer|1|0|func|2|0|(|3|1|)")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			if vb >= 1 {
				fmt.Println(Ln, "inline func deferred until close")
				//' BTW: no examples of this seen yet, included 'just in case'
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "deferred func")
			LibData3 = append(LibData3, "")
			LibData4 = append(LibData4, "inline func")
			step = 4
			continue
		}

		//' pattern: solo const declaration if ( is not next
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|const|1|1|(")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			//' in order to use whole (non-cmt) rest of same line, a loop to find tok of next line
			iz := ix
			for iy := ix; iy < ixLimit; iy++ {
				if tok_Aiis[iy].Ln > Ln {
					iz = iy - 1
					break
				} //' Ln
			} //' iy
			tokStr := tokRange(tok_Aiis, ixLimit, ix+1, iz)
			if vb >= 1 {
				fmt.Println(Ln, "const", tokStr)
			}
			LibData0 = append(LibData0, filename)
			LibData1 = append(LibData1, LnStr)
			LibData2 = append(LibData2, "const")
			LibData3 = append(LibData3, tokStr)
			LibData4 = append(LibData4, "")
			step = 2
			continue
		}

		//' pattern: grouped const declaration if ( is next
		relPos_Ai, relPosInv_Ai, relPosTok_As = tokPttnSep("|0|0|const|1|0|(")
		if tokPttn(tok_Aiis, ix, ixLimit, relPos_Ai, relPosInv_Ai, relPosTok_As) {
			iz, err := tokSeekMirror(tok_Aiis, ix+1, ixLimit)
			//fmt.Println(Ln,
			//	"start of grouped type declaration set (exploring that content is TBD)")
			if err == nil {
				if vb >= 1 {
					fmt.Println(Ln, "start grouped const ")
				} //' vb
				lnOld := Ln + 1
				iyOld := ix + 2
				for iy := ix + 2; iy <= iz; iy++ {
					lnNew := tok_Aiis[iy].Ln
					if lnNew > lnOld {
						lnOld = lnNew
						//fmt.Print("\n\ttype ")
						tokStr := tokRange(tok_Aiis, ixLimit, iyOld, iy-1)
						if vb >= 1 {
							fmt.Println("\tconst ", tokStr)
						} //' vb
						//' appends
						LibData0 = append(LibData0, filename)
						LibData1 = append(LibData1, LnStr)
						LibData2 = append(LibData2, "const")
						LibData3 = append(LibData3, tokStr)
						LibData4 = append(LibData4, "(grouped)")
						iyOld = iy
					} //' lnNew > lnOld
					//fmt.Print(" ", tok_Aiis[iy].Tok)
				} //' iy
			} //' err
		} //' tokPttn

		//' for now, ignore all other tokens... not flagging as unrecognized
	} //' for (loop exits only on break)

	//fmt.Println("<diag.ScanCode> just before return, example lines", LibData[0], "\n", LibData[2])
	return
} //' ScanCode