func cmdFind(root *CmdNode, path string, level int) (*CmdNode, error) {
	var s scanner.Scanner
	s.Error = func(s *scanner.Scanner, msg string) {
		log.Printf("command scan error: %s [%s]", msg, path)
	}
	s.Init(strings.NewReader(path))

	parent := root
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		//log.Printf("cmdFind: token: [%s]", s.TokenText())
		label := s.TokenText()
		children := matchChildren(parent.Children, label)
		size := len(children)
		if size < 1 {
			return nil, fmt.Errorf("cmdFind: not found: [%s] under [%s]", label, parent.Path)
		}
		if size > 1 {
			return nil, fmt.Errorf("cmdFind: ambiguous: [%s] under [%s]", label, parent.Path)
		}
		parent = children[0]
	}

	//log.Printf("cmdFind: found [%s] as [%s]", path, parent.Path)
	return parent, nil
}
func Example() {
	const src = `
// This is scanned code.
if a > 10 {
	someParsable = text
}`
	var s scanner.Scanner
	s.Init(strings.NewReader(src))
	var tok rune
	for tok != scanner.EOF {
		tok = s.Scan()
		fmt.Println("At position", s.Pos(), ":", s.TokenText())
	}

	// Output:
	// At position 3:4 : if
	// At position 3:6 : a
	// At position 3:8 : >
	// At position 3:11 : 10
	// At position 3:13 : {
	// At position 4:15 : someParsable
	// At position 4:17 : =
	// At position 4:22 : text
	// At position 5:3 : }
	// At position 5:3 :
}
func TestReduceScanner(t *testing.T) {
	in := make(chan interface{})
	go func() {
		reader := strings.NewReader(mapInput)
		s := scanner.Scanner{}
		s.Init(reader)
		// Scan in the loop header so the first token is not dropped and the
		// empty EOF text is never sent.
		for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
			in <- s.TokenText()
		}
		close(in)
	}()
	mapOut := Map(in, func(input interface{}) interface{} {
		key := input.(string)
		if len(key) == 0 {
			return ""
		}
		return string(key[0])
	})
	redOut := Reduce(mapOut, func(key interface{}, values []interface{}) interface{} {
		return len(values)
	})
	for tok := range redOut {
		if tok.Value.(int) == 0 {
			t.Error("expected a positive reduce count")
		}
	}
}
func (this limitedStringSpan) scanString(s *scanner.Scanner) (string, error) {
	// Only an identifier token (scanner.Ident, the raw -2 in the original
	// condition) is accepted here.
	if tok := s.Scan(); tok == scanner.Ident {
		return s.TokenText(), nil
	}
	return "", NewInvalidTypeError()
}
func (t Tokenizer) CreateTokens(request *messaging.ParserRequest) (response messaging.ParserResponse) {
	response.Body = make(map[string]string)
	b := bytes.NewBufferString(request.Query)
	var s scanner.Scanner
	s.Init(b)
	index := 0
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		response.Body[strconv.Itoa(index)] = s.TokenText()
		index++
	}
	if index == 0 {
		response.IsSuccess = false
		response.Message = "Error! Nil Query Allocated!"
	} else {
		response.IsSuccess = true
		response.Message = "Success! Query Tokenized Successfully!"
	}
	return
}
// extractFloat32 parses the value string parameter and returns the first
// Number embedded within the string. It returns the zero Number if it
// doesn't find any number in the value string.
// Example: "some4.56more" would return 4.56.
func extractFloat32(value string) Number {
	var sc scanner.Scanner
	var valFloat64 float64
	var valFloat32 Number
	var isFound bool
	if len(value) > 0 {
		sc.Init(strings.NewReader(value))
		sc.Mode = scanner.ScanFloats
		for tok := sc.Scan(); tok != scanner.EOF; tok = sc.Scan() {
			// Non-numeric runes fail to parse; keep scanning until a
			// float token succeeds.
			if v, err := strconv.ParseFloat(sc.TokenText(), 64); err == nil {
				valFloat64 = v
				isFound = true
				break
			}
		}
	}
	if isFound {
		valFloat32 = Number(valFloat64)
	}
	return valFloat32
}
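// A hedged usage sketch for extractFloat32, not part of the original source;
// it assumes Number converts cleanly from float64, as the code above implies.
func exampleExtractFloat32() {
	fmt.Println(extractFloat32("some4.56more")) // prints 4.56
	fmt.Println(extractFloat32("no digits"))    // prints the zero Number
}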
// readGoContents reads the contents of a Go file up to the package
// declaration. This can be used to find the build tags.
func readGoContents(fp string) ([]byte, error) {
	f, err := os.Open(fp)
	if err != nil {
		return []byte{}, err
	}
	defer f.Close()

	var s scanner.Scanner
	s.Init(f)
	var tok rune
	var pos scanner.Position
	for tok != scanner.EOF {
		tok = s.Scan()
		// Getting the token text will skip comments by default.
		tt := s.TokenText()
		// Build tags will not appear after the package declaration.
		if tt == "package" {
			pos = s.Position
			break
		}
	}

	buf := bytes.NewBufferString("")
	f.Seek(0, 0)
	_, err = io.CopyN(buf, f, int64(pos.Offset))
	if err != nil {
		return []byte{}, err
	}
	return buf.Bytes(), nil
}
func TestMapScanner(t *testing.T) {
	in := make(chan interface{})
	go func() {
		reader := strings.NewReader(mapInput)
		s := scanner.Scanner{}
		s.Init(reader)
		for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
			in <- s.TokenText()
		}
		close(in)
	}()
	out := Map(in, func(input interface{}) interface{} {
		key := input.(string)
		if len(key) == 0 {
			return ""
		}
		return string(key[0])
	})
	for tok := range out {
		for _, val := range tok.Values {
			if !strings.HasPrefix(val.(string), tok.Key.(string)) {
				t.Error(val.(string) + ` should have prefix "` + tok.Key.(string) + `"`)
			}
		}
	}
}
// gofmtFlags looks for a comment of the form
//
//	//gofmt flags
//
// within the first maxLines lines of the given file,
// and returns the flags string, if any. Otherwise it
// returns the empty string.
func gofmtFlags(filename string, maxLines int) string {
	f, err := os.Open(filename)
	if err != nil {
		return "" // ignore errors - they will be found later
	}
	defer f.Close()

	// initialize scanner
	var s scanner.Scanner
	s.Init(f)
	s.Error = func(*scanner.Scanner, string) {}        // ignore errors
	s.Mode = scanner.GoTokens &^ scanner.SkipComments // want comments

	// look for //gofmt comment
	for s.Line <= maxLines {
		switch s.Scan() {
		case scanner.Comment:
			const prefix = "//gofmt "
			if t := s.TokenText(); strings.HasPrefix(t, prefix) {
				return strings.TrimSpace(t[len(prefix):])
			}
		case scanner.EOF:
			return ""
		}
	}

	return ""
}
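// A hedged usage sketch for gofmtFlags; the file name and flag string are
// hypothetical. For a file whose first lines contain the comment
// "//gofmt -r=a+b->b+a", the call returns "-r=a+b->b+a".
func exampleGofmtFlags() {
	flags := gofmtFlags("testdata/rewrite.input", 20)
	fmt.Printf("gofmt flags: %q\n", flags)
}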
// ParseKeySet parses a string of comma separated keys
// made of dot separated sections into a KeySet.
func ParseKeySet(s string) (KeySet, error) {
	sc := new(scanner.Scanner).Init(strings.NewReader(s))
	sc.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanInts
	var k Key
	var ks KeySet
	for {
		// scan section
		switch sc.Scan() {
		case scanner.String:
			u, _ := strconv.Unquote(sc.TokenText())
			if len(u) == 0 {
				return nil, fmt.Errorf("part of key missing in %q", s)
			}
			k = append(k, u)
		case scanner.Ident, scanner.Int:
			k = append(k, sc.TokenText())
		default:
			return nil, fmt.Errorf("bad formatting in %q", s)
		}
		// scan separator
		switch sc.Scan() {
		case '.':
			continue
		case ',':
			ks = append(ks, k)
			k = nil
		case scanner.EOF:
			return append(ks, k), nil
		default:
			return nil, fmt.Errorf("invalid separator in %q", s)
		}
	}
}
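// A hedged usage sketch for ParseKeySet, assuming Key and KeySet are slice
// types as the appends above imply: dots separate sections within a key,
// commas separate keys, and quoted sections may contain either character.
func exampleParseKeySet() {
	ks, err := ParseKeySet(`alpha.beta,"dotted.section".gamma,42`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(ks) // [[alpha beta] [dotted.section gamma] [42]]
}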
func Parser(s string) []string {
	var items []string
	var sc scanner.Scanner
	sc.Init(strings.NewReader(s))
	for tok := sc.Scan(); tok != scanner.EOF; tok = sc.Scan() {
		item := sc.TokenText()
		// Merge two-character operators (!=, >=, <=, ==, ||, &&) that the
		// scanner returns as separate single-character tokens.
		if i := len(items); i > 0 {
			prev := items[i-1]
			if (item == "=" && (prev == "!" || prev == ">" || prev == "<" || prev == "=")) ||
				(item == "|" && prev == "|") ||
				(item == "&" && prev == "&") {
				items[i-1] = prev + item
				continue
			}
		}
		items = append(items, item)
	}
	return items
}
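// A hedged usage sketch for Parser: text/scanner returns operator characters
// one at a time, and Parser merges the two-character pairs back together.
func exampleParser() {
	fmt.Println(Parser(`a >= 1 && b != 2`))
	// expected: [a >= 1 && b != 2] with ">=", "&&", and "!=" as single items
}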
// Parse updates an inverted index with the file's words.
func (index *InvertedIndex) Parse(filename string) {
	if index.alreadyParsed(filename) {
		return
	}
	file, err := os.Open(filename)
	if err != nil {
		cwd, _ := os.Getwd()
		fmt.Printf("The file named %s doesn't exist; the current directory is %s\n", filename, cwd)
		return
	}
	defer file.Close()

	var s scanner.Scanner
	s.Init(file)
	for token := s.Scan(); token != scanner.EOF; token = s.Scan() {
		tokenText := s.TokenText()
		pos := s.Pos()
		result := Result{File: filename, Line: pos.Line, Index: pos.Column - len(tokenText)}
		// append on a nil slice allocates, so missing keys need no special case.
		index.indexMap[tokenText] = append(index.indexMap[tokenText], result)
	}
	index.files = append(index.files, filename)
}
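// A hedged usage sketch for InvertedIndex.Parse; the struct literal and the
// file name are assumptions, not part of the original source.
func exampleInvertedIndex() {
	index := &InvertedIndex{indexMap: make(map[string][]Result)}
	index.Parse("testdata/sample.txt")
	for _, r := range index.indexMap["hello"] {
		fmt.Printf("%s:%d:%d\n", r.File, r.Line, r.Index)
	}
}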
func TestUnwindScanner(t *testing.T) {
	in := make(chan interface{})
	go func() {
		reader := strings.NewReader(unwindInput)
		s := scanner.Scanner{}
		s.Init(reader)
		for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
			in <- s.TokenText()
		}
		close(in)
	}()
	out := Unwind(in, func(input interface{}, output chan<- interface{}) {
		output <- strings.ToLower(input.(string))
		output <- strings.ToUpper(input.(string))
	})
	index := 0
	for tok := range out {
		if index%2 == 0 {
			if strings.ToLower(tok.(string)) != tok.(string) {
				t.Error(tok, " should be lower cased")
			}
		} else {
			if strings.ToUpper(tok.(string)) != tok.(string) {
				t.Error(tok, " should be upper cased")
			}
		}
		index++
	}
}
func main() {
	s := new(scanner.Scanner)
	s.Init(os.Stdin)
	for {
		spew.Printf("> ")
		x := s.Scan()
		// Stop at end of input instead of looping forever on EOF.
		if x == scanner.EOF {
			return
		}
		spew.Println(x, s.TokenText())
	}
}
// generate cuda wrapper for file.
func cuda2go(fname string) {
	// open cuda file
	f, err := os.Open(fname)
	util.PanicErr(err)
	defer f.Close()

	// read tokens
	var token []string
	var s scanner.Scanner
	s.Init(f)
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if !filter(s.TokenText()) {
			token = append(token, s.TokenText())
		}
	}

	// find function name and arguments
	funcname := ""
	argstart, argstop := -1, -1
	for i := 0; i < len(token); i++ {
		if token[i] == "__global__" {
			funcname = token[i+2]
			argstart = i + 4
		}
		if argstart > 0 && token[i] == ")" {
			argstop = i + 1
			break
		}
	}
	argl := token[argstart:argstop]

	// isolate individual arguments
	var args [][]string
	start := 0
	for i, a := range argl {
		if a == "," || a == ")" {
			args = append(args, argl[start:i])
			start = i + 1
		}
	}

	// separate arg names/types and make pointers Go-style
	argn := make([]string, len(args))
	argt := make([]string, len(args))
	for i := range args {
		if args[i][1] == "*" {
			args[i] = []string{args[i][0] + "*", args[i][2]}
		}
		argt[i] = typemap(args[i][0])
		argn[i] = args[i][1]
	}
	wrapgen(fname, funcname, argt, argn)
}
func main() {
	values := url.Values{}
	// add the key-value request parameter
	values.Add("city", "200010")
	fmt.Println("parameters:\n", values.Encode())

	/***
	// GET with a time limit
	client := &http.Client{Timeout: time.Duration(10 * time.Second)}
	get(client, values)
	****/

	// plain GET
	get(values)
	// plain POST
	post(values)
	fmt.Println("GET request:\n", responseGet)
	fmt.Println("POST request:\n", responsePost)

	// parse the received JSON
	dec := json.NewDecoder(bytes.NewBufferString(responseGet))
	var d data
	dec.Decode(&d)
	b, err := json.Marshal(d)
	if err != nil {
		fmt.Println(err)
	}
	jsonData := string(b)
	fmt.Println("[date: date, title: weather, url: URL]")
	fmt.Println(jsonData)

	// lexical analysis: collect every string token from the JSON
	lexArray := make([]string, 1)
	lexical := strings.NewReader(jsonData)
	var scan scanner.Scanner
	scan.Init(lexical)
	for {
		x := scan.Scan()
		//fmt.Println(x, scan.TokenText())
		if x == scanner.EOF {
			break
		} else if x == scanner.String {
			lexArray = append(lexArray, scan.TokenText())
		}
	}
	fmt.Println("Tomorrow is", lexArray[10], ". The weather will be", lexArray[13], ". The URL is", lexArray[15], ".")
}
// klexer scans the sourcefile and returns tokens on the channel c.
func klexer(s scanner.Scanner, c chan lex) {
	var l lex
	str := "" // Hold the current read text
	commt := false
	key := true
	tok := s.Scan()
	defer close(c)
	for tok != scanner.EOF {
		l.column = s.Position.Column
		l.line = s.Position.Line
		switch x := s.TokenText(); x {
		case ":":
			if commt {
				break
			}
			l.token = str
			if key {
				l.value = _KEY
				c <- l
				// Next token is a space, eat it
				s.Scan()
				key = false
				str = ""
			} else {
				l.value = _VALUE
			}
		case ";":
			commt = true
		case "\n":
			// A newline ends any comment and flushes the current token.
			l.value = _VALUE
			l.token = str
			c <- l
			str = ""
			commt = false
			key = true
		default:
			if commt {
				break
			}
			str += x
		}
		tok = s.Scan()
	}
	if len(str) > 0 {
		// Send remainder
		l.token = str
		l.value = _VALUE
		c <- l
	}
}
func (this delimiterSpan) scanString(s *scanner.Scanner) (string, error) {
	// Accept any token that is not EOF, an identifier (scanner.Ident, -2),
	// an integer (scanner.Int, -3), or a '-' (rune 45).
	tok := s.Scan()
	if tok != scanner.EOF && tok != scanner.Ident && tok != '-' && tok != scanner.Int {
		return s.TokenText(), nil
	}
	if debugLayoutDef {
		fmt.Printf("expected tok != (Ident, Int, '-'), received: %d (%q)\n", tok, s.TokenText())
	}
	return "", NewInvalidTypeError()
}
// cc_Calc computes the cyclomatic complexity of the source read from r:
// one base path plus one for each branch keyword.
func cc_Calc(r io.Reader) int {
	cc := 1 // default
	var s scanner.Scanner
	s.Init(r)
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if tok == scanner.Ident && isBranch(s.TokenText()) {
			cc++
		}
	}
	return cc
}
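// A hedged usage sketch for cc_Calc, assuming isBranch recognizes keywords
// such as "if" and "for" (its definition is not shown above).
func exampleCCCalc() {
	src := `func f(x int) int { if x > 0 { for x > 1 { x-- } } return x }`
	fmt.Println(cc_Calc(strings.NewReader(src))) // 3 under that assumption
}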
func scanKey(s *scanner.Scanner) (key string, err error) {
	t := s.Scan()
	switch t {
	case scanner.Ident, scanner.Int, scanner.Float:
		key = s.TokenText()
	case scanner.String:
		key = strings.Trim(s.TokenText(), "\"")
	default:
		err = fmt.Errorf("unexpected token at %s: expected ident, number or string, had %s", s.Pos(), scanner.TokenString(t))
	}
	return
}
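// A hedged usage sketch for scanKey: identifiers, numbers, and quoted strings
// all yield a key, with surrounding double quotes trimmed.
func exampleScanKey() {
	var s scanner.Scanner
	s.Init(strings.NewReader(`"hello world" count42 3.14`))
	k1, _ := scanKey(&s) // hello world
	k2, _ := scanKey(&s) // count42
	k3, _ := scanKey(&s) // 3.14
	fmt.Println(k1, k2, k3)
}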
func main() {
	var s scanner.Scanner
	s.Init(os.Stdin)
	for {
		switch s.Scan() {
		case scanner.EOF:
			return // all done
		case scanner.Ident:
			fmt.Println(s.TokenText())
		}
	}
}
func Map(fileName string, intermediate chan Partial) {
	file, err := os.Open(fileName)
	if err == nil {
		defer file.Close()
		var s scanner.Scanner
		s.Init(file)
		for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
			intermediate <- Partial{s.TokenText(), fileName}
		}
	}
	// An empty Partial marks the end of this file's stream.
	intermediate <- Partial{"", ""}
}
func StringToMap(value string) (map[string]string, error) {
	var tokenizer scanner.Scanner
	tokenizer.Init(strings.NewReader(value))
	tokenizer.Error = func(*scanner.Scanner, string) {}

	result := make(map[string]string)
	next := func() string {
		tokenizer.Scan()
		return tokenizer.TokenText()
	}

	var lvalue, rvalue, expression string
	for {
		lvalue = next()
		if lvalue == "" {
			return result, fmt.Errorf("expected key at pos '%d' but found none; "+
				"map values should be 'key=value' separated by commas", tokenizer.Pos().Offset)
		}
		if lvalue == "{" {
			// Assume this is JSON format and attempt to un-marshal
			return JSONToMap(value)
		}
		expression = next()
		if expression != "=" {
			return result, fmt.Errorf("expected '=' after '%s' but found '%s'; "+
				"map values should be 'key=value' separated by commas", lvalue, expression)
		}
		rvalue = next()
		if rvalue == "" {
			return result, fmt.Errorf("expected value after '%s' but found none; "+
				"map values should be 'key=value' separated by commas", expression)
		}
		// TODO: Handle quoted strings and escaped double quotes
		result[lvalue] = rvalue

		// Are there any more tokens?
		delimiter := next()
		if delimiter == "" {
			break
		}
		// Should be a comma next
		if delimiter != "," {
			return result, fmt.Errorf("expected ',' after '%s' but found '%s'; "+
				"map values should be 'key=value' separated by commas", rvalue, delimiter)
		}
	}
	return result, nil
}
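// A hedged usage sketch for StringToMap; the input string is illustrative.
// JSON-shaped input (a leading '{') is delegated to JSONToMap instead.
func exampleStringToMap() {
	m, err := StringToMap("foo=bar, baz=qux")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(m["foo"], m["baz"]) // bar qux
}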
// readString concatenates consecutive quoted string tokens into one value.
func readString(s *scanner.Scanner, tok *rune, err *error) string {
	val := ""
	*tok = s.Scan()
	var str string
	for *tok == scanner.String {
		str, *err = strconv.Unquote(s.TokenText())
		if *err != nil {
			break
		}
		val += str
		*tok = s.Scan()
	}
	return val
}
// Number collects the next tokens in s that form a number and returns them
// as a single string. It assumes the scanner is positioned at the start of
// a number.
func Number(s *scanner.Scanner) string {
	var buffer bytes.Buffer
	for s.Scan() != scanner.EOF {
		buffer.WriteString(s.TokenText())
		// Stop once the next character is no longer a digit.
		if _, err := strconv.Atoi(string(s.Peek())); err != nil {
			return buffer.String()
		}
	}
	// Reaching end of input here is acceptable: return what was collected.
	return buffer.String()
}
func Print(s *scanner.Scanner, w io.Writer, p Printer) error {
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		tokText := s.TokenText()
		if err := p.Print(w, tokenKind(tok, tokText), tokText); err != nil {
			return err
		}
	}
	return nil
}
func ReadNotes(src io.Reader) (ret []interface{}, err error) {
	var s scanner.Scanner
	s.Init(src)
	isGroup := false
	curSign := 0
	var lastGroup []Note
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		v := s.TokenText()
		fmt.Println(v)
		switch v {
		case "(":
			isGroup = true
			continue
		case ")":
			isGroup = false
			continue
		case "+":
			curSign = 1
			continue
		case "-":
			curSign = -1
			continue
		}
		if !isGroup {
			note, err := Parse5(v, curSign)
			if err != nil {
				return ret, err
			}
			lastGroup = make([]Note, 1)
			lastGroup[0] = note
		}
	}
	return
}
// Parse parses a SPARQL query from the reader.
func (sparql *SPARQLUpdate) Parse(src io.Reader) error {
	b, _ := ioutil.ReadAll(src)
	s := new(scanner.Scanner).Init(bytes.NewReader(b))
	s.Mode = scanner.ScanIdents | scanner.ScanStrings
	start := 0
	level := 0
	verb := ""
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		switch tok {
		case scanner.Ident:
			if level == 0 {
				if len(verb) > 0 {
					verb += " "
				}
				verb += s.TokenText()
			}
		case '{':
			if level == 0 {
				start = s.Position.Offset
			}
			level++
		case '}':
			level--
			if level == 0 {
				query := SPARQLUpdateQuery{
					body:  string(b[start+1 : s.Position.Offset]),
					graph: NewGraph(sparql.baseURI),
					verb:  verb,
				}
				query.graph.Parse(strings.NewReader(query.body), "text/turtle")
				sparql.queries = append(sparql.queries, query)
			}
		case ';':
			if level == 0 {
				verb = ""
			}
		}
	}
	return nil
}
func TestSortScanner(t *testing.T) {
	in := make(chan interface{})
	go func() {
		reader := strings.NewReader(sortInput)
		s := scanner.Scanner{}
		s.Init(reader)
		for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
			in <- s.TokenText()
		}
		close(in)
	}()
	Sort(in, func(a interface{}, b interface{}) bool {
		return a.(string) < b.(string)
	})
	// No valid way to assert the result
}
// Text satisfies the Extractor interface.
//
// It is a specialized routine for correcting Golang source files. Currently
// it only checks comments, not identifiers, for spelling.
//
// Other items:
//   - check strings, but need to ignore
//     * import "statements" blocks
//     * import ( "blocks" )
//   - skip first comment (line 0) if build comment
func (p *GolangText) Text(raw []byte) []byte {
	out := bytes.Buffer{}
	s := scanner.Scanner{}
	s.Init(bytes.NewReader(raw))
	s.Error = func(s *scanner.Scanner, msg string) {}
	s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars |
		scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments
	for {
		switch s.Scan() {
		case scanner.Comment:
			out.WriteString(s.TokenText())
			out.WriteByte('\n')
		case scanner.EOF:
			return out.Bytes()
		}
	}
}
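// A hedged usage sketch for GolangText.Text, assuming a zero-value GolangText
// is usable: only the comment tokens survive, one per line, ready for a
// spell checker.
func exampleGolangText() {
	src := []byte("package main\n\n// A speling mistake lives here.\nfunc main() {}\n")
	p := &GolangText{}
	fmt.Printf("%s", p.Text(src))
}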