func ProcessFileList(pt *test01.Parse2Type, inList []string, outFn string) (err error) {
	var fp *os.File
	if outFn != "" {
		fp, err = com.Fopen(outFn, "w")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Fatal: Unable to create output file %s: %s\n", outFn, err)
			err = fmt.Errorf("Fatal: Unable to create output file %s: %s", outFn, err)
			return
		}
		defer fp.Close()
	} else {
		fp = os.Stdout
	}

	// Verify that every input file exists before starting the scanner.
	for _, fn := range inList {
		if !com.Exists(fn) {
			fmt.Fprintf(os.Stderr, "Fatal: Missing input file %s\n", fn)
			err = fmt.Errorf("Fatal: Missing input file %s", fn)
			return
		}
	}

	// Feed the input files to the lexer in a goroutine; tokens come back over the channel.
	go func() {
		r := pbread.NewPbRead()
		for _, fn := range inList {
			r.OpenFile(fn)
		}
		pt.Lex.MatcherLexieTable(r, "S_Init")
	}()

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
	// Generate a parse tree and print it out.
	xpt := pt.GenParseTree(0)
	pt.TheTree = xpt
	xpt.DumpMtType(test01.Dbf, 0, 0)

	pt.ExecuteFunctions(0)

	if false {
		fmt.Fprintf(test01.Dbf, "----------------------------------- debug output ----------------------------------------------------\n")
		fmt.Fprintf(test01.Dbf, "%s\n", com.SVarI(xpt))
	}

	fmt.Fprintf(test01.Dbf, "----------------------------------- errors ----------------------------------------------------\n")
	pp := pt.CollectErrorNodes(0)
	for ii, vv := range pp {
		fmt.Fprintf(test01.Dbf, "Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
	}

	fmt.Fprintf(test01.Dbf, "----------------------------------- final template results ----------------------------------------------------\n")
	pt.OutputTree(test01.Dbf, 0)
	pt.OutputTree(fp, 0)

	return
}
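// Usage sketch for ProcessFileList (above). It assumes the *test01.Parse2Type
// arrives already wired the way main() wires its local Parse2Type (Lex created,
// channel mode on, lexer machine loaded); the file names are hypothetical.
func exampleProcessFileList(pt *test01.Parse2Type) {
	inputs := []string{"page1.tmpl", "page2.tmpl"} // hypothetical input templates
	if err := ProcessFileList(pt, inputs, "combined.out"); err != nil {
		fmt.Fprintf(os.Stderr, "ProcessFileList failed: %s\n", err)
	}
}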
// ---------------------------------------------------------------------------------------------------------------------------------------
func (pt *Parse2Type) ReadFileAsTemplate(fn string) (mtv *mt.MtType) {
	pt.LibraryMode = true
	pt.TemplateName = fn
	go func() {
		r := pbread.NewPbRead()
		r.OpenFile(fn)
		pt.Lex.MatcherLexieTable(r, "S_Init")
	}()
	pt.LibraryMode = false
	xpt := pt.GenParseTree(0)
	pt.DefineTemplate(fn, xpt)
	mtv = xpt
	return
}
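// Usage sketch for ReadFileAsTemplate (above): preload a file as a named
// template for later reuse. The wiring of pt follows the pattern in main()
// below; the file name is hypothetical.
func exampleReadFileAsTemplate(pt *Parse2Type) {
	tmpl := pt.ReadFileAsTemplate("lib/header.tmpl") // registered via DefineTemplate under this name
	_ = tmpl                                         // the parse tree is also returned for direct use
}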
// ---------------------------------------------------------------------------------------------------------------------------------------
func (pt *Parse2Type) ReadFileAndRun(fn, fn_o string) {
	// Feed the input file to the lexer in a goroutine; tokens come back over the channel.
	go func() {
		r := pbread.NewPbRead()
		r.OpenFile(fn)
		pt.Lex.MatcherLexieTable(r, "S_Init")
	}()
	xpt := pt.GenParseTree(0)
	pt.TheTree = xpt
	pt.ExecuteFunctions(0)
	fmt.Printf("Tree Dump = %s\n", com.SVarI(xpt))
	fp_o, err := com.Fopen(fn_o, "w")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %s\n", err)
	} else {
		defer fp_o.Close()
		pt.OutputTree(fp_o, 0)
	}
}
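// Usage sketch for ReadFileAndRun (above). The setup mirrors main() below
// (NewParse2Type, dfa.NewLexie, SetChanelOnOff, NewReadFile); the lexer-machine
// path and file names are hypothetical.
func exampleReadFileAndRun() {
	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true)             // tokens come back over the channel
	pt.Lex.NewReadFile("../in/django3.lex") // lexer machine definition (path assumed)
	pt.ReadFileAndRun("page.tmpl", "page.out")
}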
func main() {
	var fp *os.File

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)
	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	if opts.Debug != "" {
		// opts.Debug is a comma-separated list of debug flag names; turn on each one.
		s := strings.Split(opts.Debug, ",")
		for _, v := range s {
			com.DbOnFlags[v] = true
		}
	}
	if opts.Echo != "" {
		com.DbOnFlags["in-echo-machine"] = true // Output machine
	}

	fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set for getting back tokens via channel

	// ------------------------------------------------------ input machine --------------------------------------------------------------
	if opts.LexPat != "" {
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else if opts.ReadMachine != "" {
		fmt.Printf("Should input machine at this point\n") // xyzzy
	} else {
		fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n")
		os.Exit(1)
	}
	if opts.Machine != "" {
		fmt.Printf("Should output machine at this point\n") // xyzzy
	}

	// -------------------------------------------------- start scanning process ----------------------------------------------------------
	if opts.Tokens != "" {
		fp, err = com.Fopen(opts.Tokens, "w")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Fatal: Unable to create token output file %s: %s\n", opts.Tokens, err)
			os.Exit(1)
		}
	} else {
		fp = os.Stdout
	}

	if opts.Input != "" {
		go func() {
			r := pbread.NewPbRead()
			r.OpenFile(opts.Input)
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()
	} else {
		go func() {
			r := pbread.NewPbRead()
			for _, fn := range ifnList[1:] {
				r.OpenFile(fn)
			}
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()
	}

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
	if false { // just print tokens out to check the scanning process and CLI options
		for msg := range pt.Lex.Message {
			fmt.Fprintf(fp, "%+v\n", msg)
		}
	} else {
		// Generate a parse tree and print it out.
		xpt := pt.GenParseTree(0)
		pt.TheTree = xpt
		xpt.DumpMtType(fp, 0, 0)

		fmt.Printf("----------------------------------- start execute ----------------------------------------------------\n")
		pt.ExecuteFunctions(0)

		fmt.Printf("----------------------------------- debug output ----------------------------------------------------\n")
		if true {
			fmt.Printf("%s\n", com.SVarI(xpt))
		}

		fmt.Printf("----------------------------------- output ----------------------------------------------------\n")
		// Repeated output of the tree - appears to be a leftover stress/timing loop.
		for i := 0; i < 1000000; i++ {
			pt.OutputTree0(fp, 0)
		}

		fmt.Printf("----------------------------------- errors ----------------------------------------------------\n")
		pp := pt.CollectErrorNodes(0)
		for ii, vv := range pp {
			fmt.Printf("Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
		}

		fmt.Printf("----------------------------------- final template results ----------------------------------------------------\n")
		pt.OutputTree(fp, 0)
	}

	if opts.Tokens != "" {
		fp.Close()
	}
}
func (s *Reader_TestSuite) TestLexie(c *C) {
	fmt.Fprintf(os.Stderr, "Test Matcher test from ../in/django3.lex file, %s\n", com.LF())

	com.DbOnFlags["db_DumpDFAPool"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["db_Matcher_02"] = true
	// com.DbOnFlags["db_NFA_LnNo"] = true
	com.DbOnFlags["match"] = true
	com.DbOnFlags["match_x"] = true
	com.DbOnFlags["output-machine"] = true
	// com.DbOnFlags["nfa3"] = true
	// com.DbOnFlags["nfa4"] = true
	// com.DbOnFlags["db_DFAGen"] = true
	// com.DbOnFlags["pbbuf02"] = true
	// com.DbOnFlags["DumpParseNodes2"] = true
	com.DbOnFlags["db_FlushTokenBeforeBefore"] = true
	com.DbOnFlags["db_FlushTokenBeforeAfter"] = true
	com.DbOnFlags["db_tok01"] = true
	com.DbOnFlags["in-echo-machine"] = true // Output machine

	lex := NewLexie()
	lex.NewReadFile("../in/django3.lex")

	for ii, vv := range Lexie02Data {
		if !vv.SkipTest {
			fmt.Printf("\n\nTest:%s ------------------------- Start --------------------------, %d, Input: -->>%s<<--\n", vv.Test, ii, vv.Inp)
			// r := strings.NewReader(vv.Inp)
			r := pbread.NewPbRead()
			r.PbString(vv.Inp)
			r.SetPos(1, 1, fmt.Sprintf("sf-%d.txt", ii)) // simulate file = sf-
			fmt.Printf("At: %s\n", com.LF())
			lex.MatcherLexieTable(r, "S_Init")
			fmt.Printf("At: %s\n", com.LF())
			if len(vv.Result) > 0 {
				fmt.Printf("At: %s\n", com.LF())
				if len(lex.TokList.TokenData) != len(vv.Result) {
					fmt.Printf("Lengths did not match, %s", com.SVarI(lex.TokList.TokenData))
					c.Check(len(lex.TokList.TokenData), Equals, len(vv.Result))
				} else {
					for i := 0; i < len(vv.Result); i++ {
						if vv.Result[i].StrTokNo != "" {
							c.Check(vv.Result[i].StrTokNo, Equals, in.Lookup_Tok_Name(int(lex.TokList.TokenData[i].TokNo)))
						} else {
							c.Check(vv.Result[i].TokNo, Equals, int(lex.TokList.TokenData[i].TokNo))
						}
						c.Check(vv.Result[i].Match, Equals, lex.TokList.TokenData[i].Match)
						if vv.Result[i].LineNo > 0 {
							c.Check(vv.Result[i].LineNo, Equals, lex.TokList.TokenData[i].LineNo)
						}
						if vv.Result[i].ColNo > 0 {
							c.Check(vv.Result[i].ColNo, Equals, lex.TokList.TokenData[i].ColNo)
						}
						if vv.Result[i].FileName != "" {
							c.Check(vv.Result[i].FileName, Equals, lex.TokList.TokenData[i].FileName)
						}
					}
				}
			}
			fmt.Printf("At: %s\n", com.LF())
			fmt.Printf("Test:%s ------------------------- End --------------------------\n\n", vv.Test)
		}
	}
}