func ProcessFileList(pt *test01.Parse2Type, inList []string, outFn string) (err error) { var fp *os.File if outFn != "" { fp, err = com.Fopen(outFn, "w") if err != nil { fmt.Fprintf(os.Stderr, "Fatal: Unable to create output file\n") err = fmt.Errorf("Fatal: Unable to create output file") return } defer fp.Close() } else { fp = os.Stdout } for _, fn := range inList { if !com.Exists(fn) { fmt.Fprintf(os.Stderr, "Fatal: Missing input file %s\n", fn) err = fmt.Errorf("Fatal: Missing input file %s", fn) return } } go func() { r := pbread.NewPbRead() for _, fn := range inList { r.OpenFile(fn) } pt.Lex.MatcherLexieTable(r, "S_Init") }() // ------------------------------------------------------ process tokens -------------------------------------------------------------- // Generate a parse tree and print out. xpt := pt.GenParseTree(0) pt.TheTree = xpt xpt.DumpMtType(test01.Dbf, 0, 0) pt.ExecuteFunctions(0) if false { fmt.Fprintf(test01.Dbf, "----------------------------------- debug output ----------------------------------------------------\n") fmt.Fprintf(test01.Dbf, "%s\n", com.SVarI(xpt)) } fmt.Fprintf(test01.Dbf, "----------------------------------- errors ----------------------------------------------------\n") pp := pt.CollectErrorNodes(0) for ii, vv := range pp { fmt.Fprintf(test01.Dbf, "Error [%3d]: msg=%s\n", ii, vv.ErrorMsg) } fmt.Fprintf(test01.Dbf, "----------------------------------- final template results ----------------------------------------------------\n") pt.OutputTree(test01.Dbf, 0) pt.OutputTree(fp, 0) return }
// --------------------------------------------------------------------------------------------------------------------------------------- func (pt *Parse2Type) ReadFileAndRun(fn, fn_o string) { go func() { r := pbread.NewPbRead() r.OpenFile(fn) pt.Lex.MatcherLexieTable(r, "S_Init") }() xpt := pt.GenParseTree(0) pt.TheTree = xpt pt.ExecuteFunctions(0) fmt.Printf("Tree Dump = %s\n", com.SVarI(xpt)) fp_o, err := com.Fopen(fn_o, "w") if err != nil { fmt.Fprintf(os.Stderr, "Error: %s\n", err) } else { pt.OutputTree(fp_o, 0) } return }
func main() { // ------------------------------------------------------ cli processing -------------------------------------------------------------- ifnList, err := flags.ParseArgs(&opts, os.Args) if err != nil { fmt.Printf("Invalid Command Line: %s\n", err) os.Exit(1) } test01.Dbf = os.Stdout if opts.Debug != "" { s := strings.Split(opts.Debug, ";") com.DbOnFlags[opts.Debug] = true for _, v := range s { if len(v) > 5 && v[0:4] == "out:" { test01.Dbf, _ = com.Fopen(v[4:], "w") } else { com.DbOnFlags[v] = true } } } fmt.Fprintf(test01.Dbf, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF()) // ------------------------------------------------------ Options -------------------------------------------------------------- // should be read in from a .json file! Options.MdExtensions = []string{".md", ".makrdown"} Options.ConvertMdToHtml = true Options.LeaveTmpFiles = false Options.TmpDir = "./tmp" if !com.Exists(Options.TmpDir) { os.Mkdir(Options.TmpDir, 0700) } // ------------------------------------------------------ setup Lexie -------------------------------------------------------------- pt := test01.NewParse2Type() pt.Lex = dfa.NewLexie() pt.Lex.SetChanelOnOff(true) // Set for getting back stuff via Chanel // ------------------------------------------------------ input machine -------------------------------------------------------------- if opts.LexPat != "" { if !com.Exists(opts.LexPat) { fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine. 
Missing file.\n") os.Exit(1) } pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex") } else { fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine.\n") os.Exit(1) } // -------------------------------------------------- start scanning process ---------------------------------------------------------- tp := strings.Split(opts.TemplatePath, ";") for _, tps := range tp { pt.OpenLibraries(tps) } if opts.Recursive { CopyInAssets(opts.SiteName, opts.BaseAssets, opts.SiteAssets, opts.Output, opts.User, opts.Theme, opts.ForcedCpFlag) //fmt.Printf("After CopyInAssets: Not Implemented Yet\n") //os.Exit(1) data2 := make(map[string]string) data2["site_name"] = opts.SiteName if opts.Input == "" && opts.SiteName != "" { opts.Input = com.Qt("./site/%{site_name%}/", data2) } if opts.Output == "" && opts.SiteName != "" { opts.Output = com.Qt("./www/%{site_name%}/", data2) } // --------------------------------------------------------------------------------------------------------------------------------- // 1. Do the rsync copy ops // 2. Process the set of fiels from -i -> -o // -- Process the static files ----------------------------------------------------------------------------------------------------- dp := make([]string, 0, 10) if opts.Input != "" { dp = append(dp, opts.Input) } else { for _, fn := range ifnList[1:] { dp = append(dp, fn) } } var fns, dirs []string for _, dir := range dp { if db_debug3 { fmt.Printf("Getting for %s\n", dir) } t_fns, t_dirs, err := com.GetFilenamesRecrusive(dir) if err != nil { if db_debug3 { fmt.Printf("Error: %s on %s\n", err, dir) } } else { fns = append(fns, t_fns...) dirs = append(dirs, t_dirs...) 
} } if db_debug3 { fmt.Printf("fns: %+v\n", fns) fmt.Printf("dirs: %+v\n", dirs) } mds := com.ReplaceEach(dirs, opts.Input, opts.Output) for _, aDir := range mds { if !com.Exists(aDir) { err := os.Mkdir(aDir, 0764) if err != nil { if db_debug3 { fmt.Printf("Error: Unable to create directory %s, error: %s\n", aDir, err) } } } } mf := com.ReplaceEach(fns, opts.Input, opts.Input+"/%{user%}/%{theme%}/") mO := com.ReplaceEach(fns, opts.Input, opts.Output) if db_debug3 { fmt.Printf("modded_files: %+v\n", mf) } final := make([]string, 0, len(mf)) data := make(map[string]string) has_err := false for _, mff := range mf { data["user"] = opts.User data["theme"] = opts.Theme mfmod := com.Qt(mff, data) if com.Exists(mfmod) { final = append(final, mfmod) } else { data["user"] = "" // data["theme"] = "A-Theme" mfmod := com.Qt(mff, data) if com.Exists(mfmod) { final = append(final, mfmod) } else { data["user"] = opts.User // data["user"] = "" data["theme"] = "" mfmod := com.Qt(mff, data) if com.Exists(mfmod) { final = append(final, mfmod) } else { fmt.Printf("Error: File Missing %s\n", mfmod) has_err = true } } } } if has_err { fmt.Printf("Error occured...\n") os.Exit(1) } if db_debug3 { fmt.Printf("Final Files:%s\n", final) } tmpFiles := make([]string, 0, len(final)) for ii, yy := range final { yyt := yy fmt.Printf("Process %s to %s\n", yy, mO[ii]) ext := filepath.Ext(yy) if Options.ConvertMdToHtml && com.InArray(ext, Options.MdExtensions) { // if ext == ".md" || ext == ".markdown" { fmt.Printf("\t Convetting from MD to HTML\n") in := yy //yyt = "./tmp/" + com.Basename(yy) + ".html" // old code - not using a Tempfile //err := ConvertMdToHtmlFile(in, yyt) yyt, err = ConvertMdToHtmlFileTmpFile(in) if err != nil { fmt.Printf("Error: In processing from markdown %s to HTML %s: %s\n", in, yyt, err) os.Exit(1) } mO[ii] = com.RmExt(mO[ii]) + ".html" tmpFiles = append(tmpFiles, yyt) } ProcessFileList(pt, []string{yyt}, mO[ii]) } if !Options.LeaveTmpFiles { for _, fn := range tmpFiles { 
os.Remove(fn) } } } else { inList := make([]string, 0, 10) if opts.Input != "" { inList = append(inList, opts.Input) } else { inList = ifnList[1:] } ProcessFileList(pt, inList, opts.Output) } }
func main() { var fp *os.File // ------------------------------------------------------ cli processing -------------------------------------------------------------- ifnList, err := flags.ParseArgs(&opts, os.Args) if err != nil { fmt.Printf("Invalid Command Line: %s\n", err) os.Exit(1) } if opts.Debug != "" { s := strings.Split(opts.Debug, ",") com.DbOnFlags[opts.Debug] = true for _, v := range s { com.DbOnFlags[v] = true } } if opts.Echo != "" { com.DbOnFlags["in-echo-machine"] = true // Output machine } fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF()) // ------------------------------------------------------ setup Lexie -------------------------------------------------------------- pt := NewParse2Type() pt.Lex = dfa.NewLexie() pt.Lex.SetChanelOnOff(true) // Set for getting back stuff via Chanel // ------------------------------------------------------ input machine -------------------------------------------------------------- if opts.LexPat != "" { pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex") } else if opts.ReadMachine != "" { fmt.Printf("Should input machine at this point\n") // xyzzy } else { fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n") os.Exit(1) } if opts.Machine != "" { fmt.Printf("Should output machine at this point\n") // xyzzy } // -------------------------------------------------- start scanning process ---------------------------------------------------------- if opts.Tokens != "" { fp, _ = com.Fopen(opts.Tokens, "w") } else { fp = os.Stdout } if opts.Input != "" { go func() { r := pbread.NewPbRead() r.OpenFile(opts.Input) pt.Lex.MatcherLexieTable(r, "S_Init") }() } else { go func() { r := pbread.NewPbRead() for _, fn := range ifnList[1:] { r.OpenFile(fn) } pt.Lex.MatcherLexieTable(r, "S_Init") }() } // ------------------------------------------------------ process tokens -------------------------------------------------------------- if false { // just print 
tokens out to check the scanning prcess and CLI options for msg := range pt.Lex.Message { fmt.Fprintf(fp, "%+v\n", msg) } } else { // Generate a parse tree and print out. xpt := pt.GenParseTree(0) pt.TheTree = xpt xpt.DumpMtType(fp, 0, 0) fmt.Printf("----------------------------------- start execute ----------------------------------------------------\n") pt.ExecuteFunctions(0) fmt.Printf("----------------------------------- debug output ----------------------------------------------------\n") if true { fmt.Printf("%s\n", com.SVarI(xpt)) } fmt.Printf("----------------------------------- output ----------------------------------------------------\n") for i := 0; i < 1000000; i++ { pt.OutputTree0(fp, 0) } fmt.Printf("----------------------------------- errors ----------------------------------------------------\n") pp := pt.CollectErrorNodes(0) for ii, vv := range pp { fmt.Printf("Error [%3d]: msg=%s\n", ii, vv.ErrorMsg) } fmt.Printf("----------------------------------- final template results ----------------------------------------------------\n") pt.OutputTree(fp, 0) } if opts.Tokens != "" { fp.Close() } }
func Test_PbBufer01(t *testing.T) { SymbolTable := make([]*MacroDefTestType, 0, 100) Define := func(name rune, body string) { for ii := 0; ii < len(SymbolTable); ii++ { // fmt.Printf("Search at %d, for %s\n", ii, string(name)) if SymbolTable[ii] != nil && SymbolTable[ii].Rn == name { SymbolTable[ii].Body = body return } } // fmt.Printf("Append\n") SymbolTable = append(SymbolTable, &MacroDefTestType{Rn: name, Body: body}) } ResetST := func() { // SymbolTable = make([]*MacroDefTestType, 0, 100) SymbolTable = SymbolTable[:1] } HaveMacro := func(name rune) (body string, found bool) { body = "" found = false for ii := 0; ii < len(SymbolTable); ii++ { if SymbolTable[ii] != nil && SymbolTable[ii].Rn == name { body, found = SymbolTable[ii].Body, true return } } return } for ii, vv := range Pb01Test { if !vv.SkipTest { // Implement a quick - fetch execute macine to test - the PbBuffer - commands/opcodes are the Cmd* constants above. ss := "" pb := NewPbRead() ResetST() for pc, ww := range vv.Actions { switch ww.OpCode { case CmdOpenFile: // Open a file , at the tail end of list of input pb.OpenFile(ww.Fn) com.DbPrintf("testCode", "Open file %s At: %s\n", ww.Fn, com.LF()) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdPbString: // Push back a string pb.PbString(ww.Data) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdPbRune: // Push back a rune pb.PbRune(ww.Rn) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdPbRuneArray: // Push back a rune array pb.PbRuneArray(ww.RnS) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdNextNChar: for ll := 0; ll < ww.X; ll++ { rn, done := pb.NextRune() if !done { ss = ss + string(rn) } com.DbPrintf("testCode", "Case 5: At: ->%s<- ll=%d ss >>>%s<<< %s\n", string(rn), ll, ss, com.LF()) } if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdPeek: rn, done := pb.PeekRune() if done || rn != ww.Rn { t.Errorf("%04s: Peek at [pc=%d] in test [%s] did not work, got %s expected %s, done=%v\n", pc, ii, string(rn), 
string(ww.Rn), done) } case CmdOutputToEof: com.DbPrintf("testCode", "All Done: ss >>>%s<<< before At: %s\n", ss, com.LF()) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } for rn, done := pb.NextRune(); !done; rn, done = pb.NextRune() { ss = ss + string(rn) } com.DbPrintf("testCode", "All Done: ss >>>%s<<< after At: %s\n", ss, com.LF()) case CmdPbByteArray: // Push back a byte array pb.PbByteArray([]byte(ww.Data)) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdPbFile: // Open file and push contents back onto input at head of list. (Macro file, Include, Require) pb.PbFile(ww.Fn) com.DbPrintf("testCode", "Pb file %s At: %s\n", ww.Fn, com.LF()) if com.DbOn("testDump") { pb.Dump01(os.Stdout) } case CmdFileSeen: fs := pb.FileSeen(ww.Fn) if fs != ww.FileSeenFlag { t.Errorf("%04s: Peek at [pc=%d] in test [%s] did not work, got %v expected %s for file seen flagv\n", pc, ii, fs, ww.FileSeenFlag) } case CmdGetPos: // Check get file name ln, cn, fn := pb.GetPos() com.DbPrintf("testCode", "fn=%s ln=%d cn=%d\n", fn, ln, cn) if ln != ww.LineNo { t.Errorf("%04s: %d: did not match line no Expected ->%d<-, Got ->%d<-\n", vv.Test, pc, ww.LineNo, ln) } if cn != ww.ColNo { t.Errorf("%04s: %d: did not match col no Expected ->%d<-, Got ->%d<-\n", vv.Test, pc, ww.ColNo, cn) } if fn != ww.Fn { t.Errorf("%04s: %d: did not match file name Expected ->%s<-, Got ->%s<-\n", vv.Test, pc, ww.Fn, fn) } case CmdSetPos: // Check get file name pb.SetPos(ww.LineNo, ww.ColNo, ww.Fn) case CmdResetST: // Reset symbol table ResetST() case CmdMacroProc: // Apply 1 char macros to input and process for rn, done := pb.NextRune(); !done; rn, done = pb.NextRune() { if m_body, m_found := HaveMacro(rn); m_found { pb.PbString(m_body) } else { ss = ss + string(rn) } } case CmdDefineMacro: // Define Define(ww.Rn, ww.Data) case CmdDumpBuffer: // Dump the buffer - debuging if ww.Fn == "" { pb.Dump01(os.Stdout) } else { fp, err := com.Fopen(ww.Fn, "w") if err == nil { pb.Dump01(fp) fp.Close() } else { 
pb.Dump01(os.Stdout) t.Errorf("%04s: Unable to open file for output ->%s<-, error: %s\n", vv.Test, ww.Fn, err) } } case CmdResetOutput: // Reset output ss = "" case CmdPushBackXCopies: // Special test to pub back more than buffer of 'x' x := strings.Repeat(ww.Data, ww.X) pb.PbString(x) } } if ss != vv.Results { t.Errorf("%04s: did not match Expected ->%s<-, Got ->%s<-\n", vv.Test, vv.Results, ss) } else { com.DbPrintf("testCode", "%04s: Passed ------------------------------------------------------------------------------------------------\n\n", vv.Test) } } } }
func main() { var fp *os.File ifnList, err := flags.ParseArgs(&opts, os.Args) if err != nil { fmt.Printf("Invalid Command Line: %s\n", err) os.Exit(1) } if opts.Debug != "" { s := strings.Split(opts.Debug, ",") com.DbOnFlags[opts.Debug] = true for _, v := range s { com.DbOnFlags[v] = true } } if opts.Echo != "" { com.DbOnFlags["in-echo-machine"] = true // Output machine } fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF()) lex := dfa.NewLexie() if opts.LexPat != "" { lex.NewReadFile(opts.LexPat) // lex.NewReadFile("../in/django3.lex") } else if opts.ReadMachine != "" { fmt.Printf("Should input machine at this point\n") // xyzzy } else { fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n") os.Exit(1) } if opts.Machine != "" { fmt.Printf("Should output machine at this point\n") // xyzzy } if opts.Tokens != "" { fp, _ = com.Fopen(opts.Tokens, "w") } else { fp = os.Stdout } if opts.Input != "" { s := in.ReadFileIntoString(opts.Input) r := strings.NewReader(s) lex.MatcherLexieTable(r, "S_Init") lex.TokList.TokenData = CategorizeToken(lex.TokList.TokenData) lex.DumpTokenBuffer(fp) } else { for ii, fn := range ifnList[1:] { s := in.ReadFileIntoString(fn) r := strings.NewReader(s) lex.MatcherLexieTable(r, "S_Init") lex.TokList.TokenData = CategorizeToken(lex.TokList.TokenData) fmt.Fprintf(fp, "%d: %s -----Start---------------------------------------------------------------------------------\n", ii, fn) lex.DumpTokenBuffer2(fp) fmt.Fprintf(fp, "%d: %s -----End-----------------------------------------------------------------------------------\n\n\n", ii, fn) } } if opts.Tokens != "" { fp.Close() } }
func Test_St01(t *testing.T) { SymbolTable := NewSymbolTable() for ii, vv := range Pb01Test { _ = ii if !vv.SkipTest { // Implement a quick - fetch execute macine to test - the SymbolTable for pc, ww := range vv.Actions { switch ww.OpCode { case CmdInsert: SymbolTable.DefineSymbol(ww.Item, ww.Data, []string{}) case CmdDefRW: SymbolTable.DefineReservedWord(ww.Item, ww.No) case CmdLookup: as, err := SymbolTable.LookupSymbol(ww.Item) if err == nil { if db_test01 { fmt.Printf("%s: found, value %s\n", ww.Item, as.Body) } if ww.Data == "" { t.Errorf("%04s: %d error, expected to have symbol in table, missing, %s\n", vv.Test, pc, ww.Item) } if as.Body != ww.Data { t.Errorf("%04s: %d error, expected value %s got %s invalid for, %s\n", vv.Test, pc, ww.Data, as.Body, ww.Item) } } else { if ww.Data != "" { t.Errorf("%04s: %d error, expected to NOT have symbol, found it, %s\n", vv.Test, pc, ww.Item) } if db_test01 { fmt.Printf("%s: not found.\n", ww.Item) } } case CmdLookupRW: as, err := SymbolTable.LookupSymbol(ww.Item) if err == nil { if db_test01 { fmt.Printf("%s: found, value %s\n", ww.Item, as.Body) } if as.FxId != ww.No { t.Errorf("%04s: %d error, expected value %d got %d invalid for, %s\n", vv.Test, pc, ww.No, as.FxId, ww.Item) } } else { if db_test01 { fmt.Printf("%s: not found.\n", ww.Item) } } case CmdDelete: SymbolTable.UnDefineSymbol(ww.Item) case CmdDump: if ww.Data == "" { SymbolTable.Dump01(os.Stdout) } else { fp, err := com.Fopen(ww.Data, "w") if err == nil { SymbolTable.Dump01(fp) fp.Close() } else { SymbolTable.Dump01(os.Stdout) t.Errorf("%04s: Unable to open file for output ->%s<-, error: %s\n", vv.Test, ww.Data, err) } } } } } } }
// NewReadFile reads a lexer machine definition from path and compiles it:
// for every machine in the definition it builds an NFA from the rule
// patterns, finalizes it, converts it to a DFA, and appends both to
// lex.NFA_Machine / lex.DFA_Machine.  With the "db_Matcher_02" debug flag
// on, each DFA is also dumped as JSON/GraphViz/SVG under ../ref/.
func (lex *Lexie) NewReadFile(path string) {
	lex.Im = in.ImReadFile(path)
	lex.NFA_Machine = make([]*nfa.NFA_PoolType, 0, 100)
	lex.DFA_Machine = make([]*DFA_PoolType, 0, 100)

	// Example of what a DefsAre entry looks like (captured from a debug run):
	// vv=in.ImDefinedValueType {Seq:1 WhoAmI:ReservedWords NameValueStr:map[and:Tok_L_AND not:Tok_not as:Tok_as in:Tok_in bor:Tok_B_OR band:Tok_B_AND xor:Tok_XOR or:Tok_L_OR true:Tok_true false:Tok_false export:Tok_export] NameValue:map[and:4 true:32 as:34 bor:42 band:41 xor:64 or:5 false:33 not:31 export:35 in:28] Reverse:map[5:or 32:true 42:bor 31:not 41:band 35:export 33:false 28:in 64:xor 4:and 34:as] SeenAt:map[bor:{LineNo:[39] FileName:[unk-file]} band:{LineNo:[39] FileName:[unk-file]} and:{LineNo:[39] FileName:[unk-file]} true:{LineNo:[39] FileName:[unk-file]} export:{LineNo:[39] FileName:[unk-file]} in:{LineNo:[39] FileName:[unk-file]} as:{LineNo:[39] FileName:[unk-file]} or:{LineNo:[39] FileName:[unk-file]} false:{LineNo:[39] FileName:[unk-file]} not:{LineNo:[39 39] FileName:[unk-file unk-file]} xor:{LineNo:[39] FileName:[unk-file]}]}, File: /Users/corwin/Projects/pongo2/lexie/dfa/match.go LineNo:260
	// This loop only logs the definitions; nothing is built from them here.
	for ii, vv := range lex.Im.Def.DefsAre { // ["ReservedWords"] { // func (st *SymbolTable) DefineReservedWord(name string, fxid int) (ss *SymbolType) {
		_ = ii
		_ = vv
		com.DbPrintf("dfa5", "vv=%T %+v, %s\n", vv, vv, com.LF())
	}

	// Build one NFA+DFA pair per machine in the input definition.
	for ii, vv := range lex.Im.Machine {
		nm := vv.Name
		Nfa := nfa.NewNFA_Pool()
		Cur := Nfa.GetNFA()
		Nfa.InitState = Cur

		// Add every rule of this machine to the NFA pool.
		for jj, ww := range vv.Rules {
			rVx := ww.Rv
			if ww.ReservedWord {
				com.DbPrintf("dfa5", "This rule rv=%d is a reserved word rule, AAbbCC\n", rVx)
			}
			ww_A := convRuleToActionFlag(ww)
			if ww.Repl {
				rVx = 9900 // 9900 is replace
				com.DbPrintf("match", "###################################### ww.Replace: ii=%d jj=%d ->%s<-, %s\n", ii, jj, ww.ReplString, com.LF())
			}
			cur := -1
			// PatternType 2 is the end-of-file pseudo-pattern; everything
			// else is a regular-expression rule.
			if ww.PatternType == 2 {
				com.DbPrintf("db_Matcher_02", "ADDING AT %2d RE: %-30s (Rv:%2d, final=%4d), %s\n", jj, "<M_EOF>", ww.Rv, rVx, com.LF())
				cur = Nfa.AddReInfo(re.X_EOF, "", jj+1, rVx, nfa.InfoType{Action: ww_A, NextState: ww.Call})
			} else {
				com.DbPrintf("db_Matcher_02", "M= ->%s<- Adding at %2d RE: %-30s (Rv:%2d, final=%4d), %s\n", ww.Pattern, jj, ww.Pattern, ww.Rv, rVx, com.LF())
				cur = Nfa.AddReInfo(ww.Pattern, "", jj+1, rVx, nfa.InfoType{Action: ww_A, NextState: ww.Call, ReplStr: ww.ReplString})
			}
			if ww.ReservedWord {
				Nfa.SetReservedWord(cur)
			}
		}

		com.DbPrintf("match", "BuildDFA_2: Nfa.Sigma Before Finialize->%s<-\n", Nfa.Sigma)
		if com.DbOn("db_Matcher_02") {
			com.DbPrintf("match", "NFA for (Before Finialize) ->%s<-\n", nm)
			Nfa.DumpPool(false)
		}
		Nfa.FinializeNFA()
		com.DbPrintf("match", "BuildDFA_2: Nfa.Sigma ->%s<-\n", Nfa.Sigma)
		if com.DbOn("db_Matcher_02") {
			com.DbPrintf("match", "Final NFA for ->%s<-\n", nm)
			Nfa.DumpPool(false)
		}
		lex.NFA_Machine = append(lex.NFA_Machine, Nfa)

		// Convert the finalized NFA to a DFA and store it.
		Dfa := NewDFA_Pool()
		Dfa.ConvNDA_to_DFA(Nfa)
		if com.DbOn("db_Matcher_02") {
			com.DbPrintf("match", "Final DFA for ->%s<-\n", nm)
			Dfa.DumpPool(false)
		}
		lex.DFA_Machine = append(lex.DFA_Machine, Dfa)

		// Debug artifacts: dump the DFA as JSON and GraphViz, then render
		// an SVG.  NOTE(review): the dot path is hard-coded to
		// /usr/local/bin/dot — fails silently (debug log only) elsewhere.
		if com.DbOn("db_Matcher_02") {
			last := len(lex.DFA_Machine) - 1
			newFile := fmt.Sprintf("../ref/mmm_%s_%d.tst", "machine", last)
			gvFile := fmt.Sprintf("../ref/mmm_%s_%d.gv", "machine", last)
			svgFile := fmt.Sprintf("../ref/mmm_%s_%d.svg", "machine", last)
			fp, _ := com.Fopen(newFile, "w")
			lex.DFA_Machine[last].DumpPoolJSON(fp, fmt.Sprintf("Lex-Machine-%d", last), 1)
			fp.Close()
			gv, _ := com.Fopen(gvFile, "w")
			lex.DFA_Machine[last].GenerateGVFile(gv, fmt.Sprintf("Lex-Machine-%d", last), 1)
			gv.Close()
			out, err := exec.Command("/usr/local/bin/dot", "-Tsvg", "-o"+svgFile, gvFile).Output()
			if err != nil {
				com.DbPrintf("match", "%sError%s from dot, %s, %s\n", com.Red, com.Reset, err, com.LF())
				com.DbPrintf("match", "Output: %s\n", out)
			}
		}
	}
}
func (s *LexieTestSuite) TestLexie(c *C) { // return fmt.Fprintf(os.Stderr, "Test Parsing of REs, Test genration of NFAs %s\n", com.LF()) com.DbOnFlags["db_NFA"] = true com.DbOnFlags["db_NFA_LnNo"] = true com.DbOnFlags["db_DumpPool"] = true com.DbOnFlags["parseExpression"] = true com.DbOnFlags["CalcLength"] = true // Add a test for any issue c.Check(42, Equals, 42) // c.Assert("nope", Matches, "hel.*there") fmt.Printf("**** In Test Issues\n") //x := test7GenDFA() //c.Check(x, Equals, 0) n_err := 0 n_skip := 0 for ii, vv := range Lexie01Data { if !vv.SkipTest { fmt.Printf("\n\n--- %d Test: %s -----------------------------------------------------------------------------\n\n", ii, vv.Test) Pool := NewNFA_Pool() Cur := Pool.GetNFA() Pool.InitState = Cur Pool.AddReInfo(vv.Re, "", 1, vv.Rv, InfoType{}) Pool.Sigma = Pool.GenerateSigma() if false { com.DbPrintf("test7", "Pool=%s\n", com.SVarI(Pool)) } Pool.DumpPool(false) Pool.DumpPoolJSON(os.Stdout, vv.Re, vv.Rv) fmt.Printf("Sigma: ->%s<-\n", Pool.Sigma) newFile := fmt.Sprintf("../ref/nfa_%s.tst", vv.Test) cmpFile := fmt.Sprintf("../ref/nfa_%s.ref", vv.Test) gvFile := fmt.Sprintf("../ref/nfa_%s.gv", vv.Test) svgFile := fmt.Sprintf("../ref/nfa_%s.svg", vv.Test) fp, _ := com.Fopen(newFile, "w") Pool.DumpPoolJSON(fp, vv.Re, vv.Rv) fp.Close() newData, err := ioutil.ReadFile(newFile) if err != nil { panic("unable to read file, " + cmpFile) } if com.Exists(cmpFile) { ref, err := ioutil.ReadFile(cmpFile) if err != nil { panic("unable to read file, " + cmpFile) } if string(ref) != string(newData) { c.Check(string(newData), Equals, string(ref)) fmt.Printf("%sError%s: Test case %s failed to match\n", com.Red, com.Reset, vv.Test) n_err++ } } else { n_skip++ } gv, _ := com.Fopen(gvFile, "w") Pool.GenerateGVFile(gv, vv.Re, vv.Rv) gv.Close() out, err := exec.Command("/usr/local/bin/dot", "-Tsvg", "-o"+svgFile, gvFile).Output() if err != nil { fmt.Printf("Error from dot, %s, %s\n", err, com.LF()) fmt.Printf("Output: %s\n", out) } } } if 
n_skip > 0 { fmt.Fprintf(os.Stderr, "%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset) com.DbPrintf("debug", "\n\n%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset) } if n_err > 0 { fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset) com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset) } else { fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset) com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset) } }
func (s *NFA_Multi_Part_TestSuite) TestLexie(c *C) { // return fmt.Fprintf(os.Stderr, "Test NFA Multi-Part RE - NFA test %s\n", com.LF()) n_err := 0 n_skip := 0 // ------------------------- ------------------------- --------------------------------------- --------------------------------------- // Test as sections // ------------------------- ------------------------- --------------------------------------- --------------------------------------- for ii, vv := range NFATest_02Data { fmt.Printf("\n\n--- %2d Test: %4s -------------------------------------------------------------------------------\n", ii, vv.Test) Pool := NewNFA_Pool() Cur := Pool.GetNFA() Pool.InitState = Cur for jj, ww := range vv.Data { fmt.Printf("\n\n--- %2d Test: %4s Part %2d ----------------------------------------------------------------------\n\n", ii, vv.Test, jj) // Add in components Pool.AddReInfo(ww.Re, "", 1, ww.Rv, InfoType{}) // Dum out parsed REs // Dum out parsed NFAs along the way } Pool.FinializeNFA() // Fnialize Pool.DumpPool(false) // Dump out NFA - check it. 
// ------------------------- --------------------------------------- --------------------------------------- // Test these also // func (nn *NFA_PoolType) DeleteRe(oldRe string) { // func (nn *NFA_PoolType) ChangeRe(oldRe string, newRe string) { // ------------------------- --------------------------------------- --------------------------------------- newFile := fmt.Sprintf("../ref/n2_%s.tst", vv.Test) cmpFile := fmt.Sprintf("../ref/n2_%s.ref", vv.Test) gvFile := fmt.Sprintf("../ref/n2_%s.gv", vv.Test) svgFile := fmt.Sprintf("../ref/n2_%s.svg", vv.Test) fp, _ := com.Fopen(newFile, "w") Pool.DumpPoolJSON(fp, vv.Test, 0) fp.Close() newData, err := ioutil.ReadFile(newFile) if err != nil { panic("unable to read file, " + cmpFile) } if com.Exists(cmpFile) { ref, err := ioutil.ReadFile(cmpFile) if err != nil { panic("unable to read file, " + cmpFile) } if string(ref) != string(newData) { c.Check(string(newData), Equals, string(ref)) fmt.Printf("%sError%s: Test case %s failed to match\n", com.Red, com.Reset, vv.Test) n_err++ } } else { n_skip++ } gv, _ := com.Fopen(gvFile, "w") Pool.GenerateGVFile(gv, vv.Test, 0) gv.Close() _, err = exec.Command("/usr/local/bin/dot", "-Tsvg", "-o"+svgFile, gvFile).Output() if err != nil { fmt.Printf("Error from dot, %s\n", err) } } // ------------------------- ------------------------- --------------------------------------- --------------------------------------- // Test as a single machine // ------------------------- ------------------------- --------------------------------------- --------------------------------------- if n_skip > 0 { fmt.Fprintf(os.Stderr, "%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset) com.DbPrintf("debug", "\n\n%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset) } if n_err > 0 { fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset) com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, 
com.Reset) } else { fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset) com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset) } _ = n_skip }
// TestLexie exercises NFA_PoolType.LambdaClosure: it builds the NFA for
// Lexie01Data[11] and checks that the lambda closure of three chosen state
// sets equals the expected set (compared via com.CompareSlices).
func (s *LambdaClosureTestSuite) TestLexie(c *C) {
	// return
	fmt.Fprintf(os.Stderr, "Test NFA generation from REs, %s\n", com.LF())
	n_err := 0
	com.DbOnFlags["db_NFA"] = true
	com.DbOnFlags["db_NFA_LnNo"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["parseExpression"] = true

	// The fixture used below — Lexie01Data[11]:
	// {Test: "0011", Re: `a(bcd)*(ghi)+(jkl)*X`, Rv: 1011},
	//
	Pool := NewNFA_Pool()
	Cur := Pool.GetNFA()
	Pool.InitState = Cur
	vv := Lexie01Data[11]
	Pool.AddReInfo(vv.Re, "", 1, vv.Rv, InfoType{})
	Pool.Sigma = Pool.GenerateSigma()
	fmt.Printf("\n\nRe: %s\n", vv.Re)
	Pool.DumpPool(false)

	// -------------------------------------- test 1 -----------------------------------
	// Closure of {4,1,5} is expected to be exactly {4,1,5}.
	r1 := Pool.LambdaClosure([]int{4, 1, 5})
	fmt.Printf("\n\nr1(4,1,5)=%v\n", r1)
	if len(com.CompareSlices([]int{4, 1, 5}, r1)) != 0 {
		fmt.Printf("%sError%s: Test case 1 failed to match\n", com.Red, com.Reset)
		n_err++
	}
	c.Check(len(com.CompareSlices([]int{4, 1, 5}, r1)), Equals, 0)

	// -------------------------------------- test 2 -----------------------------------
	// NOTE(review): the printed label says r2(5,9,12,9,13) but the input
	// is {12,9,13} — the label appears stale; confirm intended input.
	r2 := Pool.LambdaClosure([]int{12, 9, 13})
	fmt.Printf("\n\nr2(5,9,12,9,13)=%v\n", r2)
	if len(com.CompareSlices([]int{12, 9, 13}, r2)) != 0 {
		fmt.Printf("%sError%s: Test case 2 failed to match\n", com.Red, com.Reset)
		n_err++
	}
	c.Check(len(com.CompareSlices([]int{12, 9, 13}, r2)), Equals, 0)

	// -------------------------------------- test 3 -----------------------------------
	// NOTE(review): identical input and expectation to test 2 — looks like
	// a copy/paste duplicate; confirm whether a different set was intended.
	r3 := Pool.LambdaClosure([]int{12, 9, 13})
	fmt.Printf("\n\nr3(12,9,13)=%v\n", r3)
	if len(com.CompareSlices([]int{12, 9, 13}, r3)) != 0 {
		fmt.Printf("%sError%s: Test case 3 failed to match\n", com.Red, com.Reset)
		n_err++
	}
	c.Check(len(com.CompareSlices([]int{12, 9, 13}, r3)), Equals, 0)

	// ------------------------- understand test runner ---------------------------------------
	// Dead code (if false) kept for reference on how gocheck's test log works.
	// NOTE(review): the output filename ",g" looks like a typo, and fp is
	// never closed — harmless while unreachable, fix if ever enabled.
	if false {
		c.Check(1, Equals, 0)
		// c.Assert(2, Equals, 0) // Failure of an assert ends test (exit)
		sss := c.GetTestLog()
		fp, err := com.Fopen(",g", "w")
		c.Check(err, Equals, nil)
		fmt.Fprintf(fp, "c.GetTestLog: ->%s<-\n", sss)
	}

	// ------------------------- eval results now ---------------------------------------
	if n_err > 0 {
		fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
		com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
	} else {
		fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset)
		com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset)
	}
}