// n from beginning to m from end.
func (mt *MtType) EvalExpr(Context *eval.ContextType, n, m int) bool {
	m = len(mt.SVal) - m // Convert to Pos
	sv := mt.SVal[n:m]   // Slice of params to eval.
	fmt.Printf("mt.EvalExpr - TOP: n=%d m=%d, Range [n:m]\n", n, m)
	fmt.Printf("mt.EvalExpr - TOP: evaluate sv=%+v (Subset) ----------------------------------------------------- \n", sv)
	fmt.Printf("mt.EvalExpr - TOP: evaluate mt.SVal=%+v (Orig) ----------------------------------------------------- \n", mt.SVal)

	// xyzzy -- temporary -- incomplete!!!!!!!!!!!!!!!!!!!!!!
	evalData := &eval.EvalType{
		Pos:           0,
		Ctx:           Context,
		Mm:            mt.TokVal[n:m], // []tok.Token
		PrintErrorMsg: true,
	}

	// hot patch - xyzzy
	//if evalData.Mm[0].Match == "[" {
	//	evalData.Mm[0].TokNo = 38
	//}
	// hot patch - xyzzy

	evalData.InitFunctions()
	fmt.Printf("INPUT m=%d n=%d, %s ----------------------------------------------------- \n", m, n, com.SVarI(evalData))
	tr := evalData.PresTop()
	fmt.Printf("BOTTOM: %s ----------------------------------------------------- \n", com.SVarI(tr))

	//s := sv[0]
	//v, t, _ := Context.GetFromContext(s)
	//fmt.Printf("At: %s - in EvalExpr, v=%v t=%v for >%s<-\n", com.LF(), v, t, s)

	mt.DataType = tr.DataType
	mt.XValue = tr.CurValue
	fmt.Printf("At bottom of EvalExpr - Type = %d == %T, value = %v\n", tr.DataType, tr.CurValue, tr.CurValue)
	return true
}
// n from beginning to m from end.
func (mt *MtType) EvalExpr(Context *eval.ContextType, n, m int) bool {
	m = len(mt.SVal) - m // Convert to Pos
	sv := mt.SVal[n:m]   // Slice of params to eval.
	fmt.Printf("mt.EvalExpr - TOP: evaluate sv=%+v ----------------------------------------------------- \n", sv)
	fmt.Printf("mt.EvalExpr - TOP: evaluate mt.SVal=%+v ----------------------------------------------------- \n", mt.SVal)

	// xyzzy -- temporary -- incomplete!!!!!!!!!!!!!!!!!!!!!!
	evalData := &eval.EvalType{
		Pos: 0,
		Ctx: Context,
		Mm:  mt.TokVal[n:m], // []tok.Token
	}
	fmt.Printf("INPUT m=%d n=%d, %s ----------------------------------------------------- \n", m, n, com.SVarI(evalData))
	tr := evalData.Pres2()
	fmt.Printf("BOTTOM: %s ----------------------------------------------------- \n", com.SVarI(tr))

	s := sv[0]
	v, t, _ := Context.GetFromContext(s)
	fmt.Printf("At: %s - in EvalExpr, v=%v t=%v for >%s<-\n", com.LF(), v, t, s)
	// xyzzy nil, 9 -- 9 is error, not found
	if t == eval.CtxType_Bool {
		fmt.Printf("Setting bool to true\n")
		mt.DataType = t
		mt.XValue = v
	}
	return true
}
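// The sketch below is illustrative only (the helper name and sample data are
// assumptions, not part of the original code). It shows the index convention
// used by EvalExpr above: n counts forward from the start of mt.SVal, m counts
// back from the end, and m is converted to an absolute position before slicing.
func evalExprSliceExample() {
	sval := []string{"{%", "if", "x", "%}"} // stand-in for mt.SVal
	n, m := 1, 1                            // drop the first and the last entry
	m = len(sval) - m                       // convert "from end" to a position: m = 3
	fmt.Printf("evaluating %v\n", sval[n:m]) // prints: evaluating [if x]
}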
func (st *SymbolTable) DumpSymbolTable(fo io.Writer) {
	for ii, vv := range st.Symbols {
		fmt.Fprintf(fo, "\t[%s] Body=%s SymType=%d FxId=%d\n", ii, vv.Body, vv.SymType, vv.FxId)
		if vv.SymType == gen.Tok_Template {
			fmt.Fprintf(fo, "\t\tTemplate\n")
			mtv := vv.AnyData.(*mt.MtType)
			fmt.Fprintf(fo, "AnyData = %s\n", com.SVarI(mtv))
		}
	}
}
func ProcessFileList(pt *test01.Parse2Type, inList []string, outFn string) (err error) {
	var fp *os.File
	if outFn != "" {
		fp, err = com.Fopen(outFn, "w")
		if err != nil {
			fmt.Fprintf(os.Stderr, "Fatal: Unable to create output file\n")
			err = fmt.Errorf("Fatal: Unable to create output file")
			return
		}
		defer fp.Close()
	} else {
		fp = os.Stdout
	}

	for _, fn := range inList {
		if !com.Exists(fn) {
			fmt.Fprintf(os.Stderr, "Fatal: Missing input file %s\n", fn)
			err = fmt.Errorf("Fatal: Missing input file %s", fn)
			return
		}
	}

	go func() {
		r := pbread.NewPbRead()
		for _, fn := range inList {
			r.OpenFile(fn)
		}
		pt.Lex.MatcherLexieTable(r, "S_Init")
	}()

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
	// Generate a parse tree and print out.
	xpt := pt.GenParseTree(0)
	pt.TheTree = xpt
	xpt.DumpMtType(test01.Dbf, 0, 0)
	pt.ExecuteFunctions(0)
	if false {
		fmt.Fprintf(test01.Dbf, "----------------------------------- debug output ----------------------------------------------------\n")
		fmt.Fprintf(test01.Dbf, "%s\n", com.SVarI(xpt))
	}
	fmt.Fprintf(test01.Dbf, "----------------------------------- errors ----------------------------------------------------\n")
	pp := pt.CollectErrorNodes(0)
	for ii, vv := range pp {
		fmt.Fprintf(test01.Dbf, "Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
	}
	fmt.Fprintf(test01.Dbf, "----------------------------------- final template results ----------------------------------------------------\n")
	pt.OutputTree(test01.Dbf, 0)
	pt.OutputTree(fp, 0)
	return
}
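// Hypothetical usage sketch for ProcessFileList (the file names are made up):
// render the listed template files and write the result to out.html; an empty
// output name would fall back to stdout.
func processFileListExample(pt *test01.Parse2Type) {
	if err := ProcessFileList(pt, []string{"in/page.tmpl"}, "out.html"); err != nil {
		fmt.Fprintf(os.Stderr, "%s\n", err)
	}
}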
func ParsePlist(pl string) (aa []string) {
	t1 := pl_re.FindAllStringSubmatch(pl, -1)
	if t1 != nil {
		com.DbPrintf("in", "t1=%s\n", com.SVarI(t1))
		for _, vv := range t1 {
			if len(vv) > 3 && vv[2] != "" {
				aa = append(aa, vv[2])
			}
		}
	}
	return
}
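// Minimal usage sketch for ParsePlist, mirroring the commented-out call at the
// end of Test_ParseAction; the exact output depends on pl_re, so the expected
// result in the comment is an assumption.
func parsePlistExample() {
	aa := ParsePlist("abc, def, ghi")
	fmt.Printf("aa=%v\n", aa) // expected to be the individual names: [abc def ghi]
}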
func (lr *LexReType) DumpParseNodes() {
	com.DbPrintf("DumpParseNodes", "\nDumpParseNodes: At %s\n", com.LF())
	for ii, vv := range lr.Tree.Children {
		com.DbPrintf("DumpParseNodes", "at %s [step %d] ", com.LF(), ii)
		com.DbPrintf("DumpParseNodes", "Item: [%s] %d=%s, N-Children=%d\n", vv.Item, vv.LR_Tok, NameOfLR_TokType(vv.LR_Tok), len(vv.Children))
		if len(vv.Children) > 0 {
			lr.DumpParseNodesChild(vv.Children, 1)
		}
	}
	com.DbPrintf("DumpParseNodes", "DumpParseNodes: Done %s\n\n", com.LF())
	com.DbPrintf("DumpParseNodesX", "DumpParseNodes: %s\n\n", com.SVarI(lr.Tree))
}
// Rv(Name) Ignore(Xxx)
func ParseActionItem(act string) (aa string, pp string) {
	aa, pp = "", ""
	t1 := fx_re.FindAllStringSubmatch(act, -1)
	if t1 != nil {
		com.DbPrintf("in", "t1=%s\n", com.SVarI(t1))
		aa = t1[0][1]
		if len(t1[0]) > 1 {
			pp = t1[0][2]
		}
	} else {
		aa = act
	}
	return
}
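// Minimal usage sketch for ParseActionItem, based on the "Rv(Name)" form shown
// in the comment above; the expected output is an assumption since it depends
// on fx_re.
func parseActionItemExample() {
	aa, pp := ParseActionItem("Rv(Name)")
	fmt.Printf("action=%s param=%s\n", aa, pp) // presumably: action=Rv param=Name
}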
// Tok_Name=1 Tok_Name "T O K"
func ParseNameValue(nv string) (name string, value string) {
	name, value = "", ""
	t1 := pnv_re.FindAllStringSubmatch(nv, -1)
	com.DbPrintf("in", "t1=%s\n", com.SVarI(t1))
	if t1 != nil && len(t1[0]) > 0 {
		name = t1[0][1]
		if len(t1[0]) > 3 {
			value = t1[0][3]
		}
	} else {
		name = nv
	}
	return
}
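// Minimal usage sketch for ParseNameValue, based on the "Tok_Name=1" form in
// the comment above; the expected values are assumptions since they depend on
// pnv_re.
func parseNameValueExample() {
	name, value := ParseNameValue("Tok_Name=1")
	fmt.Printf("name=%s value=%s\n", name, value) // presumably: name=Tok_Name value=1

	name, value = ParseNameValue("Tok_Name") // no "=value" part
	fmt.Printf("name=%s value=%s\n", name, value) // value stays ""
}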
// test Replace sub-tree -------------------------------------------------------------------------------------------------------------------------------
// func ReplaceBlocksWithNew(search_in_tree, new_block *MtType) {
func Test_Mt02(t *testing.T) {
	var bob *MtType
	bob = NewMtType(1, "bob")
	bob.List = append(bob.List, NewMtType(1, "bob.1"))
	bob.List = append(bob.List, NewMtType(1, "bob.2"))
	bob.List = append(bob.List, NewMtType(1, "bob.3"))
	bob.List[2].List = append(bob.List[2].List, NewMtType(gen.Fx_block, "bob.3.1"))
	bob.List[2].List[0].SVal = make([]string, 1, 1)
	bob.List[2].List[0].FxId = gen.Fx_block
	bob.List[2].List[0].SVal[0] = "mike"
	bob.List[2].List[0].List = append(bob.List[2].List[0].List, NewMtType(1, "bob.3.1.1"))
	bob.List[2].List[0].List[0].HTML_Output = "Original Chunk 1"
	bob.List[2].List[0].List = append(bob.List[2].List[0].List, NewMtType(1, "bob.3.1.2"))
	bob.List[2].List[0].List[1].HTML_Output = "Original Chunk 2"
	bob.List[2].List[0].List = append(bob.List[2].List[0].List, NewMtType(1, "bob.3.1.3"))
	bob.List[2].List = append(bob.List[2].List, NewMtType(1, "bob.3.2"))
	bob.List[2].List = append(bob.List[2].List, NewMtType(1, "bob.3.3"))
	bob.List[2].List = append(bob.List[2].List, NewMtType(1, "bob.3.4"))
	bob.List[2].List = append(bob.List[2].List, NewMtType(1, "bob.3.5"))
	bob.List = append(bob.List, NewMtType(1, "bob.4"))
	bob.List = append(bob.List, NewMtType(1, "bob.5"))
	fmt.Printf("bob before change =%s\n\n", com.SVarI(bob))

	var repl *MtType
	repl = NewMtType(gen.Fx_block, "bob")
	repl.SVal = make([]string, 1, 1)
	repl.SVal[0] = "mike"
	repl.List = append(repl.List, NewMtType(1, "repl.1"))
	repl.List = append(repl.List, NewMtType(1, "repl.1.1"))
	repl.List[0].HTML_Output = "Replacement Text 1.1"
	repl.List = append(repl.List, NewMtType(1, "repl.1.2"))
	fmt.Printf("repl before change =%s\n\n", com.SVarI(repl))

	ReplaceBlocksWithNew(&bob, repl)

	fmt.Printf("bob after change =%s\n\n", com.SVarI(bob))
}
// ---------------------------------------------------------------------------------------------------------------------------------------
func (pt *Parse2Type) ReadFileAndRun(fn, fn_o string) {
	go func() {
		r := pbread.NewPbRead()
		r.OpenFile(fn)
		pt.Lex.MatcherLexieTable(r, "S_Init")
	}()

	xpt := pt.GenParseTree(0)
	pt.TheTree = xpt
	pt.ExecuteFunctions(0)
	fmt.Printf("Tree Dump = %s\n", com.SVarI(xpt))
	fp_o, err := com.Fopen(fn_o, "w")
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error: %s\n", err)
	} else {
		pt.OutputTree(fp_o, 0)
	}
	return
}
func (s *Reader_TestSuite) TestLexie(c *C) {
	fmt.Fprintf(os.Stderr, "Test Matcher test from ../in/django3.lex file, %s\n", com.LF())
	com.DbOnFlags["db_DumpDFAPool"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["db_Matcher_02"] = true
	// com.DbOnFlags["db_NFA_LnNo"] = true
	com.DbOnFlags["match"] = true
	// com.DbOnFlags["nfa3"] = true
	com.DbOnFlags["output-machine"] = true
	com.DbOnFlags["match"] = true
	com.DbOnFlags["match_x"] = true
	// com.DbOnFlags["nfa3"] = true
	// com.DbOnFlags["nfa4"] = true
	// com.DbOnFlags["db_DFAGen"] = true
	// com.DbOnFlags["pbbuf02"] = true
	// com.DbOnFlags["DumpParseNodes2"] = true
	com.DbOnFlags["db_FlushTokenBeforeBefore"] = true
	com.DbOnFlags["db_FlushTokenBeforeAfter"] = true
	com.DbOnFlags["db_tok01"] = true
	com.DbOnFlags["in-echo-machine"] = true // Output machine

	lex := NewLexie()
	lex.NewReadFile("../in/django3.lex")

	for ii, vv := range Lexie02Data {
		if !vv.SkipTest {
			fmt.Printf("\n\nTest:%s ------------------------- Start --------------------------, %d, Input: -->>%s<<--\n", vv.Test, ii, vv.Inp)
			// r := strings.NewReader(vv.Inp)
			r := pbread.NewPbRead()
			r.PbString(vv.Inp)
			r.SetPos(1, 1, fmt.Sprintf("sf-%d.txt", ii)) // simulate file = sf-
			fmt.Printf("At: %s\n", com.LF())
			lex.MatcherLexieTable(r, "S_Init")
			fmt.Printf("At: %s\n", com.LF())

			if len(vv.Result) > 0 {
				fmt.Printf("At: %s\n", com.LF())
				if len(lex.TokList.TokenData) != len(vv.Result) {
					fmt.Printf("Lengths did not match, %s", com.SVarI(lex.TokList.TokenData))
					c.Check(len(lex.TokList.TokenData), Equals, len(vv.Result))
				} else {
					for i := 0; i < len(vv.Result); i++ {
						if vv.Result[i].StrTokNo != "" {
							c.Check(vv.Result[i].StrTokNo, Equals, in.Lookup_Tok_Name(int(lex.TokList.TokenData[i].TokNo)))
						} else {
							c.Check(vv.Result[i].TokNo, Equals, int(lex.TokList.TokenData[i].TokNo))
						}
						c.Check(vv.Result[i].Match, Equals, lex.TokList.TokenData[i].Match)
						if vv.Result[i].LineNo > 0 {
							c.Check(vv.Result[i].LineNo, Equals, lex.TokList.TokenData[i].LineNo)
						}
						if vv.Result[i].ColNo > 0 {
							c.Check(vv.Result[i].ColNo, Equals, lex.TokList.TokenData[i].ColNo)
						}
						if vv.Result[i].FileName != "" {
							c.Check(vv.Result[i].FileName, Equals, lex.TokList.TokenData[i].FileName)
						}
					}
				}
			}
			fmt.Printf("At: %s\n", com.LF())
			fmt.Printf("Test:%s ------------------------- End --------------------------\n\n", vv.Test)
		}
	}
}
func (lr *LexReType) parseExpression(depth int, d_depth int, xTree *ReTreeNodeType) []ReTreeNodeType {
	//var first *ReTreeNodeType
	//var last *ReTreeNodeType
	pre := strings.Repeat(" ", depth)
	if depth == 0 {
		xTree = lr.Tree
		com.DbPrintf("parseExpression", "%sat %s !!!top!!!, depth=%d \n", pre, com.LF(), depth)
	}
	isFirst := true
	inOr := false
	com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
	c, w := lr.Next()
	for w != LR_EOF {
		com.DbPrintf("parseExpression", "%sat %s !!!top!!!, depth=%d c=->%s<- w=%d %s -- Loop Top -- xTree=%s\n\n", pre, com.LF(), depth, c, w, NameOfLR_TokType(w), com.SVarI(xTree))
		switch w {
		case LR_CL_BR: // }
			fallthrough
		case LR_COMMA: // ,
			fallthrough
		case LR_E_CCL:
			fallthrough
		case LR_MINUS: // - -- Text if not in CCL and not 1st char in CCL
			fallthrough
		case LR_Text: // -- Add a node to list, move right
			//if true {
			xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			//} else {
			//	// Bad Idea - mucks up '*' and other processing - To Simplify Tree needs to be done post-generation with Simp-Rules
			//	ll := len(lr.Tree.Children) - 1
			//	if ll >= 0 && lr.Tree.Children[ll].LR_Tok == LR_Text {
			//		lr.Tree.Children[ll].Item += c
			//	} else {
			//		xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			//	}
			//}
		case LR_CARROT: // ^ -- BOL -- If at beginning, or after ( or | then BOL - else just text??
			fallthrough
		case LR_DOLLAR: // $ -- EOL -- If at end, or just before ) or | then EOL - else just text??
			fallthrough
		case LR_DOT: // . -- Match any char
			xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: w})
		case LR_OP_BR: // {
			if isFirst {
				lr.Warn(fmt.Sprintf("Invalid '%s' at beginning of R.E. assumed to be a text character missing escape.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			} else {
				ll := len(xTree.Children) - 1
				tmp := xTree.Children[ll]
				newTree := lr.parseIterator(depth + 1)
				if newTree.Mm == 0 && newTree.Nn == InfiniteIteration {
					ll := len(xTree.Children) - 1
					tmp := xTree.Children[ll]
					com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
					xTree.Children[ll] = ReTreeNodeType{Item: "*", LR_Tok: LR_STAR, Children: []ReTreeNodeType{tmp}}
				} else {
					if newTree.Mm > newTree.Nn {
						lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Invalid Range, Start is bigger than end, {%d,%d}, %s", newTree.Mm, newTree.Nn, com.LF())))
					}
					com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
					// xTree.Children[ll] = ReTreeNodeType{Item: c, LR_Tok: LR_OP_BR, Children: []ReTreeNodeType{tmp}, Mm: newTree.Mm, Nn: newTree.Nn}
					newTree.Children = []ReTreeNodeType{tmp}
					xTree.Children[ll] = newTree
					// CCL: xTree.Children = append(xTree.Children, lr.parseCCL(depth+1, w)) // xyzzy needs work ---------------------------------------------------
				}
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			}
		case LR_STAR: // * -- Error if 1st char, else take prev item from list, star and replace it.
			fallthrough
		case LR_PLUS: // + -- Error if 1st char
			fallthrough
		case LR_QUEST: // ? -- Error if 1st char
			if isFirst {
				lr.Warn(fmt.Sprintf("Invalid '%s' at beginning of R.E. assumed to be a text character missing escape.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			} else {
				ll := len(xTree.Children) - 1
				tmp := xTree.Children[ll]
				com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
				xTree.Children[ll] = ReTreeNodeType{Item: c, LR_Tok: w, Children: []ReTreeNodeType{tmp}}
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			}
		case LR_OR: // | n-ary or operator
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			inOr = true
			// Left Machine is collected to be sub-machine == Beginning-to-current
			// left := xTree.Children // change to be left section back to but not including "|" node - or all if no | node.
			kk := -1
			for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
				if xTree.Children[jj].LR_Tok == LR_OR {
					kk = jj
					break
				}
			}
			if kk == -1 { // No OR tok found
				left := xTree.Children // change to be left section back to but not including "|" node - or all if no | node.
				ll := len(left)
				leftNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, ll, ll)}
				for jj := range left {
					leftNode.Children[jj] = left[jj]
				}
				newTop := ReTreeNodeType{Item: "|", LR_Tok: LR_OR, Children: make([]ReTreeNodeType, 0, 10)}
				newTop.Children = append(newTop.Children, leftNode) // only if no "or" node, else ref to "or" node
				xTree.Children = xTree.Children[:0]
				xTree.Children = append(xTree.Children, newTop)
				com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, left=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), com.SVarI(left))
			} else {
				if kk >= 0 {
					if kk < len(xTree.Children) {
						tmp := xTree.Children[kk+1:]
						xTree.Children = xTree.Children[0 : kk+1]
						newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
						for i := 0; i < len(tmp); i++ {
							newNode.Children[i] = tmp[i]
						}
						xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
					}
				}
			}
			// Or node is created like (LR_STAR)
			// Recursive call to parse rest of items at this level
			//newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, 1, 10)} // No recursive call
			//lr.parseExpression(depth+1, depth, &newNode.Children[0])
			//newTop.Children = append(newTop.Children, newNode.Children[0]) // Take results of recursion and put in as RIGHT machine under LR_OR (optimize for N-Tree OR at this point)
			//xTree.Children = append(xTree.Children, newTop)
			//if depth > d_depth {
			//	com.DbPrintf("parseExpression", "%sat %s, depth=%d d_detph=%d\n", pre, com.LF(), depth, d_depth)
			//	return xTree.Children
			//}
		case LR_OP_PAR: // ( -- Start of Sub_Re
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			newNode := ReTreeNodeType{Item: c, LR_Tok: LR_OP_PAR, Children: make([]ReTreeNodeType, 1, 10)}
			lr.parseExpression(depth+1, depth+1, &newNode.Children[0])
			newNode.Children[0].Item = c
			newNode.Children[0].LR_Tok = LR_OP_PAR
			xTree.Children = append(xTree.Children, newNode.Children[0])
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
		case LR_CL_PAR: // )
			// If in "or" node set - then collect last section to "or" ------------------------ <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			if depth == 0 {
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
				lr.Warn(fmt.Sprintf("Invalid '%s' - not properly nested. Assuming that this was to match a character.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			} else {
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
				if inOr {
					com.DbPrintf("parseExpression", "%sAT Top of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
					kk := -1
					for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
						if xTree.Children[jj].LR_Tok == LR_OR {
							kk = jj
							break
						}
					}
					if kk >= 0 {
						if kk < len(xTree.Children) {
							tmp := xTree.Children[kk+1:]
							xTree.Children = xTree.Children[0 : kk+1]
							newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
							for i := 0; i < len(tmp); i++ {
								newNode.Children[i] = tmp[i]
							}
							xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
						}
					}
					com.DbPrintf("parseExpression", "%sAT Bottom of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
				}
				return xTree.Children
			}
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			inOr = false
		case LR_CCL: // [...] -- CCL Node (Above)
			fallthrough
		case LR_N_CCL: // [^...] -- N_CCL Node
			xTree.Children = append(xTree.Children, lr.parseCCL(depth+1, w)) // xyzzy needs work ---------------------------------------------------
		default:
			lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Invalid LR Token Type, '%d', '%s', %s", w, NameOfLR_TokType(w), com.LF())))
			return xTree.Children
		}
		isFirst = false
		com.DbPrintf("parseExpression", "%sAT %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
		c, w = lr.Next()
	}

	// If in "or" node set - then collect last section to "or" ------------------------ <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	if inOr {
		com.DbPrintf("parseExpression", "%sAT Top of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
		kk := -1
		for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
			if xTree.Children[jj].LR_Tok == LR_OR {
				kk = jj
				break
			}
		}
		if kk >= 0 {
			if kk < len(xTree.Children) {
				tmp := xTree.Children[kk+1:]
				xTree.Children = xTree.Children[0 : kk+1]
				newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
				for i := 0; i < len(tmp); i++ {
					newNode.Children[i] = tmp[i]
				}
				xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
			}
		}
		com.DbPrintf("parseExpression", "%sAT Bottom of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
	}
	return xTree.Children
}
func FxFor(callNo int, pt *Parse2Type, Context *eval.ContextType, curTree *MtType) (err error) {
	fmt.Printf("Fx_For Called, %d\n", callNo)
	fmt.Printf("---------------------------------------------------------------------------- for tree -------------------------------------------------------------------------\n")
	if false {
		fmt.Printf("%s\n\n", com.SVarI(curTree))
	} else {
		curTree.DumpMtType(os.Stdout, 0, 0)
	}

	tmpMt := func(ss []*MtType) (rv *MtType) {
		rv = &MtType{
			NodeType: gen.Tok_Tree_List,
			List:     make([]*MtType, 0, len(ss)),
			LineNo:   ss[0].LineNo,
			ColNo:    ss[0].ColNo,
			FileName: ss[0].FileName,
		}
		for _, vv := range ss {
			rv.List = append(rv.List, vv)
		}
		return
	}

	var walkTreeEmptyOutput func(mt *MtType, pos, depth int)
	walkTreeEmptyOutput = func(mt *MtType, pos, depth int) {
		mt.HTML_Output = ""
		for ii, vv := range mt.List {
			walkTreeEmptyOutput(vv, ii, depth+1)
		}
	}

	if callNo == 11 {
		if !curTree.MoreThan(1) {
		} else {
			ifp := FindTags(curTree.List[0], gen.Tok_Tree_Empty, gen.Tok_Tree_EndFor) // find parts of for loop
			fmt.Printf("ifp=%+v\n", ifp)
			if curTree.EvalExpr(Context, 0, 0) {
				x := tmpMt(curTree.List[0].List[0:ifp[0]])
				curTree.HTML_Output = ""
				// xyzzy - check type
				for ii, vv := range curTree.XValue.([]interface{}) {
					Context.SetInContext("$index", eval.CtxType_Int, ii) // xyzzy - conversion to string not correct -- needs to push $index - on endfor pop
					Context.SetInContext("$value", eval.CtxType_Str, vv) // xyzzy - conversion to string not correct
					//Context.SetInContext("key", fmt.Sprintf("%d", ii))   // xyzzy - conversion to string not correct -- key should be ID, Value too.
					//Context.SetInContext("value", fmt.Sprintf("%v", vv)) // xyzzy - conversion to string not correct
					pt.x_walk(x, pt.pos, pt.depth)
					curTree.HTML_Output += pt.CollectTree(x, 0) // Need to collect HTML_Output and append it to curTree.HTML_Output
				}
				mx := len(ifp)
				// xyzzy - check type
				if len(curTree.XValue.([]interface{})) == 0 && mx > 1 && curTree.List[0].List[ifp[mx-1]].NodeType == gen.Tok_Tree_Empty {
					i := mx - 1
					x := tmpMt(curTree.List[0].List[ifp[i]+1 : ifp[i+1]])
					pt.x_walk(x, pt.pos, pt.depth)
					curTree.HTML_Output += pt.CollectTree(x, 0) // Need to collect HTML_Output and append it to curTree.HTML_Output
				}
				walkTreeEmptyOutput(curTree.List[0], 0, 0) // set children's HTML_Output to ""
			}
		}
	}
	return
}
// func ParseAction(ln string) [][]string {
func Test_ParseAction(t *testing.T) {
	tst1 := false
	com.DbOnFlags["in-echo-machine"] = true // Output machine
	for ii, vv := range In01Test {
		if !vv.SkipTest {
			// fmt.Printf("\nTest %s ------------------------------------------------------------------------------------ \n", vv.Test)
			if tst1 {
				x := ParseAction(vv.Inp)
				fmt.Printf("%3d=%s\n", ii, com.SVarI(x))
			}
			if vv.ResultTst4 != "" {
				cls := ClasifyLine(vv.Inp)
				// fmt.Printf("Test %s cls: %s for -->>%s<<--, %s\n", vv.Test, cls, vv.Inp, com.LF())
				atFront, rest := PickOffPatternAtBeginning(cls, vv.Inp)
				_ = rest
				if vv.ResultTst2 != "" {
					_, _, opt := ParsePattern(cls, vv.Inp)
					r := fmt.Sprintf("%v", opt)
					if r != vv.ResultTst2 {
						t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst2, r, com.LF())
					}
				}
				if atFront != vv.ResultTst4 {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst4, atFront, com.LF())
				}
			} else if vv.ResultTst2 != "" {
				cls := ClasifyLine(vv.Inp)
				pat, flag, opt := ParsePattern(cls, vv.Inp)
				r := fmt.Sprintf("%v", opt)
				if r != vv.ResultTst2 {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst2, r, com.LF())
				}
				if false {
					fmt.Printf("%3d: %v %v %v\n", ii, pat, flag, opt)
				}
			}
			if len(vv.ResultTst5) > 0 {
				name, value := ParseNameValue(vv.Inp)
				// fmt.Printf("name=%s value=%s\n", name, value)
				if name != vv.ResultTst5[0] {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst5[0], name, com.LF())
				}
				if value != vv.ResultTst5[1] {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst5[1], value, com.LF())
				}
			}
			if len(vv.ResultTst6) > 0 {
				name, value := ParseActionItem(vv.Inp)
				// fmt.Printf("name=%s value=%s\n", name, value)
				if name != vv.ResultTst6[0] {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst6[0], name, com.LF())
				}
				if value != vv.ResultTst6[1] {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, vv.ResultTst6[1], value, com.LF())
				}
			}
			if len(vv.ResultTst7) > 0 {
				pl := ParsePlist(vv.Inp)
				if len(pl) != len(vv.ResultTst7) {
					t.Errorf("Test %s Failed, Expected %d length Got %d, %s\n", vv.Test, len(vv.ResultTst7), len(pl), com.LF())
				}
				tt := fmt.Sprintf("%s", pl)
				ss := fmt.Sprintf("%s", vv.ResultTst7)
				if ss != tt {
					t.Errorf("Test %s Failed, Expected ->%s<- Got ->%s<-, %s\n", vv.Test, ss, tt, com.LF())
				}
			}
		}
	}
	//xp := ParsePlist("abc, def, ghi")
	//fmt.Printf("xp=%+v\n", xp)
}
func (s *LexieTestSuite) TestLexie(c *C) {
	// return
	fmt.Fprintf(os.Stderr, "Test Parsing of REs, Test generation of NFAs %s\n", com.LF())
	com.DbOnFlags["db_NFA"] = true
	com.DbOnFlags["db_NFA_LnNo"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["parseExpression"] = true
	com.DbOnFlags["CalcLength"] = true

	// Add a test for any issue
	c.Check(42, Equals, 42)
	// c.Assert("nope", Matches, "hel.*there")
	fmt.Printf("**** In Test Issues\n")
	//x := test7GenDFA()
	//c.Check(x, Equals, 0)

	n_err := 0
	n_skip := 0
	for ii, vv := range Lexie01Data {
		if !vv.SkipTest {
			fmt.Printf("\n\n--- %d Test: %s -----------------------------------------------------------------------------\n\n", ii, vv.Test)
			Pool := NewNFA_Pool()
			Cur := Pool.GetNFA()
			Pool.InitState = Cur
			Pool.AddReInfo(vv.Re, "", 1, vv.Rv, InfoType{})
			Pool.Sigma = Pool.GenerateSigma()
			if false {
				com.DbPrintf("test7", "Pool=%s\n", com.SVarI(Pool))
			}
			Pool.DumpPool(false)
			Pool.DumpPoolJSON(os.Stdout, vv.Re, vv.Rv)
			fmt.Printf("Sigma: ->%s<-\n", Pool.Sigma)

			newFile := fmt.Sprintf("../ref/nfa_%s.tst", vv.Test)
			cmpFile := fmt.Sprintf("../ref/nfa_%s.ref", vv.Test)
			gvFile := fmt.Sprintf("../ref/nfa_%s.gv", vv.Test)
			svgFile := fmt.Sprintf("../ref/nfa_%s.svg", vv.Test)

			fp, _ := com.Fopen(newFile, "w")
			Pool.DumpPoolJSON(fp, vv.Re, vv.Rv)
			fp.Close()

			newData, err := ioutil.ReadFile(newFile)
			if err != nil {
				panic("unable to read file, " + newFile)
			}
			if com.Exists(cmpFile) {
				ref, err := ioutil.ReadFile(cmpFile)
				if err != nil {
					panic("unable to read file, " + cmpFile)
				}
				if string(ref) != string(newData) {
					c.Check(string(newData), Equals, string(ref))
					fmt.Printf("%sError%s: Test case %s failed to match\n", com.Red, com.Reset, vv.Test)
					n_err++
				}
			} else {
				n_skip++
			}

			gv, _ := com.Fopen(gvFile, "w")
			Pool.GenerateGVFile(gv, vv.Re, vv.Rv)
			gv.Close()
			out, err := exec.Command("/usr/local/bin/dot", "-Tsvg", "-o"+svgFile, gvFile).Output()
			if err != nil {
				fmt.Printf("Error from dot, %s, %s\n", err, com.LF())
				fmt.Printf("Output: %s\n", out)
			}
		}
	}
	if n_skip > 0 {
		fmt.Fprintf(os.Stderr, "%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset)
		com.DbPrintf("debug", "\n\n%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset)
	}
	if n_err > 0 {
		fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
		com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
	} else {
		fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset)
		com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset)
	}
}
func main() {
	var fp *os.File

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)
	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}
	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ",")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			com.DbOnFlags[v] = true
		}
	}
	if opts.Echo != "" {
		com.DbOnFlags["in-echo-machine"] = true // Output machine
	}
	fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set for getting back stuff via channel

	// ------------------------------------------------------ input machine --------------------------------------------------------------
	if opts.LexPat != "" {
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else if opts.ReadMachine != "" {
		fmt.Printf("Should input machine at this point\n") // xyzzy
	} else {
		fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n")
		os.Exit(1)
	}
	if opts.Machine != "" {
		fmt.Printf("Should output machine at this point\n") // xyzzy
	}

	// -------------------------------------------------- start scanning process ----------------------------------------------------------
	if opts.Tokens != "" {
		fp, _ = com.Fopen(opts.Tokens, "w")
	} else {
		fp = os.Stdout
	}
	if opts.Input != "" {
		go func() {
			r := pbread.NewPbRead()
			r.OpenFile(opts.Input)
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()
	} else {
		go func() {
			r := pbread.NewPbRead()
			for _, fn := range ifnList[1:] {
				r.OpenFile(fn)
			}
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()
	}

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
	if false { // just print tokens out to check the scanning process and CLI options
		for msg := range pt.Lex.Message {
			fmt.Fprintf(fp, "%+v\n", msg)
		}
	} else { // Generate a parse tree and print out.
		xpt := pt.GenParseTree(0)
		pt.TheTree = xpt
		xpt.DumpMtType(fp, 0, 0)
		fmt.Printf("----------------------------------- start execute ----------------------------------------------------\n")
		pt.ExecuteFunctions(0)
		fmt.Printf("----------------------------------- debug output ----------------------------------------------------\n")
		if true {
			fmt.Printf("%s\n", com.SVarI(xpt))
		}
		fmt.Printf("----------------------------------- output ----------------------------------------------------\n")
		for i := 0; i < 1000000; i++ {
			pt.OutputTree0(fp, 0)
		}
		fmt.Printf("----------------------------------- errors ----------------------------------------------------\n")
		pp := pt.CollectErrorNodes(0)
		for ii, vv := range pp {
			fmt.Printf("Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
		}
		fmt.Printf("----------------------------------- final template results ----------------------------------------------------\n")
		pt.OutputTree(fp, 0)
	}
	if opts.Tokens != "" {
		fp.Close()
	}
}
func (lr *LexReType) CalcLengthChild(tree *ReTreeNodeType, d int) (x int, hard bool) {
	t := 0
	hard = false
	if d == 1 {
		com.DbPrintf("CalcLength", "CalcLengthChild at top: %s\n\n", com.SVarI(tree))
	}
	switch tree.LR_Tok {
	case LR_null:
		for jj := range tree.Children {
			t, hard = lr.CalcLengthChild(&tree.Children[jj], d+1)
			x += t
		}
	case LR_Text:
		// com.DbPrintf("CalcLength", "Len of item(%s) = %d, %s\n", tree.Item, len(tree.Item), com.LF())
		x += len(tree.Item)
		hard = true
	case LR_EOF:
		hard = true
	case LR_DOT: // .
		x += 1
	case LR_STAR: // *
		x = 0
		// com.DbPrintf("CalcLength", "After * x = %d, hard=%v\n", x, hard)
	case LR_PLUS: // +
		// patch to fix the problem with [0-9]+ not working -- In reality the length is only if it is a "FIXED" length, 0 else
		// if len(tree.Children) > 0 {
		// 	t, hard = lr.CalcLengthChild(&tree.Children[0], d+1)
		// 	x += t
		// }
		// hard = true
		x = 0
	case LR_QUEST: // ?
		x = 0
	case LR_OP_BR: // { -- {m,n} - need to calculate length of ( m times, length of children
		x = 0
	case LR_OP_PAR: // (
		if len(tree.Children) > 0 {
			t, hard = lr.CalcLengthChild(&tree.Children[0], d+1)
			x += t
		}
		// com.DbPrintf("CalcLength", "After ( x = %d, hard=%v\n", x, hard)
	case LR_CL_PAR: // )
		x = 0
	case LR_CCL: // [...]
		x += 1
		hard = true
	case LR_N_CCL: // [^...]
		x += 1
	case LR_E_CCL: // ]
		x += 1
	case LR_CARROT: // ^
		x += 0
		hard = true
	case LR_MINUS: // -
		x += 1
		hard = true
	case LR_DOLLAR: // $
		hard = true
	case LR_OR: // |
		y := -1
		z := 0
		hard = false
		if len(tree.Children) > 0 {
			hard = true
			h := false
			for jj := range tree.Children {
				z, h = lr.CalcLengthChild(&tree.Children[jj], d+1)
				if y == -1 {
					y = z
				} else if y < z {
					y = z
				}
				if !h {
					hard = false
				}
			}
		}
		x += y
		// com.DbPrintf("CalcLength", "After | x = %d, hard = %v\n", x, hard)
	}
	return
}
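// Illustrative sketch (hypothetical helper, not in the original code) of how
// CalcLengthChild accumulates fixed lengths: a literal contributes its length
// and is "hard", a '*' node contributes 0, and an LR_null parent sums its
// children, with hard taking the value from the last child visited.
func calcLengthExample(lr *LexReType) {
	tree := ReTreeNodeType{
		LR_Tok: LR_null,
		Children: []ReTreeNodeType{
			{Item: "abc", LR_Tok: LR_Text},
			{Item: "*", LR_Tok: LR_STAR, Children: []ReTreeNodeType{{Item: "x", LR_Tok: LR_Text}}},
		},
	}
	x, hard := lr.CalcLengthChild(&tree, 1)
	fmt.Printf("len=%d hard=%v\n", x, hard) // expected: len=3 hard=false
}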
func init() {
	if false {
		// Dead reference, apparently kept so the fmt and com imports stay used.
		fmt.Printf("", com.SVarI(nil), com.LF())
	}
}
func CopyInAssets(optsSiteName string, BaseAssets string, SiteAssets string, optsOutput string, User string, Theme string, forcedCpFlag bool) {
	// ---------------------------------------------------------------------------------------------------------------------------------
	data := make(map[string]string)
	data["site_name"] = optsSiteName
	data["user"] = User
	data["theme"] = Theme

	//if opts.Input == "" && optsSiteName != "" {
	//	opts.Input = com.Qt("./site/%{site_name%}/", data2)
	//}
	if optsOutput == "" && optsSiteName != "" {
		optsOutput = com.Qt("./www/%{site_name%}/", data)
	}

	// Generate list of top directories to search
	top := make([]string, 0, 10)
	topDirs := com.Qt(SiteAssets+"/%{user%}/%{theme%}/", data) // ./site_assets/%{site_name%} -> ./site_assets/%{site_name%}/%{user%}/%{theme%}/
	if com.Exists(topDirs) {
		top = append(top, topDirs)
	}
	data["user"] = ""
	// data["theme"] = "A-Theme"
	topDirs = com.Qt(SiteAssets+"/%{user%}/%{theme%}/", data) // ./site_assets/%{site_name%} -> ./site_assets/%{site_name%}/%{user%}/%{theme%}/
	if com.Exists(topDirs) {
		top = append(top, topDirs)
	}
	data["user"] = User
	// data["user"] = ""
	data["theme"] = ""
	topDirs = com.Qt(SiteAssets+"/%{user%}/%{theme%}/", data) // ./site_assets/%{site_name%} -> ./site_assets/%{site_name%}/%{user%}/%{theme%}/
	if com.Exists(topDirs) {
		top = append(top, topDirs)
	}
	top = append(top, BaseAssets) // top has array in order of top level dirs to copy from.

	var infn, outfn, dirs []string
	cpList2 := make(map[string]string)
	for ii := range top {
		jj := (len(top) - 1) - ii
		dir := top[jj]
		t_fns, t_dirs, err := com.GetFilenamesRecrusive(dir)
		if err != nil {
			fmt.Printf("Error: %s\n", err)
		} else {
			// if base-file-name is not in fns, then add it
			infn = append(infn, t_fns...)
			t_fns = com.ReplaceEach(t_fns, opts.Input, optsOutput)
			outfn = append(outfn, t_fns...)
			outfn = com.ReplaceEach(outfn, dir, optsOutput)
			for kk, in := range infn {
				// fmt.Printf("Loop %2d: in=>%s<- dir=%s\n", kk, in, dir)
				if com.Exists(in) {
					// xyzzy - compare time stamps and size
					if InputModified(in, outfn[kk], forcedCpFlag) || FileSizeDiffers(in, outfn[kk]) {
						cpList2[outfn[kk]] = in
					}
				}
			}
			// if base-dir-name is not in dirs then add base dir name
			t_dirs = com.ReplaceEach(t_dirs, dir, optsOutput)
			for _, x := range t_dirs {
				if !com.InArray(x, dirs) {
					dirs = append(dirs, x)
				}
			}
			if db_debug4 {
				fmt.Printf("\tinfn=\n")
				debugPrintStrinSlice(infn, "\t\t")
				fmt.Printf("\toutfn=\n")
				debugPrintStrinSlice(outfn, "\t\t")
				fmt.Printf("\tdirs 0=\n")
				debugPrintStrinSlice(dirs, "\t\t")
			}
		}
	}
	MkDirArray(dirs)
	if db_debug4 {
		fmt.Printf("cp: %s\n", com.SVarI(cpList2))
	}
	CopyFilesInHash(cpList2)
}
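// Hypothetical call sketch for CopyInAssets; the concrete paths, user, and
// theme are assumptions, chosen to match the directory layout shown in the
// comments above (./site_assets/<site>/<user>/<theme>/ as a source and
// ./www/<site>/ as the derived output when optsOutput is "").
func copyInAssetsExample() {
	CopyInAssets("my-site", "./base_assets", "./site_assets/my-site", "", "jane", "A-Theme", false)
}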
// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------
func FxIf(callNo int, pt *Parse2Type, Context *eval.ContextType, curTree *MtType) (err error) {
	fmt.Printf("Fx_If Called, %d\n", callNo)
	fmt.Printf("---------------------------------------------------------------------------- if tree -------------------------------------------------------------------------\n")
	if false {
		fmt.Printf("%s\n\n", com.SVarI(curTree))
	} else {
		curTree.DumpMtType(os.Stdout, 0, 0)
	}

	tmpMt := func(ss []*MtType) (rv *MtType) {
		rv = &MtType{
			NodeType: gen.Tok_Tree_List,
			List:     make([]*MtType, 0, len(ss)),
			LineNo:   ss[0].LineNo,
			ColNo:    ss[0].ColNo,
			FileName: ss[0].FileName,
		}
		for _, vv := range ss {
			rv.List = append(rv.List, vv)
		}
		return
	}

	if callNo == 11 {
		fmt.Printf("n options = %d, opts = %v AT: %s\n", len(curTree.SVal), curTree.SVal, com.LF())
		if !curTree.MoreThan(0) {
		} else {
			ifp := FindTags(curTree.List[0], gen.Tok_Tree_ElsIf, gen.Tok_Tree_Else, gen.Tok_Tree_Endif) // find parts of if/else
			fmt.Printf("ifp=%+v, 1st expr = %v\n", ifp, curTree.EvalExpr(Context, 0, 0)) // xyzzy - should check order of ElsIf...Else...EndIf
			if curTree.EvalExpr(Context, 0, 0) {
				if curTree.DataType == eval.CtxType_Bool && curTree.XValue.(bool) {
					x := tmpMt(curTree.List[0].List[0:ifp[0]])
					pt.x_walk(x, pt.pos, pt.depth)
					return
				}
			}
			fmt.Printf("At AT: %s\n", com.LF())
			for i := 0; i < len(ifp)-1; i++ {
				ct := curTree.List[0].List[ifp[i]]
				fmt.Printf("At AT: %s\n", com.LF())
				if ct.NodeType == gen.Tok_Tree_ElsIf {
					fmt.Printf("At AT, it is (((ElsIf))): %s\n", com.LF())
					if ct.EvalExpr(Context, 0, 0) { // expression is correct
						fmt.Printf("At AT: %s, ct=%+v\n", com.LF(), ct)
						if ct.DataType == eval.CtxType_Bool && ct.XValue.(bool) { // If true value for expression
							x := tmpMt(curTree.List[0].List[ifp[i]+1 : ifp[i+1]])
							fmt.Printf("At -- Need to collect results -- AT: %s -------- elsif sub-tree Range[%d,%d] is %s\n", com.LF(), ifp[i]+1, ifp[i+1], com.SVarI(x))
							pt.x_walk(x, pt.pos, pt.depth)
							return
						}
					}
				} else if ct.NodeType == gen.Tok_Tree_Else {
					fmt.Printf("At AT, it is (((Else))): %s\n", com.LF())
					x := tmpMt(curTree.List[0].List[ifp[i]+1 : ifp[i+1]])
					pt.x_walk(x, pt.pos, pt.depth)
					return
				}
			}
		}
	}
	return
}