Example #1
File: re.go Project: pschlump/lexie
func (lr *LexReType) ParseRe(ss string) {
	com.DbPrintf("db2", "at %s\n", com.LF())
	lr.SetBuf(ss)
	com.DbPrintf("db2", "at %s\n", com.LF())
	lr.parseExpression(0, 0, nil)
	com.DbPrintf("db2", "at %s\n", com.LF())
}
Example #2
File: re.go Project: pschlump/lexie
//
// What can I see at the top of a RE
//
//	LR_Text                     //
//	LR_EOF                      //
//	LR_DOT                      // .		-- Match any char
//	LR_STAR                     // *		-- Error if 1st char
//	LR_PLUS                     // +		-- Error if 1st char
//	LR_QUEST                    // ?		-- Error if 1st char
//	LR_B_CCL                    // [		-- Start of CCL Node
//	LR_E_CCL                    // ]
//	LR_OP_PAR                   // (		-- Start of Sub_Re
//	LR_CL_PAR                   // )
//	LR_CCL                      // [...]	-- CCL Node (Above)
//	LR_N_CCL                    // [^...]	-- N_CCL Node
//	LR_CARROT                   // ^		-- BOL
//	LR_MINUS                    // -		-- Text if not in CCL and not 1st char in CCL
//
//	Item     string
//	LR_Tok   LR_TokType
//	Children []*ReTreeNodeType
//	Next     *ReTreeNodeType
//
func (lr *LexReType) DumpParseNodesChild(ch []ReTreeNodeType, d int) {
	com.DbPrintf("DumpParseNodes", "\n%sDumpParseNodesChild: At %s\n", N4Blanks(d), com.LF())
	for ii, vv := range ch {
		com.DbPrintf("DumpParseNodes", "%sat %s [step %d] ", N4Blanks(d), com.LF(), ii)
		com.DbPrintf("DumpParseNodes", "Item: [%s] %d=%s, N-Children=%d\n", vv.Item, vv.LR_Tok, NameOfLR_TokType(vv.LR_Tok), len(vv.Children))
		if len(vv.Children) > 0 {
			lr.DumpParseNodesChild(vv.Children, d+1)
		}
	}
	com.DbPrintf("DumpParseNodes", "%sDumpParseNodesChild: Done %s\n\n", N4Blanks(d), com.LF())
}
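The comment above lists the fields of a parse-tree node (Item, LR_Tok, Children, Next), and DumpParseNodesChild simply walks Children recursively, indenting by depth. The following is a minimal, self-contained sketch of that traversal with illustrative types and plain fmt output instead of the lexie com.DbPrintf helpers; it is not the project's code, just the same shape of walk.

package main

import (
	"fmt"
	"strings"
)

// node stands in for ReTreeNodeType; only the fields the dump needs.
type node struct {
	Item     string
	Tok      int
	Children []node
}

// dump prints one node per line, indented four spaces per depth level,
// then recurses into the children - the same shape as DumpParseNodesChild.
func dump(ch []node, depth int) {
	pre := strings.Repeat("    ", depth)
	for i, n := range ch {
		fmt.Printf("%s[%d] Item=%q Tok=%d N-Children=%d\n", pre, i, n.Item, n.Tok, len(n.Children))
		if len(n.Children) > 0 {
			dump(n.Children, depth+1)
		}
	}
}

func main() {
	tree := []node{
		{Item: "a", Tok: 1},
		{Item: "*", Tok: 5, Children: []node{{Item: "b", Tok: 1}}},
	}
	dump(tree, 0)
}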
Example #3
// ----------------------------------------------------------------------------------------------------------------------------------------
func FxReadJson(callNo int, pt *Parse2Type, Context *eval.ContextType, curTree *MtType) (err error) {
	fmt.Printf("Fx_ReadJson Called, %d\n", callNo)
	// {% read_json ID "file_name.json" %} (config allows url:// not just file)

	if callNo == 0 {

		if !curTree.NOptions(2) {
			// xyzzy } else if !curTree.IsId(0) {		// -- implement to check that [0] is an ID
		} else {
			id := curTree.SVal[0]
			path := curTree.SVal[1]
			// path = path[0 : len(path)-1]
			err = nil
			// var jsonData map[string]SQLOne
			var file []byte
			file, err = ioutil.ReadFile(path)
			if err != nil {
				fmt.Printf("Error(10014): %v, %s, Config File:%s\n", err, com.LF(), path)
				return
			}
			file = []byte(strings.Replace(string(file), "\t", " ", -1)) // file = []byte(ReplaceString(string(file), "^[ \t][ \t]*//.*$", ""))

			// Check beginning of file if "{" then MapOf, if "[" Array, else look at single value
			if strings.HasPrefix(string(file), "{") {

				jsonData := make(map[string]interface{})

				err = json.Unmarshal(file, &jsonData)
				if err != nil {
					fmt.Printf("Error(10012): %v, %s, Config File:%s\n", err, com.LF(), path)
					return
				}

				Context.SetInContext(id, eval.CtxType_MapOf, jsonData)

			} else {

				jsonData := make([]interface{}, 0, 100)

				err = json.Unmarshal(file, &jsonData)
				if err != nil {
					fmt.Printf("Error(10012): %v, %s, Config File:%s\n", err, com.LF(), path)
					return
				}

				Context.SetInContext(id, eval.CtxType_ArrayOf, jsonData)

			}

		}
	}

	return
}
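FxReadJson decides between a JSON object and a JSON array by looking at the first character of the file and unmarshals into a map or a slice accordingly. Below is a standalone sketch of that decision using only encoding/json, with illustrative names and none of the lexie/eval context plumbing; it also trims leading whitespace, which the original prefix check does not.

package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// loadJSON unmarshals into a map when the document starts with '{'
// and into a slice when it starts with '[' - the same test FxReadJson makes.
func loadJSON(raw []byte) (interface{}, error) {
	trimmed := strings.TrimSpace(string(raw))
	if strings.HasPrefix(trimmed, "{") {
		m := make(map[string]interface{})
		if err := json.Unmarshal(raw, &m); err != nil {
			return nil, err
		}
		return m, nil
	}
	var a []interface{}
	if err := json.Unmarshal(raw, &a); err != nil {
		return nil, err
	}
	return a, nil
}

func main() {
	v, err := loadJSON([]byte(`{"name":"lexie"}`))
	fmt.Println(v, err) // map[name:lexie] <nil>
	v, err = loadJSON([]byte(`[1,2,3]`))
	fmt.Println(v, err) // [1 2 3] <nil>
}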
Example #4
File: re.go Project: pschlump/lexie
func (lr *LexReType) DumpParseNodes() {
	com.DbPrintf("DumpParseNodes", "\nDumpParseNodes: At %s\n", com.LF())
	for ii, vv := range lr.Tree.Children {
		com.DbPrintf("DumpParseNodes", "at %s [step %d] ", com.LF(), ii)
		com.DbPrintf("DumpParseNodes", "Item: [%s] %d=%s, N-Children=%d\n", vv.Item, vv.LR_Tok, NameOfLR_TokType(vv.LR_Tok), len(vv.Children))
		if len(vv.Children) > 0 {
			lr.DumpParseNodesChild(vv.Children, 1)
		}
	}
	com.DbPrintf("DumpParseNodes", "DumpParseNodes: Done %s\n\n", com.LF())
	com.DbPrintf("DumpParseNodesX", "DumpParseNodes: %s\n\n", com.SVarI(lr.Tree))
}
Example #5
// Get the current line/col no and file name
func (pb *PBReadType) GetPos() (LineNo int, ColNo int, FileName string) {
	com.DbPrintf("pbbuf02", "At: %s\n", com.LF())
	if len(pb.PbBuffer) > 0 {
		com.DbPrintf("pbbuf02", "From Buffer At: %s\n", com.LF())
		LineNo = pb.PbBuffer[0].LineNo
		ColNo = pb.PbBuffer[0].ColNo
		FileName = pb.PbBuffer[0].FileName
	} else {
		com.DbPrintf("pbbuf02", "Not set At: %s\n", com.LF())
		LineNo = 1
		ColNo = 1
		FileName = ""
	}
	return
}
Example #6
// Return the next rune.  If runes have been pushed back then use those first.
func (pb *PBReadType) NextRune() (rn rune, done bool) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	done = false

	if pb.PbTop > 0 {
		pb.PbTop--
		rn = pb.PbAFew[pb.PbTop]
	} else if len(pb.PbBuffer) <= 0 {
		done = true
		// } else if len(pb.PbBuffer) == 1 && pb.PbBuffer[0].Pos >= len(pb.PbBuffer[0].Buffer) && !pb.PbBuffer[0].EofOnFile {
		// Xyzzy - read in more from file - append
		// so far case never happens because EofOnFile is constant true at init time.
	} else if len(pb.PbBuffer) == 1 && pb.PbBuffer[0].Pos >= len(pb.PbBuffer[0].Buffer) && pb.PbBuffer[0].EofOnFile {
		done = true
	} else if len(pb.PbBuffer) > 1 && pb.PbBuffer[0].Pos >= len(pb.PbBuffer[0].Buffer) && pb.PbBuffer[0].EofOnFile {
		pb.PbBuffer = pb.PbBuffer[1:]
		return pb.NextRune()
	} else {
		//fmt.Printf("Just before core, Pos=%d\n", pb.PbBuffer[0].Pos)
		//fmt.Printf("Just before core, Len 1=%d\n", len(pb.PbBuffer[0].Buffer))
		if pb.PbBuffer[0].Pos >= len(pb.PbBuffer[0].Buffer) { // xyzzy --------------------- pjs - new code - not certain if correct ---------------------------------
			done = true
		} else {
			rn = pb.PbBuffer[0].Buffer[pb.PbBuffer[0].Pos]
			pb.PbBuffer[0].Pos++
			if rn == '\n' {
				pb.PbBuffer[0].LineNo++
				pb.PbBuffer[0].ColNo = 1
			} else {
				pb.PbBuffer[0].ColNo++
			}
		}
	}
	return
}
Example #7
File: mt.go Project: pschlump/lexie
// -------------------------------------------------------------------------------------------------------------------------------------------------------------------------
// Find the nodes in tree (blocks) with stated name and replace them with new
func ReplaceBlocksWithNew(search_in_tree **MtType, new_block *MtType) {

	block_name := new_block.SVal[0]

	var walkTree func(mt **MtType, pos, depth int)
	walkTree = func(mt **MtType, pos, depth int) {
		for ii := range (*mt).List {
			//if vv.FxId == gen.Fx_block && block_name == vv.SVal[0] {
			//	fmt.Printf("FxExtend Replace: [%d] found block with name >%s<-, %s\n", ii, vv.SVal[0], com.LF())
			//	*mt = new_block
			//}
			walkTree(&((*mt).List[ii]), ii, depth+1)
		}
		if len((*mt).SVal) > 0 {
			fmt.Printf("FxExtend Before FxId = %d, looking for %d (*mt).SVal[0] = >%s< looking for %s, %s\n", (*mt).FxId, gen.Fx_block, (*mt).SVal[0], block_name, com.LF())
			if (*mt).FxId == gen.Fx_block && block_name == (*mt).SVal[0] {
				fmt.Printf("FxExtend Replace: found block with name >%s<-, %s\n", (*mt).SVal[0], com.LF())
				*mt = new_block
			}
		}
	}

	walkTree(search_in_tree, 0, 0)

	return
}
Example #8
// Call a function that has been placed in the table
func (ctx *ContextType) Call(name string, params ...interface{}) (result []reflect.Value, err error) {
	ctx.mutex.RLock()
	fx, ok := ctx.Store[name]
	ctx.mutex.RUnlock()
	if !ok { // 								Need to have a mutex - lock
		err = errors.New(name + " function does not exist.")
		return
	}
	np := len(params)
	fmt.Printf("np=%d, name=%s\n", np, name)
	if np != fx.Func.Type().NumIn() { // 		Possibility of default params? // Should save # of params from SetInContext call?
		err = ErrParamsNotAdapted
		return
	}
	in := make([]reflect.Value, np) // 			Type check params for correctness?
	for k, param := range params {
		if params[k] == nil {
			in[k] = reflect.ValueOf((*string)(nil))
		} else {
			in[k] = reflect.ValueOf(param)
		}
	}
	fmt.Printf("Just Before %s %d %+v, %s\n", name, np, params[0], com.LF())
	result = fx.Func.Call(in)
	return
}
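Call looks the function up in a table, checks the argument count against reflect's view of the signature, wraps each parameter in a reflect.Value, and invokes it. Here is a small hedged sketch of that pattern with just the standard reflect package — illustrative names, no mutex, and no nil-parameter special case.

package main

import (
	"errors"
	"fmt"
	"reflect"
)

// funcs is a tiny stand-in for the context's function store.
var funcs = map[string]reflect.Value{
	"add": reflect.ValueOf(func(a, b int) int { return a + b }),
}

// call invokes a registered function by name after checking its arity.
func call(name string, params ...interface{}) ([]reflect.Value, error) {
	fx, ok := funcs[name]
	if !ok {
		return nil, errors.New(name + " function does not exist")
	}
	if len(params) != fx.Type().NumIn() {
		return nil, errors.New("wrong number of parameters")
	}
	in := make([]reflect.Value, len(params))
	for i, p := range params {
		in[i] = reflect.ValueOf(p)
	}
	return fx.Call(in), nil
}

func main() {
	out, err := call("add", 2, 3)
	if err != nil {
		panic(err)
	}
	fmt.Println(out[0].Int()) // 5
}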
Example #9
// n from beginning to m from end.
func (mt *MtType) EvalExpr(Context *eval.ContextType, n, m int) bool {
	m = len(mt.SVal) - m // Convert to Pos
	sv := mt.SVal[n:m]   // Slice of params to eval.
	fmt.Printf("mt.EvalExpr - TOP: ealuate sv=%+v ----------------------------------------------------- \n", sv)
	fmt.Printf("mt.EvalExpr - TOP: ealuate mt.SVal=%+v ----------------------------------------------------- \n", mt.SVal)
	// xyzzy -- temporary -- incomplete!!!!!!!!!!!!!!!!!!!!!!
	evalData := &eval.EvalType{
		Pos: 0,
		Ctx: Context,
		Mm:  mt.TokVal[n:m], // []tok.Token
	}
	fmt.Printf("INPUT m=%d n=%d, %s ----------------------------------------------------- \n", m, n, com.SVarI(evalData))
	tr := evalData.Pres2()
	fmt.Printf("BOTTOM: %s ----------------------------------------------------- \n", com.SVarI(tr))
	s := sv[0]
	v, t, _ := Context.GetFromContext(s)
	fmt.Printf("At: %s - in EvalExpr, v=%v t=%v for >%s<-\n", com.LF(), v, t, s)
	// xyzzy nil, 9 -- 9 is error, not found
	if t == eval.CtxType_Bool {
		fmt.Printf("Setting bool to true\n")
		mt.DataType = t
		mt.XValue = v
	}
	return true
}
Example #10
// Open a file - this puts the file at the end of the input.   This is used on the command line for a list of files
// in order.  Each opened and added to the end of the list.
func (pb *PBReadType) OpenFile(fn string) (err error) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	pb.FileName = fn
	pb.AbsFileName, _ = filepath.Abs(fn)
	pb.FilesOpened[pb.AbsFileName] = true

	// read file -> PbBuffer
	b := &ABuffer{
		FileName:    fn,
		AbsFileName: pb.AbsFileName,
		LineNo:      1,
		ColNo:       1,
	}
	pb.PbBuffer = append(pb.PbBuffer, b)

	bb, err := ioutil.ReadFile(fn)
	if err != nil {
		return
	}
	b.EofOnFile = true
	b.Pos = 0
	var rn rune
	var sz int
	b.Buffer = make([]rune, 0, len(bb))
	for ii := 0; ii < len(bb); ii += sz {
		rn, sz = utf8.DecodeRune(bb[ii:])
		b.Buffer = append(b.Buffer, rn)
	}

	return nil
}
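OpenFile converts the raw file bytes into a []rune by stepping through the buffer with utf8.DecodeRune; PbByteArray and PbFile use the same loop. A standalone sketch of that conversion follows, together with the shorter []rune(string(b)) form, which is equivalent for valid UTF-8 input.

package main

import (
	"fmt"
	"unicode/utf8"
)

// bytesToRunes decodes b rune by rune - the same loop OpenFile uses.
func bytesToRunes(b []byte) []rune {
	out := make([]rune, 0, len(b))
	for i := 0; i < len(b); {
		rn, sz := utf8.DecodeRune(b[i:])
		out = append(out, rn)
		i += sz
	}
	return out
}

func main() {
	b := []byte("héllo")
	fmt.Println(len(b), len(bytesToRunes(b))) // 6 5 (bytes vs runes)
	fmt.Println(string(bytesToRunes(b)))      // héllo
	fmt.Println(len([]rune(string(b))))       // 5 - the idiomatic shortcut
}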
Example #11
// Have we already seen the specified file.  Useful for require(fn)
func (pb *PBReadType) FileSeen(fn string) bool {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	a, _ := filepath.Abs(fn)
	if t, ok := pb.FilesOpened[a]; ok && t {
		return true
	}
	return false
}
Example #12
// Push back a single rune onto input.  You can call this more than one time.
func (pb *PBReadType) PbRune(rn rune) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())

	if pb.PbTop >= MaxAFew { // Buffer is full
		pb.pushbackIntoBuffer()
	}

	pb.PbAFew[pb.PbTop] = rn
	pb.PbTop++
}
Example #13
// Set the line/col/file-name for the current buffer - Useful for constructing something like C/Pre processor's #line
func (pb *PBReadType) SetPos(LineNo int, ColNo int, FileName string) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	pb.pushbackIntoBuffer()
	if len(pb.PbBuffer) > 0 {
		pb.PbBuffer[0].LineNo = LineNo
		pb.PbBuffer[0].ColNo = ColNo
		pb.PbBuffer[0].FileName = FileName
	}
	return
}
Example #14
// Push back a string.  Will be converted from an array of byte to an array of runes.
func (pb *PBReadType) PbByteArray(s []byte) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	rns := make([]rune, 0, len(s))
	var rn rune
	var sz int
	for ii := 0; ii < len(s); ii += sz {
		rn, sz = utf8.DecodeRune(s[ii:])
		rns = append(rns, rn)
	}
	pb.PbRuneArray(rns)
}
Example #15
// dfa.TokList.ReplaceToken ( dfa.MTab.Machine[ctx.St].Info.MatchLength, dfa.MTab.Machine[ctx.St].Info.ReplStr )
func (tl *TokenList) ReplaceToken(l int, s string) {
	ii := len(tl.TL) - 1
	lv := len(tl.TL[ii].AToken.Val)
	tl.TL[ii].AToken.IsRepl = true
	tl.TL[ii].AToken.ReplStr = s
	if lv-l >= 1 {
		tl.TL[ii].AToken.Val = tl.TL[ii].AToken.Val[0:lv-l] + s
	} else {
		com.DbPrintf("db_tok01", "Error: ReplaceToken has invalid data, %s\n", com.LF())
	}
	com.DbPrintf("db_tok01", "ReplaceToken: Match: ->%s<- Val: ->%s<-\n", tl.TL[ii].AToken.Match, tl.TL[ii].AToken.Val)
}
Example #16
// Output debugging info
func (pb *PBReadType) Dump01(fo io.Writer) {
	fmt.Fprintf(fo, "Dump At: %s\n", com.LF())
	fmt.Fprintf(fo, "N PbBuffer=%d\n", len(pb.PbBuffer))
	for ii := 0; ii < len(pb.PbBuffer); ii++ {
		fmt.Fprintf(fo, "  Buffer [%d] Len: %d Pos: %d\n", ii, len(pb.PbBuffer[ii].Buffer), pb.PbBuffer[ii].Pos)
		fmt.Fprintf(fo, "  Contents ->")
		for jj := pb.PbBuffer[ii].Pos; jj < len(pb.PbBuffer[ii].Buffer); jj++ {
			fmt.Fprintf(fo, "%s", string(pb.PbBuffer[ii].Buffer[jj]))
		}
		fmt.Fprintf(fo, "<-\n")
	}
	if pb.PbTop > 0 {
		fmt.Fprintf(fo, "PbTop=%d\n", pb.PbTop)
		fmt.Fprintf(fo, "  PbAFew ->")
		for jj := pb.PbTop - 1; jj >= 0; jj-- {
			fmt.Fprintf(fo, "%s", string(pb.PbAFew[jj]))
		}
		fmt.Fprintf(fo, "<-\n")
	}
}
Example #17
// Place the contents of a file in buffers at the head so NextRune will pull from this next.
func (pb *PBReadType) PbFile(fn string) (err error) {
	com.DbPrintf("pbbuf01", "At: %s\n", com.LF())
	err = nil

	pb.pushbackIntoBuffer()

	pb.FileName = fn
	pb.AbsFileName, _ = filepath.Abs(fn)
	pb.FilesOpened[pb.AbsFileName] = true

	// read file -> PbBuffer
	b := &ABuffer{
		FileName:    fn,
		AbsFileName: pb.AbsFileName,
		LineNo:      1,
		ColNo:       1,
	}
	// pb.PbBuffer = append(pb.PbBuffer, b)
	// data = append([]string{"Prepend Item"}, data...)
	pb.PbBuffer = append([]*ABuffer{b}, pb.PbBuffer...) // prepend

	bb, err := ioutil.ReadFile(fn)
	if err != nil {
		return
	}
	b.EofOnFile = true
	b.Pos = 0
	var rn rune
	var sz int
	b.Buffer = make([]rune, 0, len(bb))
	for ii := 0; ii < len(bb); ii += sz {
		rn, sz = utf8.DecodeRune(bb[ii:])
		b.Buffer = append(b.Buffer, rn)
	}

	return
}
Example #18
func (s *ReTesteSuite) TestLexie(c *C) {

	// Test of Parsing REs into RE-TParseTrees
	// return
	fmt.Fprintf(os.Stderr, "Test Parsing of REs, %s\n", com.LF())

	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["parseExpression"] = true
	com.DbOnFlags["CalcLength"] = true
	com.DbOnFlags["DumpParseNodes"] = true
	com.DbOnFlags["DumpParseNodesX"] = true

	fmt.Printf("**** In Test RE\n")

	n_err := 0
	n_skip := 0

	for ii, vv := range Lexie00Data {
		if !vv.SkipTest {
			fmt.Printf("\n\n--- %d Test: %s -----------------------------------------------------------------------------\n\n", ii, vv.Test)
			lr := NewLexReType()
			lr.SetBuf(vv.Re)
			tn := 0
			cc, ww := lr.Next()
			for ww != LR_EOF {
				fmt.Printf("    %s % x = %d %s\n", string(cc), string(cc), ww, LR_TokTypeLookup[ww])

				if len(vv.SM) > 0 {
					// Check correct token
					if vv.SM[tn].Tok != ww {
						fmt.Printf("     Failed to return the correct token, expecting %d/%s, got %d/%s\n", vv.SM[tn].Tok, LR_TokTypeLookup[vv.SM[tn].Tok], ww,
							LR_TokTypeLookup[ww])
						c.Check(int(vv.SM[tn].Tok), Equals, int(ww))
						n_err++
					}
					// Check correct string/rune returned
					if !CmpByteArr(vv.SM[tn].Dat, []byte(cc)) {
						fmt.Printf("     The returned runes did not match\n")
						c.Check(string(vv.SM[tn].Dat), Equals, cc)
						n_err++
					}

				}

				tn++
				cc, ww = lr.Next()
			}
			fmt.Printf("\n--- %d End : %s -----------------------------------------------------------------------------\n\n", ii, vv.Test)
		}
	}

	for ii, vv := range Lexie01Data {
		if !vv.SkipTest {
			fmt.Printf("\n\n--- %d Test: %s -----------------------------------------------------------------------------\n\n", ii, vv.Test)

			lr := NewLexReType()
			lr.ParseRe(vv.Re)

			lr.Sigma = lr.GenerateSigma()
			fmt.Printf("Sigma: ->%s<-\n", lr.Sigma)

			if vv.Sigma != "" {
				if vv.Sigma != lr.Sigma {
					fmt.Printf("     The calculated and reference Sigma did not match\n")
					c.Check(vv.Sigma, Equals, lr.Sigma)
					n_err++
				}
			}

			fmt.Printf("\n--- %d End : %s -----------------------------------------------------------------------------\n\n", ii, vv.Test)
		}
	}

	// -------------------------------------------------------------------------------------------------------------------------------------------------
	lr := NewLexReType()
	com.DbOnFlags["DumpParseNodes"] = true
	com.DbOnFlags["DumpParseNodesX"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["parseExpression"] = true
	for i, v := range Test6Data {
		com.DbPrintf("debug", "\nTest[%03d]: `%s` %s\n\n", i, v.Re, strings.Repeat("-", 120-len(v.Re)))
		// fmt.Printf("%s *** com.Red *** %s\n", ansi.ColorCode("red"), ansi.ColorCode("reset"))
		lr.ParseRe(v.Re)
		lr.DumpParseNodes()
		if len(v.TopTok) > 0 {
			if len(v.TopTok) != len(lr.Tree.Children) {
				com.DbPrintf("debug", "%sError%s: wrong number of tokens prsed, Expected: %d Got %d\n", com.Red, com.Reset, len(v.TopTok), len(lr.Tree.Children))
				n_err++
			} else {
				for i := 0; i < len(v.TopTok); i++ {
					if v.TopTok[i] != lr.Tree.Children[i].LR_Tok {
						com.DbPrintf("debug", "%sError%s: invalid token returnd at postion %d\n", com.Red, com.Reset, i)
						c.Check(v.TopTok[i], Equals, lr.Tree.Children[i].LR_Tok)
						n_err++
					}
				}
			}
		}
		if len(v.TopVal) > 0 {
			if len(v.TopVal) != len(lr.Tree.Children) {
				com.DbPrintf("debug", "%sError%s: wrong number of tokens prsed, Expected: %d Got %d - Based on number of values, TopVal\n", com.Red, com.Reset, len(v.TopVal), len(lr.Tree.Children))
				n_err++
			} else {
				for i := 0; i < len(v.TopVal); i++ {
					if v.TopVal[i] != lr.Tree.Children[i].Item {
						com.DbPrintf("debug", "%sError%s: invalid value at postion %d, %s\n", com.Red, com.Reset, i, com.LF())
						n_err++
					}
				}
			}
		}
		if len(lr.Error) > v.NExpectedErr {
			com.DbPrintf("debug", "%sError%s: Errors reported in R.E. parsing %d\n", com.Red, com.Reset, len(lr.Error))
			n_err++
		} else if len(lr.Error) > 0 {
			com.DbPrintf("debug", "%sNote%s: Errors reported in R.E. parsing %d\n", com.Green, com.Reset, len(lr.Error))
		}
		lr.Error = lr.Error[:0]
		com.DbPrintf("debug", "\nDone[%03d]: `%s` %s\n\n", i, v.Re, strings.Repeat("-", 120-len(v.Re)))
	}
	if n_err > 0 {
		fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
		com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
	} else {
		fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset)
		com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset)
	}

	if n_skip > 0 {
		fmt.Fprintf(os.Stderr, "%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset)
		com.DbPrintf("debug", "\n\n%sSkipped, # of files without automated checks = %d%s\n", com.Yellow, n_skip, com.Reset)
	}
	if n_err > 0 {
		c.Check(n_err, Equals, 0)
		fmt.Fprintf(os.Stderr, "%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
		com.DbPrintf("debug", "\n\n%sFailed, # of errors = %d%s\n", com.Red, n_err, com.Reset)
	} else {
		fmt.Fprintf(os.Stderr, "%sPASS%s\n", com.Green, com.Reset)
		com.DbPrintf("debug", "\n\n%sPASS%s\n", com.Green, com.Reset)
	}
}
Example #19
func Test_PbBufer01(t *testing.T) {

	SymbolTable := make([]*MacroDefTestType, 0, 100)
	Define := func(name rune, body string) {
		for ii := 0; ii < len(SymbolTable); ii++ {
			// fmt.Printf("Search at %d, for %s\n", ii, string(name))
			if SymbolTable[ii] != nil && SymbolTable[ii].Rn == name {
				SymbolTable[ii].Body = body
				return
			}
		}
		// fmt.Printf("Append\n")
		SymbolTable = append(SymbolTable, &MacroDefTestType{Rn: name, Body: body})
	}
	ResetST := func() {
		// SymbolTable = make([]*MacroDefTestType, 0, 100)
		SymbolTable = SymbolTable[:1]
	}
	HaveMacro := func(name rune) (body string, found bool) {
		body = ""
		found = false
		for ii := 0; ii < len(SymbolTable); ii++ {
			if SymbolTable[ii] != nil && SymbolTable[ii].Rn == name {
				body, found = SymbolTable[ii].Body, true
				return
			}
		}
		return
	}

	for ii, vv := range Pb01Test {

		if !vv.SkipTest {

			// Implement a quick fetch-execute machine to test the PbBuffer - commands/opcodes are the Cmd* constants above.
			ss := ""
			pb := NewPbRead()
			ResetST()
			for pc, ww := range vv.Actions {

				switch ww.OpCode {
				case CmdOpenFile: // Open a file , at the tail end of list of input
					pb.OpenFile(ww.Fn)
					com.DbPrintf("testCode", "Open file %s At: %s\n", ww.Fn, com.LF())
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdPbString: // Push back a string
					pb.PbString(ww.Data)
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdPbRune: // Push back a rune
					pb.PbRune(ww.Rn)
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdPbRuneArray: // Push back a rune array
					pb.PbRuneArray(ww.RnS)
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdNextNChar:
					for ll := 0; ll < ww.X; ll++ {
						rn, done := pb.NextRune()
						if !done {
							ss = ss + string(rn)
						}
						com.DbPrintf("testCode", "Case 5: At: ->%s<- ll=%d ss >>>%s<<< %s\n", string(rn), ll, ss, com.LF())
					}
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdPeek:
					rn, done := pb.PeekRune()
					if done || rn != ww.Rn {
						t.Errorf("%04s: Peek at [pc=%d] in test [%s] did not work, got %s expected %s, done=%v\n", pc, ii, string(rn), string(ww.Rn), done)
					}
				case CmdOutputToEof:
					com.DbPrintf("testCode", "All Done: ss >>>%s<<< before At: %s\n", ss, com.LF())
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
					for rn, done := pb.NextRune(); !done; rn, done = pb.NextRune() {
						ss = ss + string(rn)
					}
					com.DbPrintf("testCode", "All Done: ss >>>%s<<< after At: %s\n", ss, com.LF())
				case CmdPbByteArray: // Push back a byte array
					pb.PbByteArray([]byte(ww.Data))
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdPbFile: // Open file and push contents back onto input at head of list. (Macro file, Include, Require)
					pb.PbFile(ww.Fn)
					com.DbPrintf("testCode", "Pb file %s At: %s\n", ww.Fn, com.LF())
					if com.DbOn("testDump") {
						pb.Dump01(os.Stdout)
					}
				case CmdFileSeen:
					fs := pb.FileSeen(ww.Fn)
					if fs != ww.FileSeenFlag {
						t.Errorf("%04s: Peek at [pc=%d] in test [%s] did not work, got %v expected %s for file seen flagv\n", pc, ii, fs, ww.FileSeenFlag)
					}
				case CmdGetPos: // Check get file name
					ln, cn, fn := pb.GetPos()
					com.DbPrintf("testCode", "fn=%s ln=%d cn=%d\n", fn, ln, cn)
					if ln != ww.LineNo {
						t.Errorf("%04s: %d: did not match line no Expected ->%d<-, Got ->%d<-\n", vv.Test, pc, ww.LineNo, ln)
					}
					if cn != ww.ColNo {
						t.Errorf("%04s: %d: did not match col no Expected ->%d<-, Got ->%d<-\n", vv.Test, pc, ww.ColNo, cn)
					}
					if fn != ww.Fn {
						t.Errorf("%04s: %d: did not match file name Expected ->%s<-, Got ->%s<-\n", vv.Test, pc, ww.Fn, fn)
					}
				case CmdSetPos: // Check get file name
					pb.SetPos(ww.LineNo, ww.ColNo, ww.Fn)
				case CmdResetST: // Reset symbol table
					ResetST()
				case CmdMacroProc: // Apply 1 char macros to input and process
					for rn, done := pb.NextRune(); !done; rn, done = pb.NextRune() {
						if m_body, m_found := HaveMacro(rn); m_found {
							pb.PbString(m_body)
						} else {
							ss = ss + string(rn)
						}
					}
				case CmdDefineMacro: // Define
					Define(ww.Rn, ww.Data)
				case CmdDumpBuffer: // Dump the buffer - debugging
					if ww.Fn == "" {
						pb.Dump01(os.Stdout)
					} else {
						fp, err := com.Fopen(ww.Fn, "w")
						if err == nil {
							pb.Dump01(fp)
							fp.Close()
						} else {
							pb.Dump01(os.Stdout)
							t.Errorf("%04s: Unable to open file for output ->%s<-, error: %s\n", vv.Test, ww.Fn, err)
						}
					}
				case CmdResetOutput: // Reset output
					ss = ""
				case CmdPushBackXCopies: // Special test to push back more than a buffer's worth of 'x'
					x := strings.Repeat(ww.Data, ww.X)
					pb.PbString(x)
				}
			}
			if ss != vv.Results {
				t.Errorf("%04s: did not match Expected ->%s<-, Got ->%s<-\n", vv.Test, vv.Results, ss)
			} else {
				com.DbPrintf("testCode", "%04s: Passed ------------------------------------------------------------------------------------------------\n\n", vv.Test)
			}
		}
	}

}
Example #20
File: re.go Project: pschlump/lexie
func expandCCL(s string) (ccl string) {
	ccl = ""
	com.DbPrintf("db2", "at %s\n", com.LF())

	pos := 0
	if len(s) > 0 && s[0:1] == "-" { // Check for leading '-' include in CCL
		ccl += "-"
		pos = 1
	}

	for ii := pos; ii < len(s); ii++ {
		com.DbPrintf("re2", "ii=%d remaining ->%s<-, %s\n", ii, s[ii:], com.LF())
		if strings.HasPrefix(s[ii:], "[:alphnum:]") {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// ccl += X_ALPHA
			ccl += X_LOWER
			ccl += X_UPPER
			ccl += X_NUMERIC
			ii += len("[:alphnum:]") - 1
		} else if strings.HasPrefix(s[ii:], "[:alpha:]") {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// ccl += X_ALPHA
			ccl += X_LOWER
			ccl += X_UPPER
			ii += len("[:alpha:]") - 1
		} else if strings.HasPrefix(s[ii:], "[:lower:]") {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			ccl += X_LOWER
			ii += len("[:lower:]") - 1
		} else if strings.HasPrefix(s[ii:], "[:upper:]") {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			ccl += X_UPPER
			ii += len("[:upper:]") - 1
		} else if strings.HasPrefix(s[ii:], "[:numeric:]") {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			ccl += X_NUMERIC
			ii += len("[:numeric:]") - 1
		} else if ii+9 <= len(s) && s[ii:ii+9] == "a-zA-Z0-9" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// ccl += X_ALPHA
			ccl += X_LOWER
			ccl += X_UPPER
			ccl += X_NUMERIC
			ii += 8
		} else if ii+6 <= len(s) && s[ii:ii+6] == "a-zA-Z" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// ccl += X_ALPHA
			ccl += X_LOWER
			ccl += X_UPPER
			ii += 5
		} else if ii+3 <= len(s) && s[ii:ii+3] == "0-9" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// fmt.Printf("matched 0-9 pattern\n")
			ccl += X_NUMERIC
			ii += 2
		} else if ii+3 <= len(s) && s[ii:ii+3] == "a-z" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			ccl += X_LOWER
			ii += 2
		} else if ii+3 <= len(s) && s[ii:ii+3] == "A-Z" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			ccl += X_UPPER
			ii += 2
		} else if ii+2 < len(s) && s[ii+1:ii+2] == "-" {
			com.DbPrintf("re2", "   Matched: %s\n", com.LF())
			// xyzzyRune  TODO - this code is horribly non-UTF-8 compatible at this moment in time.
			e := s[ii+2]
			// fmt.Printf("matched a-b pattern, b=%s e=%s\n", string(s[ii]), string(e))
			if s[ii] >= e {
				fmt.Printf("Error: Poorly formatted character-class, beginning is larger than or equal to end of CCL\n") // Xyzzy - need line number etc
			}
			for b := s[ii]; b <= e; b++ {
				ccl += string(b)
			}
			ii += 2
		} else {
			ccl += s[ii : ii+1]
		}
		// fmt.Printf("bottom ccl now: ->%s<-\n", ccl)
	}

	return
}
Example #21
func (s *Reader_TestSuite) TestLexie(c *C) {

	fmt.Fprintf(os.Stderr, "Test Matcher test from ../in/django3.lex file, %s\n", com.LF())

	com.DbOnFlags["db_DumpDFAPool"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["db_Matcher_02"] = true
	// com.DbOnFlags["db_NFA_LnNo"] = true
	com.DbOnFlags["match"] = true
	// com.DbOnFlags["nfa3"] = true
	com.DbOnFlags["output-machine"] = true
	com.DbOnFlags["match"] = true
	com.DbOnFlags["match_x"] = true
	// com.DbOnFlags["nfa3"] = true
	// com.DbOnFlags["nfa4"] = true
	// com.DbOnFlags["db_DFAGen"] = true
	// com.DbOnFlags["pbbuf02"] = true
	// com.DbOnFlags["DumpParseNodes2"] = true
	com.DbOnFlags["db_FlushTokenBeforeBefore"] = true
	com.DbOnFlags["db_FlushTokenBeforeAfter"] = true
	com.DbOnFlags["db_tok01"] = true
	com.DbOnFlags["in-echo-machine"] = true // Output machine

	lex := NewLexie()
	lex.NewReadFile("../in/django3.lex")

	for ii, vv := range Lexie02Data {

		if !vv.SkipTest {

			fmt.Printf("\n\nTest:%s ------------------------- Start --------------------------, %d, Input: -->>%s<<--\n", vv.Test, ii, vv.Inp)

			// r := strings.NewReader(vv.Inp)
			r := pbread.NewPbRead()
			r.PbString(vv.Inp)
			r.SetPos(1, 1, fmt.Sprintf("sf-%d.txt", ii)) // simulate  file = sf-

			fmt.Printf("At: %s\n", com.LF())
			lex.MatcherLexieTable(r, "S_Init")
			fmt.Printf("At: %s\n", com.LF())

			if len(vv.Result) > 0 {
				fmt.Printf("At: %s\n", com.LF())
				if len(lex.TokList.TokenData) != len(vv.Result) {
					fmt.Printf("Lengths did not match, %s", com.SVarI(lex.TokList.TokenData))
					c.Check(len(lex.TokList.TokenData), Equals, len(vv.Result))
				} else {
					for i := 0; i < len(vv.Result); i++ {
						if vv.Result[i].StrTokNo != "" {
							c.Check(vv.Result[i].StrTokNo, Equals, in.Lookup_Tok_Name(int(lex.TokList.TokenData[i].TokNo)))
						} else {
							c.Check(vv.Result[i].TokNo, Equals, int(lex.TokList.TokenData[i].TokNo))
						}
						c.Check(vv.Result[i].Match, Equals, lex.TokList.TokenData[i].Match)
						if vv.Result[i].LineNo > 0 {
							c.Check(vv.Result[i].LineNo, Equals, lex.TokList.TokenData[i].LineNo)
						}
						if vv.Result[i].ColNo > 0 {
							c.Check(vv.Result[i].ColNo, Equals, lex.TokList.TokenData[i].ColNo)
						}
						if vv.Result[i].FileName != "" {
							c.Check(vv.Result[i].FileName, Equals, lex.TokList.TokenData[i].FileName)
						}
					}
				}
			}

			fmt.Printf("At: %s\n", com.LF())
			fmt.Printf("Test:%s ------------------------- End --------------------------\n\n", vv.Test)

		}
	}

}
Example #22
func main() {
	var fp *os.File

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)

	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ",")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			com.DbOnFlags[v] = true
		}
	}

	if opts.Echo != "" {
		com.DbOnFlags["in-echo-machine"] = true // Output machine
	}

	fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set for getting back stuff via Chanel

	// ------------------------------------------------------ input machine  --------------------------------------------------------------
	if opts.LexPat != "" {
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else if opts.ReadMachine != "" {
		fmt.Printf("Should input machine at this point\n")
		// xyzzy
	} else {
		fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n")
		os.Exit(1)
	}

	if opts.Machine != "" {
		fmt.Printf("Should output machine at this point\n")
		// xyzzy
	}

	// -------------------------------------------------- start scanning process  ----------------------------------------------------------
	if opts.Tokens != "" {
		fp, _ = com.Fopen(opts.Tokens, "w")
	} else {
		fp = os.Stdout
	}

	if opts.Input != "" {

		go func() {
			r := pbread.NewPbRead()
			r.OpenFile(opts.Input)
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()

	} else {

		go func() {
			r := pbread.NewPbRead()
			for _, fn := range ifnList[1:] {
				r.OpenFile(fn)
			}
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()

	}

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
	if false {
		// just print tokens out to check the scanning process and CLI options
		for msg := range pt.Lex.Message {
			fmt.Fprintf(fp, "%+v\n", msg)
		}
	} else {
		// Generate a parse tree and print out.
		xpt := pt.GenParseTree(0)
		pt.TheTree = xpt
		xpt.DumpMtType(fp, 0, 0)
		fmt.Printf("----------------------------------- start execute ----------------------------------------------------\n")
		pt.ExecuteFunctions(0)
		fmt.Printf("----------------------------------- debug output ----------------------------------------------------\n")
		if true {
			fmt.Printf("%s\n", com.SVarI(xpt))
		}
		fmt.Printf("----------------------------------- output ----------------------------------------------------\n")
		for i := 0; i < 1000000; i++ {
			pt.OutputTree0(fp, 0)
		}
		fmt.Printf("----------------------------------- errors ----------------------------------------------------\n")
		pp := pt.CollectErrorNodes(0)
		for ii, vv := range pp {
			fmt.Printf("Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
		}
		fmt.Printf("----------------------------------- final template results  ----------------------------------------------------\n")
		pt.OutputTree(fp, 0)
	}

	if opts.Tokens != "" {
		fp.Close()
	}

}
Example #23
File: re.go Project: pschlump/lexie
func (lr *LexReType) parseExpression(depth int, d_depth int, xTree *ReTreeNodeType) []ReTreeNodeType {
	//var first *ReTreeNodeType
	//var last *ReTreeNodeType
	pre := strings.Repeat("    ", depth)
	if depth == 0 {
		xTree = lr.Tree
		com.DbPrintf("parseExpression", "%sat %s !!!top!!!, depth=%d \n", pre, com.LF(), depth)
	}
	isFirst := true
	inOr := false
	com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
	c, w := lr.Next()
	for w != LR_EOF {
		com.DbPrintf("parseExpression", "%sat %s !!!top!!!, depth=%d c=->%s<- w=%d %s -- Loop Top -- xTree=%s\n\n",
			pre, com.LF(), depth, c, w, NameOfLR_TokType(w), com.SVarI(xTree))
		switch w {
		case LR_CL_BR: // }
			fallthrough
		case LR_COMMA: // ,
			fallthrough
		case LR_E_CCL:
			fallthrough
		case LR_MINUS: // -		-- Text if not in CCL and not 1st char in CCL
			fallthrough
		case LR_Text: //			-- Add a node to list, move right
			//if true {
			xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			//} else {
			// // Bad Idea - mucks up '*' and other processing -  To Simplify Tree needs to be done post-generation with Simp-Rules
			//	ll := len(lr.Tree.Children) - 1
			//	if ll >= 0 && lr.Tree.Children[ll].LR_Tok == LR_Text {
			//		lr.Tree.Children[ll].Item += c
			//	} else {
			//		xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			//	}
			//}

		case LR_CARROT: // ^		-- BOL		-- If at beginning, or after ( or | then BOL - else just text??
			fallthrough
		case LR_DOLLAR: // $		-- BOL		-- If at end, or just before ) or | the EOL - else just text??
			fallthrough
		case LR_DOT: // .		-- Match any char
			xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: w})

		case LR_OP_BR: // {
			if isFirst {
				lr.Warn(fmt.Sprintf("Invalid '%s' at beginning of R.E. assumed to be a text character missing esacape.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			} else {
				ll := len(xTree.Children) - 1
				tmp := xTree.Children[ll]
				newTree := lr.parseIterator(depth + 1)
				if newTree.Mm == 0 && newTree.Nn == InfiniteIteration {
					ll := len(xTree.Children) - 1
					tmp := xTree.Children[ll]
					com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
					xTree.Children[ll] = ReTreeNodeType{Item: "*", LR_Tok: LR_STAR, Children: []ReTreeNodeType{tmp}}
				} else {
					if newTree.Mm > newTree.Nn {
						lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Invalid Range, Start is bigger than end, {%d,%d}, %s", newTree.Mm, newTree.Nn, com.LF())))
					}
					com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
					// xTree.Children[ll] = ReTreeNodeType{Item: c, LR_Tok: LR_OP_BR, Children: []ReTreeNodeType{tmp}, Mm: newTree.Mm, Nn: newTree.Nn}
					newTree.Children = []ReTreeNodeType{tmp}
					xTree.Children[ll] = newTree
					// CCL: xTree.Children = append(xTree.Children, lr.parseCCL(depth+1, w)) // xyzzy needs work ---------------------------------------------------
				}
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			}

		case LR_STAR: // *		-- Error if 1st char, else take prev item from list, star and replace it.
			fallthrough
		case LR_PLUS: // +		-- Error if 1st char
			fallthrough
		case LR_QUEST: // ?		-- Error if 1st char
			if isFirst {
				lr.Warn(fmt.Sprintf("Invalid '%s' at beginning of R.E. assumed to be a text character missing esacape.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
			} else {
				ll := len(xTree.Children) - 1
				tmp := xTree.Children[ll]
				com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, ll=%d, xTree=%s tmp=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), ll, com.SVarI(xTree), com.SVarI(tmp))
				xTree.Children[ll] = ReTreeNodeType{Item: c, LR_Tok: w, Children: []ReTreeNodeType{tmp}}
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			}

		case LR_OR: // |		n-ary or operator
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			inOr = true

			// Left Machine is collected to be sub-machine == Beginning-to-current
			// left := xTree.Children // change to be left section back to but not including "|" node - or all if no | node.
			kk := -1
			for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
				if xTree.Children[jj].LR_Tok == LR_OR {
					kk = jj
					break
				}
			}
			if kk == -1 { // No OR tok found
				left := xTree.Children // change to be left section back to but not including "|" node - or all if no | node.
				ll := len(left)
				leftNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, ll, ll)}
				for jj := range left {
					leftNode.Children[jj] = left[jj]
				}

				newTop := ReTreeNodeType{Item: "|", LR_Tok: LR_OR, Children: make([]ReTreeNodeType, 0, 10)}
				newTop.Children = append(newTop.Children, leftNode) // only if no "or" node, else ref to "or" node
				xTree.Children = xTree.Children[:0]
				xTree.Children = append(xTree.Children, newTop)
				com.DbPrintf("parseExpression", "%sAT %s, w=%d %s, left=%s\n", pre, com.LF(), w, NameOfLR_TokType(w), com.SVarI(left))
			} else {
				if kk >= 0 {
					if kk < len(xTree.Children) {
						tmp := xTree.Children[kk+1:]
						xTree.Children = xTree.Children[0 : kk+1]
						newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
						for i := 0; i < len(tmp); i++ {
							newNode.Children[i] = tmp[i]
						}
						xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
					}
				}
			}

			// Or node is created like (LR_STAR)
			// Recursive call to parse rest of items at this level
			//newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, 1, 10)} // No recursive call
			//lr.parseExpression(depth+1, depth, &newNode.Children[0])
			//newTop.Children = append(newTop.Children, newNode.Children[0])

			// Take results of recursion and put in as RIGHT machine under LR_OR (optimize for N-Tree OR at this point)
			//xTree.Children = append(xTree.Children, newTop)
			//if depth > d_depth {
			//com.DbPrintf("parseExpression", "%sat %s, depth=%d d_detph=%d\n", pre, com.LF(), depth, d_depth)
			//	return xTree.Children
			//}

		case LR_OP_PAR: // (		-- Start of Sub_Re
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())

			newNode := ReTreeNodeType{Item: c, LR_Tok: LR_OP_PAR, Children: make([]ReTreeNodeType, 1, 10)}

			lr.parseExpression(depth+1, depth+1, &newNode.Children[0])

			newNode.Children[0].Item = c
			newNode.Children[0].LR_Tok = LR_OP_PAR

			xTree.Children = append(xTree.Children, newNode.Children[0])

			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())

		case LR_CL_PAR: // )
			// If in "or" node set - then collect last section to "or" ------------------------ <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			if depth == 0 {
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
				lr.Warn(fmt.Sprintf("Invalid '%s' at not properly nested.   Assuming that this was to match a character.", c))
				xTree.Children = append(xTree.Children, ReTreeNodeType{Item: c, LR_Tok: LR_Text})
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			} else {
				com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
				if inOr {
					com.DbPrintf("parseExpression", "%sAT Top of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
					kk := -1
					for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
						if xTree.Children[jj].LR_Tok == LR_OR {
							kk = jj
							break
						}
					}
					if kk >= 0 {
						if kk < len(xTree.Children) {
							tmp := xTree.Children[kk+1:]
							xTree.Children = xTree.Children[0 : kk+1]
							newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
							for i := 0; i < len(tmp); i++ {
								newNode.Children[i] = tmp[i]
							}
							xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
						}
					}
					com.DbPrintf("parseExpression", "%sAT Bo5 of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
				}
				return xTree.Children
			}
			com.DbPrintf("parseExpression", "%sat %s\n", pre, com.LF())
			inOr = false

		case LR_CCL: // [...]	-- CCL Node (Above)
			fallthrough
		case LR_N_CCL: // [^...]	-- N_CCL Node
			xTree.Children = append(xTree.Children, lr.parseCCL(depth+1, w)) // xyzzy needs work ---------------------------------------------------

		default:
			lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Invalid LR Token Type, '%d', '%s', %s", w, NameOfLR_TokType(w), com.LF())))
			return xTree.Children
		}
		isFirst = false
		com.DbPrintf("parseExpression", "%sAT %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
		c, w = lr.Next()
	}
	// If in "or" node set - then collect last section to "or" ------------------------ <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
	if inOr {
		com.DbPrintf("parseExpression", "%sAT Top of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
		kk := -1
		for jj := len(xTree.Children) - 1; jj >= 0; jj-- {
			if xTree.Children[jj].LR_Tok == LR_OR {
				kk = jj
				break
			}
		}
		if kk >= 0 {
			if kk < len(xTree.Children) {
				tmp := xTree.Children[kk+1:]
				xTree.Children = xTree.Children[0 : kk+1]
				newNode := ReTreeNodeType{Item: "", LR_Tok: LR_null, Children: make([]ReTreeNodeType, len(tmp), len(tmp))}
				for i := 0; i < len(tmp); i++ {
					newNode.Children[i] = tmp[i]
				}
				xTree.Children[kk].Children = append(xTree.Children[kk].Children, newNode)
			}
		}
		com.DbPrintf("parseExpression", "%sAT Bo5 of new code %s, BOTTOM xTree=%s\n", pre, com.LF(), com.SVarI(xTree))
	}
	return xTree.Children
}
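Both the LR_OR case and the end-of-expression handling above use the same move: find the last LR_OR node among the children, lift everything to its right into a fresh group node, and append that group as another alternative of the OR. The following is a stand-alone sketch of just that fold with an illustrative node type; it is not the lexie ReTreeNodeType.

package main

import "fmt"

type tnode struct {
	Item     string
	IsOr     bool
	Children []tnode
}

// foldIntoOr groups the children after the last OR node and appends the
// group as a new alternative of that OR - the pattern parseExpression
// repeats for '|', ')' and end-of-input.
func foldIntoOr(children []tnode) []tnode {
	kk := -1
	for jj := len(children) - 1; jj >= 0; jj-- {
		if children[jj].IsOr {
			kk = jj
			break
		}
	}
	if kk < 0 || kk+1 >= len(children) {
		return children // no OR node, or nothing to its right
	}
	group := tnode{Children: append([]tnode{}, children[kk+1:]...)}
	children = children[:kk+1]
	children[kk].Children = append(children[kk].Children, group)
	return children
}

func main() {
	ch := []tnode{{Item: "|", IsOr: true, Children: []tnode{{Item: "a"}}}, {Item: "b"}, {Item: "c"}}
	ch = foldIntoOr(ch)
	fmt.Println(len(ch), len(ch[0].Children)) // 1 2 - "bc" became the OR's second alternative
}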
Example #24
File: re.go Project: pschlump/lexie
func init() {
	if false {
		fmt.Printf("", com.SVarI(nil), com.LF())
	}
}
Example #25
func (dfa *DFA_PoolType) ConvNDA_to_DFA(nn *nfa.NFA_PoolType) {
	StartState := nn.InitState
	dfa.NoneVisited()

	nn.Sigma = nn.GenerateSigma()
	dfa.Sigma = nn.Sigma
	com.DbPrintf("dfa2", "Sigma at top ->%s<-\n", dfa.Sigma)

	// Build initial state
	dfa_set := nn.LambdaClosure([]int{StartState}) // Find all the lambda closures from specified state
	dfa_set = append(dfa_set, StartState)          // Add in initial state
	dfa_set = com.USortIntSlice(dfa_set)           // Make set unique
	com.DbPrintf("db_DFAGen", "\nStart: %s, \u03a3 =->%s<-, %s\n", com.SVar(dfa_set), dfa.Sigma, com.LF())
	A := dfa.GetDFAName(dfa_set)
	if r, is, info, Is0Ch := nn.IsTerminalState(dfa_set); is {
		dfa.Pool[A].Rv = r
		dfa.Pool[A].Is0Ch = Is0Ch
		dfa.Pool[A].Info = info
	} else {
		dfa.Pool[A].Info = info
	}
	if com.DbOn("db_DFAGen") {
		dfa.DumpPool(false)
	}
	// Look at all the locations we can get to from this "state"
	for _, S := range dfa.Sigma {
		StateSet := nn.LambdaClosureSet(dfa_set, string(S))
		com.DbPrintf("db_DFAGen", "FOR INITIAL state ->%s<- StateSet=%s, %s\n", string(S), com.SVar(StateSet), com.LF())
		if len(StateSet) > 0 {
			com.DbPrintf("db_DFAGen", "Have a non-empty result, %s\n", com.LF())
			com.DbPrintf("db_DFAGen", "<><><> this is the point where we should check to see if 'S' is DOT or NCCL, %s\n", com.LF())

			StateSetT := nn.LambdaClosure(StateSet) // need to lambda complete the state set
			StateSet = append(StateSet, StateSetT...)
			StateSet = com.USortIntSlice(StateSet) // Make set unique
			com.DbPrintf("db_DFAGen", "    Output Is %s, %s\n", com.SVar(StateSet), com.LF())
			B := 0
			if t := dfa.HaveStateAlready(StateSet); t != -1 { // Have Already
				B = t
				com.DbPrintf("db_DFAGen", "    Already have this state at location %d, %s\n", t, com.LF())
			} else {
				B = dfa.GetDFAName(StateSet)
				com.DbPrintf("db_DFAGen", "    *** New state %d, %s\n", B, com.LF())
			}
			dfa.AddEdge(A, B, string(S))
			com.DbPrintf("db_DFAGen", "    *** Before (top) %s\n", com.LF())
			if r, is, info, Is0Ch := nn.IsTerminalState(StateSet); is {
				dfa.Pool[B].Rv = r
				dfa.Pool[B].Is0Ch = Is0Ch
				dfa.Pool[B].Info = info
				com.DbPrintf("db_DFAGen", "    *** New state %d, %s\n", B, com.LF())
			} else if _, is, info, Is0Ch := nn.IsNonTerminalPushPopState(StateSet); is {
				dfa.Pool[B].Is0Ch = Is0Ch
				dfa.Pool[B].Info = info
				com.DbPrintf("db_DFAGen", "    *** New info for state %d, %s\n", B, com.LF())
			} else {
				dfa.Pool[B].Info = info
				com.DbPrintf("db_DFAGen", "    *** NO State Info for state %d, %s\n", B, com.LF())
			}
			com.DbPrintf("db_DFAGen", "    *** After (top) %s\n", com.LF())
			if com.DbOn("db_DFAGen") {
				fmt.Printf("for %s StateSet=%s, A=%d, B=%s %s\n", string(S), com.SVar(StateSet), A, com.SVar(B), com.LF())
				dfa.DumpPool(false)
			}
		}
	}
	dfa.Pool[A].Visited = true

	com.DbPrintf("db_DFAGen", "\nBefore Main Loop, %s\n", com.LF())
	limit := 0
	for stateToDo := dfa.FindNotVisited(); stateToDo != -1; stateToDo = dfa.FindNotVisited() {
		com.DbPrintf("db_DFAGen", "\nMain Loop: !!TOP!! State:%d\n", stateToDo)
		// -----------------------------------------------------------------------------------------------------------
		if !dfa.Pool[stateToDo].Visited {
			dfa_set := nn.LambdaClosure(dfa.Pool[stateToDo].StateSet)  // Find all the lambda closures from specified state
			dfa_set = append(dfa_set, dfa.Pool[stateToDo].StateSet...) // Add in initial state
			dfa_set = com.USortIntSlice(dfa_set)                       // Make set unique
			for _, S := range dfa.Sigma {
				StateSet := nn.LambdaClosureSet(dfa_set, string(S))
				com.DbPrintf("db_DFAGen", "    for initial state %s StateSet=%s, %s\n", string(S), com.SVar(StateSet), com.LF())
				com.DbPrintf("db_DFAGen", "<><><> this is the point where we should check to see if 'S' is DOT or NCCL, %s\n", com.LF())
				if len(StateSet) > 0 {
					com.DbPrintf("db_DFAGen", "    >>> Have a non-empty result, Input Is %s, %s\n", com.SVar(StateSet), com.LF())
					StateSetT := nn.LambdaClosure(StateSet) // need to lambda complete the state set
					StateSet = append(StateSet, StateSetT...)
					StateSet = com.USortIntSlice(StateSet) // Make set unique
					com.DbPrintf("db_DFAGen", "    >>> Output Is %s, %s\n", com.SVar(StateSet), com.LF())
					B := 0
					if t := dfa.HaveStateAlready(StateSet); t != -1 { // Have Already
						B = t
						com.DbPrintf("db_DFAGen", "    Already have this state at location %d, %s\n", t, com.LF())
					} else {
						B = dfa.GetDFAName(StateSet)
					}
					dfa.AddEdge(stateToDo, B, string(S))
					com.DbPrintf("db_DFAGen", "    *** Before %s\n", com.LF())
					if r, is, info, Is0Ch := nn.IsTerminalState(StateSet); is {
						dfa.Pool[B].Rv = r
						dfa.Pool[B].Is0Ch = Is0Ch
						dfa.Pool[B].Info = info
						com.DbPrintf("db_DFAGen", "    *** New state %d, %s\n", B, com.LF())
					} else if _, is, info, Is0Ch := nn.IsNonTerminalPushPopState(StateSet); is {
						dfa.Pool[B].Is0Ch = Is0Ch
						dfa.Pool[B].Info = info
						com.DbPrintf("db_DFAGen", "    *** New info for state %d, %s\n", B, com.LF())
					} else {
						com.DbPrintf("db_DFAGen", "    *** NO State Info for state %d, %s\n", B, com.LF())
						dfa.Pool[B].Info = info
					}
					com.DbPrintf("db_DFAGen", "    *** After %s\n", com.LF())
					if com.DbOn("db_DFAGen") {
						fmt.Printf("    Add New Edge on %s fr %d to %d, %s\n", string(S), stateToDo, B, com.LF())
						fmt.Printf("    for %s StateSet=%s, A(stateToDo)=%d, %s\n", string(S), com.SVar(StateSet), stateToDo, com.LF())
						dfa.DumpPool(false)
					}
				}
			}
		}
		// -----------------------------------------------------------------------------------------------------------
		dfa.Pool[stateToDo].Visited = true
		limit++
		if limit > 50000 {
			break
		}
	}
	dfa.VerifyMachine()

}
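ConvNDA_to_DFA is the usual subset construction: start from the lambda closure of the NFA start state, then for every unvisited DFA state and every character in Sigma, compute the reachable NFA set, close it under lambda edges, and either reuse or create the corresponding DFA state. The lambda-closure step itself is a small reachability walk; here is a hedged standalone sketch over an illustrative edge map, not the nfa package types.

package main

import (
	"fmt"
	"sort"
)

// lambdaClosure returns every state reachable from the seed set by
// following only lambda (epsilon) edges, seed states included.
func lambdaClosure(lambda map[int][]int, seed []int) []int {
	seen := make(map[int]bool)
	stack := append([]int{}, seed...)
	for len(stack) > 0 {
		s := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if seen[s] {
			continue
		}
		seen[s] = true
		stack = append(stack, lambda[s]...)
	}
	out := make([]int, 0, len(seen))
	for s := range seen {
		out = append(out, s)
	}
	sort.Ints(out) // unique and sorted, comparable to com.USortIntSlice's result
	return out
}

func main() {
	lambda := map[int][]int{0: {1}, 1: {2}, 3: {4}}
	fmt.Println(lambdaClosure(lambda, []int{0})) // [0 1 2]
	fmt.Println(lambdaClosure(lambda, []int{3})) // [3 4]
}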
Example #26
File: re.go Project: pschlump/lexie
// mm, nn := lr.parseIterator ( depth+1 )
func (lr *LexReType) parseIterator(depth int) (tree ReTreeNodeType) {
	pos := 0
	com.DbPrintf("db2", "parseIterator Top: depth=%d\n", depth)
	s := ""
	c, w := lr.Next()
	for w != LR_EOF {
		switch w {
		case LR_MINUS: // -		-- Text if not in CCL and not 1st char in CCL
			fallthrough
		case LR_CARROT: // ^		-- BOL
			fallthrough
		case LR_DOT: // .		-- Match any char
			fallthrough
		case LR_STAR: // *		-- Error if 1st char, else take prev item from list, star and replace it.
			fallthrough
		case LR_PLUS: // +		-- Error if 1st char
			fallthrough
		case LR_QUEST: // ?		-- Error if 1st char
			fallthrough
		case LR_OP_PAR: // (		-- Start of Sub_Re
			fallthrough
		case LR_CL_PAR: // )
			fallthrough
		case LR_CCL: // [...]	-- CCL Node (Above)
			fallthrough
		case LR_OR: // |
			fallthrough
		case LR_OP_BR: // {
			fallthrough
		case LR_DOLLAR: // $
			fallthrough
		case LR_E_CCL:
			fallthrough
		case LR_N_CCL: // [^...]	-- N_CCL Node
			lr.Error = append(lr.Error, errors.New(fmt.Sprintf("in parseIterator, Invalid {m,n} - invalid chars found, %s", com.LF())))
			tree.Mm, tree.Nn = 1, 1
			tree.LR_Tok = LR_OP_BR
			return

		case LR_Text: //			-- Add a node to list, move right
			if c[0] >= '0' && c[0] <= '9' || c[0] == ',' {
				s += c // Add To Iterator
			} else {
				lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Unreacable Code, invalid token in parseIterator, %s", com.LF())))
				tree.Mm, tree.Nn = lr.parseIteratorString(s) // Do Something, Return
				tree.Item = "{"
				tree.LR_Tok = LR_OP_BR
				return
			}
		case LR_COMMA: // ,
			s += c // Add To Iterator
		case LR_CL_BR: // }
			tree.Mm, tree.Nn = lr.parseIteratorString(s) // Do Something, Return
			tree.Item = "{"
			tree.LR_Tok = LR_OP_BR
			return
		default:
			lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Unreacable Code, invalid token in parseIterator, %s", com.LF())))
			tree.Mm, tree.Nn = lr.parseIteratorString(s) // Do Something, Return
			tree.Item = "{"
			tree.LR_Tok = LR_OP_BR
			return
		}
		pos++
		c, w = lr.Next()
	}
	if w == LR_EOF {
		lr.Err("EOF found in Iterator {m,n}.")
	}
	tree.Mm, tree.Nn = lr.parseIteratorString(s) // Do Something, Return
	tree.Item = "{"
	tree.LR_Tok = LR_OP_BR
	return
}
Example #27
func (dfa *DFA_PoolType) DumpPool(all bool) {
	if all {
		com.DbPrintf("db_DumpDFAPool", "Cur: %d Top: %d NextFree %d\n", dfa.Cur, dfa.Top, dfa.NextFree)
	}
	com.DbPrintf("db_DumpDFAPool", "\n---------------------------- DFA Output -----------------------------------------------\n")
	com.DbPrintf("db_DumpDFAPool", "\nDFA InitState: %d, Sigma ->%s<-\n\n", dfa.InitState, dfa.Sigma)
	pLnNo := com.DbOn("db_DFA_LnNo")
	IfLnNo := func(s string) string {
		if pLnNo {
			t := fmt.Sprintf("[%3s]", s)
			return t
		}
		return ""
	}
	com.DbPrintf("db_DumpDFAPool", "%3s%s: ", "St", IfLnNo("/Ln"))
	com.DbPrintf("db_DumpDFAPool", " %12s %12s \u2714              \tEdges", "StateName", "StateSet")
	com.DbPrintf("db_DumpDFAPool", "\n\n")
	for ii, vv := range dfa.Pool {
		if all || vv.IsUsed {
			com.DbPrintf("db_DumpDFAPool", "%3d%s: ", ii, IfLnNo(vv.LineNo))
			com.DbPrintf("db_DumpDFAPool", " %12s %12s %s :", vv.StateName, com.SVar(vv.StateSet), com.ChkOrBlank(vv.Visited))
			if vv.Rv > 0 {
				if vv.Is0Ch {
					com.DbPrintf("db_DumpDFAPool", " \u03c4:Tau:%04d ", vv.Rv)
				} else {
					com.DbPrintf("db_DumpDFAPool", " T:%04d ", vv.Rv)
				}
			} else {
				com.DbPrintf("db_DumpDFAPool", "        ")
			}
			if com.DbOn("db_DumpDFAPool") {
				fmt.Printf("\t E:")
				for _, ww := range vv.Next2 {
					if ww.Is0ChMatch {
						fmt.Printf("//Found  \u03c4 (%s) //", com.LF()) // Show a Tau(t) for a lambda that matchiens on else conditions.
					}
					if ww.IsLambda {
						fmt.Printf("{  ERROR!! \u03bb  %2d -> %2d  %s}  ", ww.From, ww.To, ww.LineNo)
					} else {
						// fmt.Printf("{ \"%s\" %2d -> %2d  %s}  ", ww.On, ww.From, ww.To, IfLnNo(ww.LineNo))
						on, _ := utf8.DecodeRune([]byte(ww.On))
						son := fmt.Sprintf("%q", ww.On)
						switch on {
						case re.R_DOT: // = '\uF8FA' // Any char in Sigma
							son = "DOT/uF8FA"
						case re.R_BOL: // = '\uF8F3' // Beginning of line
							son = "BOL/uF8F3"
						case re.R_EOL: // = '\uF8F4' // End of line
							son = "EOL/uF8F4"
						case re.R_NUMERIC: // = '\uF8F5'
							son = "NUMERIC/uF8F5"
						case re.R_LOWER: // = '\uF8F6'
							son = "LOWER/uF8F6"
						case re.R_UPPER: // = '\uF8F7'
							son = "UPPER/uF8F7"
						case re.R_ALPHA: // = '\uF8F8'
							son = "ALPHA/uF8F8"
						case re.R_ALPHNUM: // = '\uF8F9'
							son = "ALPHANUM/uF8F9"
						case re.R_EOF: // = '\uF8FB'
							son = "EOF/uF8FB"
						case re.R_not_CH: // = '\uF8FC' // On input lookup if the char is NOT in Sigma then it is returned as this.
							son = "else_CH/uF8Fc"
						case re.R_N_CCL: // = '\uF8FD' // If char is not matched in this state then take this path
							son = "N_CCL/uF8Fd"
						case re.R_LAMBDA_MATCH: // = '\uF8FE'
							son = "LambdaM/uF8FE"
						}
						fmt.Printf("{ %s  %2d -> %2d  %s}  ", son, ww.From, ww.To, IfLnNo(ww.LineNo))
					}
				}
				fmt.Printf("\n")
				if vv.Info.Action != 0 || vv.Info.MatchLength != 0 {
					// fmt.Printf("\t\t\tInfo: %s\n", com.SVar(vv.Info))		// xyzzy - output Info
					// xyzzy - NextState info
					fmt.Printf("\t\t\tDFA.Info: %s", nfa.DumpInfo(vv.Info))
					// if ((vv.Info.Action&com.A_Pop) != 0 || (vv.Info.Action&com.A_Push) != 0 || (vv.Info.Action&com.A_Reset) != 0) && !vv.Info.HardMatch {		// xyzzy8
					fmt.Printf(" IsHard=%v (((false imples else case Rv!)))\n", vv.Info.HardMatch)
				}
				fmt.Printf("\n")
			}
		}
	}
}
Example #28
File: re.go Project: pschlump/lexie
func (lr *LexReType) Err(s string) {
	if com.DbOn("OutputErrors") {
		fmt.Printf("Error: %s\n", s)
	}
	lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Error: %s, %s", s, com.LF(2))))
}
Example #29
func main() {

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)

	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	test01.Dbf = os.Stdout
	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ";")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			if len(v) > 5 && v[0:4] == "out:" {
				test01.Dbf, _ = com.Fopen(v[4:], "w")
			} else {
				com.DbOnFlags[v] = true
			}
		}
	}

	fmt.Fprintf(test01.Dbf, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ Options --------------------------------------------------------------
	// should be read in from a .json file!
	Options.MdExtensions = []string{".md", ".makrdown"}
	Options.ConvertMdToHtml = true
	Options.LeaveTmpFiles = false
	Options.TmpDir = "./tmp"

	if !com.Exists(Options.TmpDir) {
		os.Mkdir(Options.TmpDir, 0700)
	}

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := test01.NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set for getting back stuff via Chanel

	// ------------------------------------------------------ input machine  --------------------------------------------------------------
	if opts.LexPat != "" {
		if !com.Exists(opts.LexPat) {
			fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine.  Missing file.\n")
			os.Exit(1)
		}
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else {
		fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine.\n")
		os.Exit(1)
	}

	// -------------------------------------------------- start scanning process  ----------------------------------------------------------
	tp := strings.Split(opts.TemplatePath, ";")
	for _, tps := range tp {
		pt.OpenLibraries(tps)
	}

	if opts.Recursive {

		CopyInAssets(opts.SiteName, opts.BaseAssets, opts.SiteAssets, opts.Output, opts.User, opts.Theme, opts.ForcedCpFlag)

		//fmt.Printf("After CopyInAssets: Not Implemented Yet\n")
		//os.Exit(1)

		data2 := make(map[string]string)
		data2["site_name"] = opts.SiteName

		if opts.Input == "" && opts.SiteName != "" {
			opts.Input = com.Qt("./site/%{site_name%}/", data2)
		}
		if opts.Output == "" && opts.SiteName != "" {
			opts.Output = com.Qt("./www/%{site_name%}/", data2)
		}

		// ---------------------------------------------------------------------------------------------------------------------------------

		// 1. Do the rsync copy ops
		// 2. Process the set of files from -i -> -o

		// -- Process the static files -----------------------------------------------------------------------------------------------------
		dp := make([]string, 0, 10)
		if opts.Input != "" {
			dp = append(dp, opts.Input)
		} else {
			for _, fn := range ifnList[1:] {
				dp = append(dp, fn)
			}
		}

		var fns, dirs []string

		for _, dir := range dp {
			if db_debug3 {
				fmt.Printf("Getting for %s\n", dir)
			}
			t_fns, t_dirs, err := com.GetFilenamesRecrusive(dir)
			if err != nil {
				if db_debug3 {
					fmt.Printf("Error: %s on %s\n", err, dir)
				}
			} else {
				fns = append(fns, t_fns...)
				dirs = append(dirs, t_dirs...)
			}
		}

		if db_debug3 {
			fmt.Printf("fns: %+v\n", fns)
			fmt.Printf("dirs: %+v\n", dirs)
		}

		mds := com.ReplaceEach(dirs, opts.Input, opts.Output)
		for _, aDir := range mds {
			if !com.Exists(aDir) {
				err := os.Mkdir(aDir, 0764)
				if err != nil {
					if db_debug3 {
						fmt.Printf("Error: Unable to create directory %s, error: %s\n", aDir, err)
					}
				}
			}
		}

		mf := com.ReplaceEach(fns, opts.Input, opts.Input+"/%{user%}/%{theme%}/")
		mO := com.ReplaceEach(fns, opts.Input, opts.Output)
		if db_debug3 {
			fmt.Printf("modded_files: %+v\n", mf)
		}

		final := make([]string, 0, len(mf))
		data := make(map[string]string)
		has_err := false

		for _, mff := range mf {
			data["user"] = opts.User
			data["theme"] = opts.Theme
			mfmod := com.Qt(mff, data)
			if com.Exists(mfmod) {
				final = append(final, mfmod)
			} else {

				data["user"] = ""
				// data["theme"] = "A-Theme"
				mfmod := com.Qt(mff, data)
				if com.Exists(mfmod) {
					final = append(final, mfmod)
				} else {

					data["user"] = opts.User
					// data["user"] = ""
					data["theme"] = ""
					mfmod := com.Qt(mff, data)
					if com.Exists(mfmod) {
						final = append(final, mfmod)
					} else {
						fmt.Printf("Error: File Missing %s\n", mfmod)
						has_err = true
					}
				}
			}
		}
		if has_err {
			fmt.Printf("Error occured...\n")
			os.Exit(1)
		}

		if db_debug3 {
			fmt.Printf("Final Files:%s\n", final)
		}
		tmpFiles := make([]string, 0, len(final))
		for ii, yy := range final {
			yyt := yy
			fmt.Printf("Process %s to %s\n", yy, mO[ii])
			ext := filepath.Ext(yy)
			if Options.ConvertMdToHtml && com.InArray(ext, Options.MdExtensions) { // if ext == ".md" || ext == ".markdown" {
				fmt.Printf("\t Convetting from MD to HTML\n")
				in := yy
				//yyt = "./tmp/" + com.Basename(yy) + ".html"	// old code - not using a Tempfile
				//err := ConvertMdToHtmlFile(in, yyt)
				yyt, err = ConvertMdToHtmlFileTmpFile(in)
				if err != nil {
					fmt.Printf("Error: In processing from markdown %s to HTML %s: %s\n", in, yyt, err)
					os.Exit(1)
				}
				mO[ii] = com.RmExt(mO[ii]) + ".html"
				tmpFiles = append(tmpFiles, yyt)
			}
			ProcessFileList(pt, []string{yyt}, mO[ii])
		}
		if !Options.LeaveTmpFiles {
			for _, fn := range tmpFiles {
				os.Remove(fn)
			}
		}

	} else {

		inList := make([]string, 0, 10)
		if opts.Input != "" {
			inList = append(inList, opts.Input)
		} else {
			inList = ifnList[1:]
		}

		ProcessFileList(pt, inList, opts.Output)

	}

}
Example #30
File: re.go Project: pschlump/lexie
func (lr *LexReType) parseCCL(depth int, ww LR_TokType) (tree ReTreeNodeType) {
	pos := 0
	com.DbPrintf("db2", "parseCCL Top: depth=%d,  %d=%s\n", depth, ww, NameOfLR_TokType(ww))
	s := ""
	dx := 0
	marked := false
	c, w := lr.Next()
	for w != LR_EOF {
		com.DbPrintf("re2", "Top of parseCCL dx=%d ->%s<-\n", dx, c)
		switch w {
		case LR_MINUS: // -		-- Text if not in CCL and not 1st char in CCL
			fallthrough
		case LR_Text: //			-- Add a node to list, move right
			fallthrough
		case LR_CARROT: // ^		-- BOL
			fallthrough
		case LR_DOT: // .		-- Match any char
			fallthrough
		case LR_STAR: // *		-- Error if 1st char, else take prev item from list, star and replace it.
			fallthrough
		case LR_PLUS: // +		-- Error if 1st char
			fallthrough
		case LR_QUEST: // ?		-- Error if 1st char
			fallthrough
		case LR_OP_PAR: // (		-- Start of Sub_Re
			fallthrough
		case LR_CL_PAR: // )
			fallthrough
		case LR_OR: // |
			fallthrough
		case LR_OP_BR: // {
			fallthrough
		case LR_CL_BR: // }
			fallthrough
		case LR_COMMA: // ,
			fallthrough
		case LR_DOLLAR: // $
			s += c

		case LR_N_CCL: // [^...]	-- N_CCL Node
			marked = true
			com.DbPrintf("re2", "    incr to %d, %s\n", dx, com.LF())
			s += c // Add To CCL

		case LR_CCL: // [...]	-- CCL Node (Above)
			marked = true
			com.DbPrintf("re2", "    incr to %d, %s\n", dx, com.LF())
			s += c // Add To CCL

		case LR_E_CCL:
			dx--
			com.DbPrintf("re2", "    decr to %d, %s\n", dx, com.LF())
			if dx < 0 {
				tree.Item = expandCCL(s) // Do Something, Return
				tree.LR_Tok = ww
				return
			} else {
				s += c //
			}

		default:
			lr.Error = append(lr.Error, errors.New(fmt.Sprintf("Unreacable Code, invalid token in parseCCL = %d, %s", w, com.LF())))
			tree.Item = expandCCL(s) // Do Something, Return
			tree.LR_Tok = ww
			return
		}
		pos++
		c, w = lr.Next()

		if c == ":" && marked {
			marked = false
			dx++
		}
	}
	if w == LR_EOF {
		lr.Err("EOF found in Character Class [...] or [^...].")
	}
	tree.Item = expandCCL(s)
	tree.LR_Tok = ww
	return
}