Code example #1
File: t01_test.go  Project: pschlump/lexie
// ---------------------------------------------------------------------------------------------------------------------------------------
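// Test_Test01_01 enables a set of lexer/matcher debug flags, loads the django3.lex machine,
// runs each file in data_01 through the template engine, and compares every generated .out
// file with its .ref reference.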
func Test_Test01_01(t *testing.T) {

	com.DbOnFlags["trace-builtin"] = true
	com.DbOnFlags["match"] = true

	com.DbOnFlags["db_DumpDFAPool"] = true
	com.DbOnFlags["db_DumpPool"] = true
	com.DbOnFlags["db_Matcher_02"] = true
	// com.DbOnFlags["db_NFA_LnNo"] = true
	// com.DbOnFlags["nfa3"] = true
	com.DbOnFlags["output-machine"] = true
	com.DbOnFlags["match4"] = true
	com.DbOnFlags["match_x"] = true
	// com.DbOnFlags["nfa4"] = true
	// com.DbOnFlags["db_DFAGen"] = true
	// com.DbOnFlags["pbbuf02"] = true
	// com.DbOnFlags["DumpParseNodes2"] = true
	com.DbOnFlags["db_FlushTokenBeforeBefore"] = true
	com.DbOnFlags["db_FlushTokenBeforeAfter"] = true
	com.DbOnFlags["db_tok01"] = true
	com.DbOnFlags["in-echo-machine"] = true // Output machine

	Dbf = os.Stdout

	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set to get results back over a channel

	pt.Lex.NewReadFile("../in/django3.lex")

	pt.OpenLibraries("./tmpl")

	for _, fn := range data_01 {
		fn_o := com.RmExt(fn) + ".out"
		fn_r := com.RmExt(fn) + ".ref"
		pt.ReadFileAndRun(fn, fn_o)
		if !com.CompareFiles(fn_o, fn_r) {
			fmt.Printf("Files did not match %s %s\n", fn_o, fn_r)
			t.Errorf("%s error\n", fn_o)
		}
	}

}
Code example #2
File: main.go  Project: pschlump/lexie
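// main parses the command line, loads the lexical analyzer machine given with -l, and then
// either processes an explicit list of input files or, in recursive mode, copies the site
// assets and renders the whole input tree into the output directory.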
func main() {

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)

	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	test01.Dbf = os.Stdout
	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ";")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			if len(v) > 5 && v[0:4] == "out:" {
				test01.Dbf, _ = com.Fopen(v[4:], "w")
			} else {
				com.DbOnFlags[v] = true
			}
		}
	}

	fmt.Fprintf(test01.Dbf, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ Options --------------------------------------------------------------
	// should be read in from a .json file!
	Options.MdExtensions = []string{".md", ".markdown"}
	Options.ConvertMdToHtml = true
	Options.LeaveTmpFiles = false
	Options.TmpDir = "./tmp"

	if !com.Exists(Options.TmpDir) {
		os.Mkdir(Options.TmpDir, 0700)
	}

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := test01.NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set to get results back over a channel

	// ------------------------------------------------------ input machine  --------------------------------------------------------------
	if opts.LexPat != "" {
		if !com.Exists(opts.LexPat) {
			fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine.  Missing file.\n")
			os.Exit(1)
		}
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else {
		fmt.Fprintf(os.Stderr, "Fatal: Must have -l <fn> lexical analyzer machine.\n")
		os.Exit(1)
	}

	// -------------------------------------------------- start scanning process  ----------------------------------------------------------
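	// opts.TemplatePath may name several template library directories, separated by ";".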
	tp := strings.Split(opts.TemplatePath, ";")
	for _, tps := range tp {
		pt.OpenLibraries(tps)
	}

	if opts.Recursive {
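		// Recursive (site-build) mode: copy the base and site assets into the output tree,
		// then walk the input tree and render every file, mirroring its directory layout.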

		CopyInAssets(opts.SiteName, opts.BaseAssets, opts.SiteAssets, opts.Output, opts.User, opts.Theme, opts.ForcedCpFlag)

		//fmt.Printf("After CopyInAssets: Not Implemented Yet\n")
		//os.Exit(1)

		data2 := make(map[string]string)
		data2["site_name"] = opts.SiteName

		if opts.Input == "" && opts.SiteName != "" {
			opts.Input = com.Qt("./site/%{site_name%}/", data2)
		}
		if opts.Output == "" && opts.SiteName != "" {
			opts.Output = com.Qt("./www/%{site_name%}/", data2)
		}

		// ---------------------------------------------------------------------------------------------------------------------------------

		// 1. Do the rsync copy ops
		// 2. Process the set of files from -i -> -o

		// -- Process the static files -----------------------------------------------------------------------------------------------------
		dp := make([]string, 0, 10)
		if opts.Input != "" {
			dp = append(dp, opts.Input)
		} else {
			for _, fn := range ifnList[1:] {
				dp = append(dp, fn)
			}
		}

		var fns, dirs []string

		for _, dir := range dp {
			if db_debug3 {
				fmt.Printf("Getting for %s\n", dir)
			}
			t_fns, t_dirs, err := com.GetFilenamesRecrusive(dir)
			if err != nil {
				if db_debug3 {
					fmt.Printf("Error: %s on %s\n", err, dir)
				}
			} else {
				fns = append(fns, t_fns...)
				dirs = append(dirs, t_dirs...)
			}
		}

		if db_debug3 {
			fmt.Printf("fns: %+v\n", fns)
			fmt.Printf("dirs: %+v\n", dirs)
		}

		mds := com.ReplaceEach(dirs, opts.Input, opts.Output)
		for _, aDir := range mds {
			if !com.Exists(aDir) {
				err := os.Mkdir(aDir, 0764)
				if err != nil {
					if db_debug3 {
						fmt.Printf("Error: Unable to create directory %s, error: %s\n", aDir, err)
					}
				}
			}
		}
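		// Next, map each input file both to a themed source path (…/%{user%}/%{theme%}/…)
		// and to its final output path under opts.Output.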

		mf := com.ReplaceEach(fns, opts.Input, opts.Input+"/%{user%}/%{theme%}/")
		mO := com.ReplaceEach(fns, opts.Input, opts.Output)
		if db_debug3 {
			fmt.Printf("modded_files: %+v\n", mf)
		}

		final := make([]string, 0, len(mf))
		data := make(map[string]string)
		has_err := false
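
		// Resolve each themed path below: try %{user%}/%{theme%} first, then an empty user,
		// then an empty theme; report any file that is still missing.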

		for _, mff := range mf {
			data["user"] = opts.User
			data["theme"] = opts.Theme
			mfmod := com.Qt(mff, data)
			if com.Exists(mfmod) {
				final = append(final, mfmod)
			} else {

				data["user"] = ""
				// data["theme"] = "A-Theme"
				mfmod := com.Qt(mff, data)
				if com.Exists(mfmod) {
					final = append(final, mfmod)
				} else {

					data["user"] = opts.User
					// data["user"] = ""
					data["theme"] = ""
					mfmod := com.Qt(mff, data)
					if com.Exists(mfmod) {
						final = append(final, mfmod)
					} else {
						fmt.Printf("Error: File Missing %s\n", mfmod)
						has_err = true
					}
				}
			}
		}
		if has_err {
			fmt.Printf("Error occurred...\n")
			os.Exit(1)
		}

		if db_debug3 {
			fmt.Printf("Final Files:%s\n", final)
		}
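		// Process each resolved file to its output path below; markdown sources are first
		// converted to HTML in a temporary file, and the temp files are removed afterwards
		// unless Options.LeaveTmpFiles is set.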
		tmpFiles := make([]string, 0, len(final))
		for ii, yy := range final {
			yyt := yy
			fmt.Printf("Process %s to %s\n", yy, mO[ii])
			ext := filepath.Ext(yy)
			if Options.ConvertMdToHtml && com.InArray(ext, Options.MdExtensions) { // if ext == ".md" || ext == ".markdown" {
				fmt.Printf("\t Converting from MD to HTML\n")
				in := yy
				//yyt = "./tmp/" + com.Basename(yy) + ".html"	// old code - not using a Tempfile
				//err := ConvertMdToHtmlFile(in, yyt)
				yyt, err = ConvertMdToHtmlFileTmpFile(in)
				if err != nil {
					fmt.Printf("Error: In processing from markdown %s to HTML %s: %s\n", in, yyt, err)
					os.Exit(1)
				}
				mO[ii] = com.RmExt(mO[ii]) + ".html"
				tmpFiles = append(tmpFiles, yyt)
			}
			ProcessFileList(pt, []string{yyt}, mO[ii])
		}
		if !Options.LeaveTmpFiles {
			for _, fn := range tmpFiles {
				os.Remove(fn)
			}
		}

	} else {

		inList := make([]string, 0, 10)
		if opts.Input != "" {
			inList = append(inList, opts.Input)
		} else {
			inList = ifnList[1:]
		}

		ProcessFileList(pt, inList, opts.Output)

	}

}
Code example #3
File: ringo-cli.go  Project: pschlump/lexie
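// main loads a lexer machine (-l), streams the input files through the matcher in a
// goroutine, builds a parse tree from the token channel, executes the template functions,
// and prints the rendered output together with any error nodes.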
func main() {
	var fp *os.File

	// ------------------------------------------------------ cli processing --------------------------------------------------------------
	ifnList, err := flags.ParseArgs(&opts, os.Args)

	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ",")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			com.DbOnFlags[v] = true
		}
	}

	if opts.Echo != "" {
		com.DbOnFlags["in-echo-machine"] = true // Output machine
	}

	fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	// ------------------------------------------------------ setup Lexie --------------------------------------------------------------
	pt := NewParse2Type()
	pt.Lex = dfa.NewLexie()
	pt.Lex.SetChanelOnOff(true) // Set to get results back over a channel

	// ------------------------------------------------------ input machine  --------------------------------------------------------------
	if opts.LexPat != "" {
		pt.Lex.NewReadFile(opts.LexPat) // pstk.Lex.NewReadFile("../in/django3.lex")
	} else if opts.ReadMachine != "" {
		fmt.Printf("Should input machine at this point\n")
		// xyzzy
	} else {
		fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n")
		os.Exit(1)
	}

	if opts.Machine != "" {
		fmt.Printf("Should output machine at this point\n")
		// xyzzy
	}

	// -------------------------------------------------- start scanning process  ----------------------------------------------------------
	if opts.Tokens != "" {
		fp, _ = com.Fopen(opts.Tokens, "w")
	} else {
		fp = os.Stdout
	}

	if opts.Input != "" {

		go func() {
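			// Single input file: run it through a push-back reader and the table-driven
			// matcher, starting in state S_Init; tokens come back on pt.Lex.Message.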
			r := pbread.NewPbRead()
			r.OpenFile(opts.Input)
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()

	} else {

		go func() {
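			// Several input files: queue each one into a single push-back reader so they
			// are scanned as one continuous stream.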
			r := pbread.NewPbRead()
			for _, fn := range ifnList[1:] {
				r.OpenFile(fn)
			}
			pt.Lex.MatcherLexieTable(r, "S_Init")
		}()

	}

	// ------------------------------------------------------ process tokens --------------------------------------------------------------
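	// The disabled branch below just dumps raw tokens; the live branch builds a parse tree,
	// executes the template functions, and prints the rendered output and any error nodes.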
	if false {
		// just print tokens out to check the scanning process and CLI options
		for msg := range pt.Lex.Message {
			fmt.Fprintf(fp, "%+v\n", msg)
		}
	} else {
		// Generate a parse tree and print out.
		xpt := pt.GenParseTree(0)
		pt.TheTree = xpt
		xpt.DumpMtType(fp, 0, 0)
		fmt.Printf("----------------------------------- start execute ----------------------------------------------------\n")
		pt.ExecuteFunctions(0)
		fmt.Printf("----------------------------------- debug output ----------------------------------------------------\n")
		if true {
			fmt.Printf("%s\n", com.SVarI(xpt))
		}
		fmt.Printf("----------------------------------- output ----------------------------------------------------\n")
		for i := 0; i < 1000000; i++ {
			pt.OutputTree0(fp, 0)
		}
		fmt.Printf("----------------------------------- errors ----------------------------------------------------\n")
		pp := pt.CollectErrorNodes(0)
		for ii, vv := range pp {
			fmt.Printf("Error [%3d]: msg=%s\n", ii, vv.ErrorMsg)
		}
		fmt.Printf("----------------------------------- final template results  ----------------------------------------------------\n")
		pt.OutputTree(fp, 0)
	}

	if opts.Tokens != "" {
		fp.Close()
	}

}
Code example #4
File: lexie.go  Project: pschlump/lexie
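// main loads a lexer machine (-l), tokenizes one or more input files with the DFA matcher,
// categorizes the resulting tokens, and dumps the token buffers to stdout or to the file
// given by the Tokens option.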
func main() {
	var fp *os.File

	ifnList, err := flags.ParseArgs(&opts, os.Args)

	if err != nil {
		fmt.Printf("Invalid Command Line: %s\n", err)
		os.Exit(1)
	}

	if opts.Debug != "" {
		s := strings.Split(opts.Debug, ",")
		com.DbOnFlags[opts.Debug] = true
		for _, v := range s {
			com.DbOnFlags[v] = true
		}
	}

	if opts.Echo != "" {
		com.DbOnFlags["in-echo-machine"] = true // Output machine
	}

	fmt.Fprintf(os.Stderr, "Test Matcher test from %s file, %s\n", opts.LexPat, com.LF())

	lex := dfa.NewLexie()
	if opts.LexPat != "" {
		lex.NewReadFile(opts.LexPat) // lex.NewReadFile("../in/django3.lex")
	} else if opts.ReadMachine != "" {
		fmt.Printf("Should input machine at this point\n")
		// xyzzy
	} else {
		fmt.Printf("Fatal: Must have -l <fn> or -r <fn>, neither supplied.\n")
		os.Exit(1)
	}

	if opts.Machine != "" {
		fmt.Printf("Should output machine at this point\n")
		// xyzzy
	}

	if opts.Tokens != "" {
		fp, _ = com.Fopen(opts.Tokens, "w")
	} else {
		fp = os.Stdout
	}
	if opts.Input != "" {
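		// Single input: read the whole file into memory, tokenize it with the DFA matcher,
		// categorize the tokens, and dump the token buffer.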

		s := in.ReadFileIntoString(opts.Input)
		r := strings.NewReader(s)
		lex.MatcherLexieTable(r, "S_Init")

		lex.TokList.TokenData = CategorizeToken(lex.TokList.TokenData)

		lex.DumpTokenBuffer(fp)

	} else {
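		// Several inputs: tokenize each file named on the command line and dump its token
		// buffer between Start/End markers.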

		for ii, fn := range ifnList[1:] {
			s := in.ReadFileIntoString(fn)
			r := strings.NewReader(s)
			lex.MatcherLexieTable(r, "S_Init")

			lex.TokList.TokenData = CategorizeToken(lex.TokList.TokenData)

			fmt.Fprintf(fp, "%d: %s -----Start---------------------------------------------------------------------------------\n", ii, fn)
			lex.DumpTokenBuffer2(fp)
			fmt.Fprintf(fp, "%d: %s -----End-----------------------------------------------------------------------------------\n\n\n", ii, fn)
		}

	}
	if opts.Tokens != "" {
		fp.Close()
	}

}