Example #1
// verify parses the EBNF grammar from r, or from the file name when r
// is nil, and checks that every production is defined and reachable
// from the start production.
func verify(name, start string, r io.Reader) error {
	if r == nil {
		f, err := os.Open(name)
		if err != nil {
			return err
		}
		defer f.Close()
		r = f
	}

	src, err := ioutil.ReadAll(r)
	if err != nil {
		return err
	}

	// HTML sources embed the grammar in markup, so pull out just the
	// EBNF first. open (a marker byte slice) and extractEBNF are
	// defined elsewhere in the package.
	if filepath.Ext(name) == ".html" || bytes.Contains(src, open) {
		src = extractEBNF(src)
	}

	// Parse the grammar, then let ebnf.Verify report undefined or
	// unreachable productions.
	grammar, err := ebnf.Parse(name, bytes.NewBuffer(src))
	if err != nil {
		return err
	}

	return ebnf.Verify(grammar, start)
}
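
For orientation, here is a minimal, self-contained sketch of the ebnf.Parse / ebnf.Verify flow this function wraps, assuming the golang.org/x/exp/ebnf package; the inline grammar and the "inline" filename are illustrative, not part of the original code.

package main

import (
	"log"
	"strings"

	"golang.org/x/exp/ebnf"
)

func main() {
	// A two-production grammar. In this package, production names that
	// start with a lowercase letter are lexical.
	const src = `Greeting = greeting .
greeting = "hello" .`

	grammar, err := ebnf.Parse("inline", strings.NewReader(src))
	if err != nil {
		log.Fatal(err)
	}
	// Verify reports undefined productions, productions unreachable
	// from the start production, and lexical productions that
	// reference non-lexical ones.
	if err := ebnf.Verify(grammar, "Greeting"); err != nil {
		log.Fatal(err)
	}
	log.Println("grammar verified")
}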
Example #2
File: main.go Project: crunchyroll/rebnf
func main() {
	var (
		name string
		r    io.Reader
	)
	// Choose the input: stdin when no argument is given, the named
	// file when there is exactly one (r stays nil in that case).
	switch flag.NArg() {
	case 0:
		name, r = "-", os.Stdin
	case 1:
		name = flag.Arg(0)
	default:
		usage()
	}

	grammar, err := rebnf.Parse(name, r)
	if err != nil {
		log.Fatal(err)
	}

	// Verify grammar before attempting any productions.
	err = ebnf.Verify(grammar, args.start)
	if err != nil {
		log.Fatal(err)
	}

	// Randomly expand the grammar from the start production and write
	// the generated text to stdout.
	ctx := rebnf.NewCtx(args.maxreps, args.maxdepth, args.padding, args.debug)
	log.Printf("seed %d", args.seed)
	err = ctx.Random(os.Stdout, grammar, args.start)
	if err != nil {
		log.Fatal(err)
	}
}
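
Note the ordering here: ebnf.Verify runs before any generation, so undefined or unreachable productions are reported once, up front, rather than surfacing midway through ctx.Random's walk of the grammar.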
Example #3
File: lexer.go Project: cznic/lexer
// TODO: full docs
// CompileLexer builds a Lexer from token definitions. Each key of
// tokdefs is a token definition (a regexp, a plain literal, or the name
// of a production in the EBNF grammar) and each value is its token id.
func CompileLexer(starts [][]int, tokdefs map[string]int, grammar, start string) (lexer *Lexer, err error) {
	lexer = &Lexer{}

	// The helpers below report failures by panicking with an error;
	// turn any such panic into the function's error return.
	defer func() {
		if e := recover(); e != nil {
			lexer = nil
			err = e.(error)
		}
	}()

	var prodnames string
	res, xref := map[int]string{}, map[int]string{}

	// Classify each token definition: regexps are kept as-is, plain
	// literals are quoted, and identifiers are resolved against the
	// EBNF grammar in the pass below.
	for tokdef, id := range tokdefs {
		if _, ok := res[id]; ok {
			panic(fmt.Errorf("duplicate id %d for token %q", id, tokdef))
		}

		xref[id] = fmt.Sprintf("id-%d", id)
		if re, ok := isRE(tokdef); ok {
			res[id] = re
			continue
		}

		if grammar == "" || !isIdent(tokdef) {
			res[id] = regexp.QuoteMeta(tokdef)
			continue
		}

		if prodnames != "" {
			prodnames += " | "
		}
		prodnames += tokdef
		res[id] = ""
	}

	if prodnames != "" {
		var g ebnf.Grammar
		ebnfSrc := grammar + fmt.Sprintf("\n%s = %s .", start, prodnames)
		if g, err = ebnf.Parse(start, bytes.NewBufferString(ebnfSrc)); err != nil {
			panic(err)
		}

		if err = ebnf.Verify(g, start); err != nil {
			panic(err)
		}

		// Convert each named production into an equivalent regexp.
		grammarREs := map[*ebnf.Production]string{}
		for tokdef, id := range tokdefs {
			if isIdent(tokdef) {
				res[id], xref[id] = ebnf2RE(g, tokdef, grammarREs), tokdef
			}
		}
	}

	if starts == nil { // create the default, all inclusive start set
		starts = [][]int{{}}
		for id := range res {
			starts[0] = append(starts[0], id)
		}
	}

	// Build the NFA: one start state per start set; each token's
	// regexp runs between that state and the shared accept state,
	// entered via an epsilon edge tagged with the token id.
	lexer.accept = lexer.nfa.NewState()
	lexer.starts = make([]*NfaState, len(starts))
	for i, set := range starts {
		state := lexer.nfa.NewState()
		lexer.starts[i] = state
		for _, id := range set {
			var in, out *NfaState
			re, ok := res[int(id)]
			if !ok {
				panic(fmt.Errorf("unknown token id %d in set %d", id, i))
			}

			if in, out, err = lexer.nfa.ParseRE(fmt.Sprintf("%s-%s", start, xref[int(id)]), re); err != nil {
				panic(err)
			}

			state.AddNonConsuming(&EpsilonEdge{int(id), in})
			out.AddNonConsuming(&EpsilonEdge{0, lexer.accept})
		}
	}

	lexer.nfa.reduce()
	return
}
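
The essential move above is the synthetic start production: appending "start = tok1 | tok2 ... ." lets a single ebnf.Verify call prove that every named token production is defined and reachable. Below is a minimal sketch of just that trick, assuming golang.org/x/exp/ebnf; the grammar text and production names are illustrative.

package main

import (
	"bytes"
	"fmt"
	"log"

	"golang.org/x/exp/ebnf"
)

func main() {
	// Two lexical token productions to be verified together.
	grammar := `ident = "a" { "a" } .
number = "0" { "0" } .`

	// Mirror the prodnames logic above: one synthetic start production
	// that alternates over every token name.
	start := "start"
	src := grammar + fmt.Sprintf("\n%s = %s .", start, "ident | number")

	g, err := ebnf.Parse(start, bytes.NewBufferString(src))
	if err != nil {
		log.Fatal(err)
	}
	// A single Verify call now checks every token production at once.
	if err := ebnf.Verify(g, start); err != nil {
		log.Fatal(err)
	}
}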