Example #1
// Build validates the accumulated builder state and assembles an immutable
// BasicGrammar, remapping the builder's particles onto fresh particles owned
// by the new grammar.
func (gb *GrammarBuilder) Build() (*BasicGrammar, error) {
	if gb.name == "" {
		return nil, errors.New("Name() not called")
	}
	if gb.ruleOpen {
		return nil, errors.New("Rhs() not called after Rule()")
	}
	if gb.initialRule == nil {
		return nil, errors.New("No initial production `* -> ... given")
	}
	// k reserves slot 0 of the terminals slice for epsilon when it was used.
	var k int
	if gb.usedEpsilon {
		k = 1
	}
	g := &BasicGrammar{
		name:         gb.name,
		nonterminals: make([]GrammarParticle, len(gb.nonterms)+1),
		terminals:    make([]GrammarParticle, len(gb.terms)+k),
		productions:  make([]Production, gb.rules.Size()),
	}
	g.epsilon = &GenericEpsilon{grammar: g}
	g.asterisk = &GenericAsterisk{grammar: g}
	g.bottom = &GenericBottom{grammar: g}
	smap := make(map[GrammarParticle]GrammarParticle)
	smap[gb.asterisk] = g.asterisk
	smap[gb.epsilon] = g.epsilon
	smap[gb.bottom] = g.bottom
	// Slot 0 of the nonterminals slice is reserved for the asterisk particle.
	i := 0
	for _, p := range gb.nonterms {
		g.nonterminals[i+1] = &GenericNonterminal{
			name:    p.Name(),
			grammar: g,
		}
		smap[p] = g.nonterminals[i+1]
		i++
	}
	g.nonterminals[0] = g.asterisk
	i = 0
	for _, p := range gb.terms {
		g.terminals[i+k] = &GenericTerminal{
			name:    p.Name(),
			grammar: g,
		}
		smap[p] = g.terminals[i+k]
		i++
	}
	if gb.usedEpsilon {
		g.terminals[0] = g.epsilon
	}
	i = 0
	for c := gb.rules.First(); c.HasNext(); {
		// Rewrite the production in terms of the new grammar's particles.
		g.productions[i], _ = c.Next().(Production).Substitute(smap)
		i++
	}
	return g, nil
}
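
A minimal usage sketch for the builder, inferred from the validation errors in Build above; NewGrammarBuilder and the exact Rule/Rhs chaining are assumptions, not confirmed API:

	// Hypothetical sketch: the constructor and chaining below are assumed.
	gb := NewGrammarBuilder()
	gb.Name("expr")
	gb.Rule("*").Rhs("S")      // the required initial production * -> S
	gb.Rule("S").Rhs("a", "S") // an ordinary production
	g, err := gb.Build()
	if err != nil {
		panic(err)
	}
	_ = g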
Example #2
// Difference returns a new set containing the elements of ts that are not
// in s. Neither operand is modified.
func (ts *treeSet) Difference(s Set) Set {
	nt := tree.NewTree()
	c := ts.OpenCursor()
	for c.HasNext() {
		k := c.Next()
		if !s.Contains(k) {
			nt.Insert(k)
		}
	}
	return TreeSet(nt)
}
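
Difference never mutates its operands; the surviving keys are copied into a fresh tree. The expected semantics, using a hypothetical NewTreeSet constructor:

	// Hypothetical sketch: NewTreeSet is an assumed constructor.
	a := NewTreeSet(1, 2, 3)
	b := NewTreeSet(2, 4)
	d := a.Difference(b) // d contains 1 and 3; a and b are unchanged
	_ = d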
Example #3
// Union returns a set containing the elements of both ps and s. When s adds
// no new elements, the receiver itself is returned.
func (ps *pairSet) Union(s Set) Set {
	t := tree.NewTree()
	t.Insert(ps.x)
	t.Insert(ps.y)
	c := s.OpenCursor()
	for c.HasNext() {
		t.Insert(c.Next())
	}
	if t.Size() == 2 {
		// s contributed no new elements; the union is still {x, y}.
		return ps
	}
	return &treeSet{tree: t}
}
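
Since pairSet is a fixed two-element set, Union can detect the no-op case cheaply: if the merged tree still holds exactly two keys, s added nothing and the receiver is returned unchanged. A sketch with an assumed NewPairSet constructor:

	// Hypothetical sketch: NewPairSet is an assumed constructor.
	p := NewPairSet("a", "b")
	same := p.Union(NewPairSet("b", "a"))  // no new elements: returns p itself
	grown := p.Union(NewPairSet("c", "d")) // four elements: a fresh treeSet
	_, _ = same, grown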
Example #4
// Intersection returns a new set holding the elements common to ts and s.
func (ts *treeSet) Intersection(s Set) Set {
	if ts.Size() > s.Size() {
		// Delegate so the loop below always walks the smaller operand.
		return s.Intersection(ts)
	}
	nt := tree.NewTree()
	c := ts.OpenCursor()
	for c.HasNext() {
		k := c.Next()
		if s.Contains(k) {
			nt.Insert(k)
		}
	}
	return TreeSet(nt)
}
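
The size check up front guarantees the loop walks the smaller operand, so the cost is roughly min(|ts|, |s|) Contains lookups on the larger one. Expected behavior, again with a hypothetical NewTreeSet:

	// Hypothetical sketch: NewTreeSet is an assumed constructor.
	a := NewTreeSet(1, 2, 3, 4)
	b := NewTreeSet(3, 4, 5)
	i := a.Intersection(b) // delegates to b.Intersection(a); i contains 3 and 4
	_ = i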
Example #5
// Initialize computes the FIRST, LAST, IN, and FOLLOW sets for every
// nonterminal of g. Only context-free grammars are supported.
func (ff *FFIndex) Initialize(g parser.Grammar) error {
	ff.grammar = g
	index := parser.GetIndexedGrammar(g)
	idx, err := index.GetIndex(GRAMMAR_CLASS_INDEX)
	if err != nil {
		return err
	}
	cidx := idx.(*GrammarClassIndex)
	if cidx.Class() >= parser.CONTEXT_SENSITIVE {
		return errors.New("cannot first/follow index a non-context-free grammar")
	}
	idx, err = index.GetIndex(BASIC_INDEX)
	if err != nil { // check the lookup error before asserting the index type
		return err
	}
	bidx := idx.(*BasicGrammarIndex)

	// FIRST set calculation
	ff.firstSets = make(map[parser.GrammarParticle][]parser.GrammarParticle)
	for _, nt := range index.Nonterminals() {
		fs := tree.NewTree()
		ntseen := tree.NewTree()
		ntpending := []parser.GrammarParticle{nt}
		for len(ntpending) > 0 {
			cnt := ntpending[0]
			ntpending = ntpending[1:]
			for i := 0; i < bidx.NumLhsStarts(cnt); i++ {
				p := bidx.LhsStart(cnt, i)
				for j := 0; j < p.RhsLen(); j++ {
					rt := p.Rhs(j)
					if rt.Terminal() {
						fs.Insert(rt)
						break
					} else if rt.Nonterminal() {
						if _, has := ntseen.Lookup(c.LTE, rt); !has {
							ntseen.Insert(rt)
							fs.Insert(rt)
							ntpending = append(ntpending, rt)
						}
						if !bidx.Epsilon(rt) {
							break
						}
					} else {
						break
					}
				}
			}
		}
		ff.firstSets[nt] = make([]parser.GrammarParticle, 0, fs.Size())
		for c := fs.First(); c.HasNext(); {
			ff.firstSets[nt] = append(ff.firstSets[nt], c.Next().(parser.GrammarParticle))
		}
	}

	// LAST set calculation
	ff.lastSets = make(map[parser.GrammarParticle][]parser.GrammarParticle)
	for _, nt := range index.Nonterminals() {
		fs := tree.NewTree()
		ntseen := tree.NewTree()
		ntpending := []parser.GrammarParticle{nt}
		for len(ntpending) > 0 {
			cnt := ntpending[0]
			ntpending = ntpending[1:]
			for i := 0; i < bidx.NumLhsStarts(cnt); i++ {
				p := bidx.LhsStart(cnt, i)
				for j := p.RhsLen() - 1; j >= 0; j-- {
					rt := p.Rhs(j)
					if rt.Terminal() {
						fs.Insert(rt)
						break
					}
					if rt.Nonterminal() {
						if _, has := ntseen.Lookup(c.LTE, rt); !has {
							ntseen.Insert(rt)
							fs.Insert(rt)
							ntpending = append(ntpending, rt)
						}
						// Stop at the first non-epsilon particle whether or not
						// it was already seen, mirroring the FIRST pass above.
						if !bidx.Epsilon(rt) {
							break
						}
					}
				}
			}
		}
		ff.lastSets[nt] = make([]parser.GrammarParticle, 0, fs.Size())
		for c := fs.First(); c.HasNext(); {
			ff.lastSets[nt] = append(ff.lastSets[nt], c.Next().(parser.GrammarParticle))
		}
	}

	// IN set calculation
	ff.inSets = make(map[parser.GrammarParticle][]parser.GrammarParticle)
	for _, nt := range index.Nonterminals() {
		fs := tree.NewTree()
		ntseen := tree.NewTree()
		ntpending := []parser.GrammarParticle{nt}
		for len(ntpending) > 0 {
			cnt := ntpending[0]
			ntpending = ntpending[1:]
			for i := 0; i < bidx.NumLhsStarts(cnt); i++ {
				p := bidx.LhsStart(cnt, i)
				for j := p.RhsLen() - 1; j >= 0; j-- {
					rt := p.Rhs(j)
					if rt.Terminal() {
						fs.Insert(rt)
					}
					if rt.Nonterminal() {
						if _, has := ntseen.Lookup(c.LTE, rt); !has {
							ntseen.Insert(rt)
							fs.Insert(rt)
							ntpending = append(ntpending, rt)
						}
					}
				}
			}
		}
		ff.inSets[nt] = make([]parser.GrammarParticle, 0, fs.Size())
		for c := fs.First(); c.HasNext(); {
			ff.inSets[nt] = append(ff.inSets[nt], c.Next().(parser.GrammarParticle))
		}
	}

	// FOLLOW set calculation
	followRefs := make(map[parser.GrammarParticle]tree.Tree)
	followSets := make(map[parser.GrammarParticle]tree.Tree)
	for _, p := range g.Productions() { // First pass: record immediate followers.
		for i := 0; i < p.RhsLen()-1; i++ {
			for j := i + 1; j < p.RhsLen(); j++ {
				if _, has := followSets[p.Rhs(i)]; !has {
					followSets[p.Rhs(i)] = tree.NewTree()
				}
				followSets[p.Rhs(i)].Insert(p.Rhs(j))
				if !bidx.Epsilon(p.Rhs(j)) {
					break
				}
			}
		}
		// The last particle of each production inherits the LHS's follow set.
		tp := p.Rhs(p.RhsLen() - 1)
		if _, has := followRefs[tp]; !has {
			followRefs[tp] = tree.NewTree()
		}
		followRefs[tp].Insert(p.Lhs(0))
	}
	changed := true
	for changed { // Take closure.
		changed = false
		for p, prt := range followRefs {
			for cr := prt.First(); cr.HasNext(); {
				fp := cr.Next().(parser.GrammarParticle) // x in Follow(fp) -> x in Follow(p)
				if fromSet, has := followSets[fp]; has {
					if _, has := followSets[p]; !has {
						followSets[p] = tree.NewTree()
					}
					for k := fromSet.First(); k.HasNext(); {
						x := k.Next().(parser.GrammarParticle)
						if _, has := followSets[p].Lookup(c.LTE, x); !has {
							changed = true
							followSets[p].Insert(x)
						}
					}
				}
			}
		}
	}
	ff.followSets = make(map[parser.GrammarParticle][]parser.GrammarParticle)
	for r, v := range followSets { // Collect results.
		ff.followSets[r] = make([]parser.GrammarParticle, 0, v.Size())
		for c := v.First(); c.HasNext(); {
			ff.followSets[r] = append(ff.followSets[r], c.Next().(parser.GrammarParticle))
		}
	}

	return nil
}
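
To make the four passes concrete, a small worked example on a hypothetical grammar (construction elided; g is assumed in scope):

	// Hypothetical grammar:  S -> A b,  A -> a,  A -> epsilon.
	// A is epsilon-able, so the FIRST scan of S's right-hand side passes
	// through A and also reaches b. Nonterminals stay in the sets, exactly
	// as the code above inserts them:
	//   FIRST(S)  = {A, a, b}
	//   LAST(S)   = {b}
	//   FOLLOW(A) = {b} // from the adjacency pass over S -> A b
	var ff FFIndex
	if err := ff.Initialize(g); err != nil {
		panic(err)
	}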
Example #6
// GenerateParser builds an epsilon-LR(0) parser for g, first transforming
// the grammar to nihilistic normal form (NNF) when necessary.
func GenerateParser(g parser.Grammar) (parser.Parser, error) {

	var dfa *Elr0Dfa
	var invT parser.SyntaxTreeTransform

	itChain := []parser.SyntaxTreeTransform{}
	nnf, err := IsNihilisticNormalForm(g)
	if err != nil {
		return nil, err
	}
	if !nnf {
		// Transform the grammar to NNF.
		g, invT, err = GetNihilisticAugmentGrammar(g)
		if err != nil {
			return nil, err
		}
		if invT != nil {
			itChain = append(itChain, invT)
		}
	}

	// XXX - Check grammar for unreachable terms and productions and prune these.

	// Create the dfa.
	dfa, err = BuildEpsilonLR0Dfa(g)
	if err != nil {
		return nil, err
	}

	// Reduce the dfa to the minimal internal parser structure.
	states := make([]Elr0ReducedDfaState, dfa.NumStates())
	for i := 0; i < len(states); i++ {
		state := dfa.states[i]
		states[i].id = state.Id()
		for c := state.FirstItem(); c.HasNext(); {
			item := c.Next().(*lr0.LR0Item)
			if !item.HasNext() {
				// The dot is at the end of the item, so this state reduces by it.
				nt := item.Production().Lhs(0)
				if states[i].reductions == nil {
					states[i].reductions = make(map[parser.GrammarParticle][]parser.Production)
				}
				// append handles the missing-key case: appending to a nil slice
				// allocates a fresh one.
				states[i].reductions[nt] = append(states[i].reductions[nt], item.Production())
			}
		}
		for j := 0; j < state.NumTransitions(); j++ {
			key := state.TransitionKey(j).(parser.GrammarParticle)
			toid := state.Transition(key).Id()
			if states[i].transitions == nil {
				states[i].transitions = make(map[parser.GrammarParticle]*Elr0ReducedDfaState)
			}
			states[i].transitions[key] = &states[toid]
		}
	}
	parser := &Elr0Parser{
		grammar:        g,
		states:         states,
		transformChain: itChain,
	}

	// Debug dump of the reduced DFA.
	for _, dfaState := range states {
		fmt.Printf("DFA STATE [%d]:\n", dfaState.id)
		for nt, next := range dfaState.transitions {
			fmt.Printf("    %s -> [%d]\n", nt.String(), next.id)
		}
		for nt, rs := range dfaState.reductions {
			for _, r := range rs {
				fmt.Printf("    [%s] : %s\n", nt.String(), r.String())
			}
		}
	}
	return p, nil
}
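
A hedged usage sketch; g is assumed to be a parser.Grammar built elsewhere:

	// Hypothetical usage: g comes from elsewhere in the package.
	p, err := GenerateParser(g)
	if err != nil {
		panic(err)
	}
	_ = p // p satisfies parser.Parser; the parse entry point is not shown here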