// consume a postfix operator. indicate which one it was along with its priority
func (r *TermReader) postfix(op *string, opP, argP *priority, i *lex.List, o **lex.List) bool {
	if i.Value.Type != lex.Atom {
		return false
	}

	// is this an operator at all?
	name := i.Value.Content
	priorities, ok := r.operators[name]
	if !ok {
		return false
	}

	// what class of operator is it?
	switch {
	case priorities[xf] > 0:
		*opP = priorities[xf]
		*argP = *opP - 1
	case priorities[yf] > 0:
		*opP = priorities[yf]
		*argP = *opP
	default: // wasn't a postfix operator after all
		return false
	}

	*op = name
	*o = i.Next()
	return true
}
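// Note added for exposition (not in the original source): the two postfix
// classes differ only in the priority allowed for the argument. An xf operator
// at priority 500 caps its argument at 499 (argP = *opP - 1), so it cannot
// take another priority-500 postfix term as its argument; a yf operator at
// 500 allows an argument of priority 500 (argP = *opP), which is what lets
// equal-priority postfix operators chain.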
// consume a single character token
func (r *TermReader) tok(c rune, in *lex.List, out **lex.List) bool {
	if in.Value.Type == c {
		*out = in.Next()
		return true
	}
	return false
}
// parse a single functor
func (r *TermReader) functor(in *lex.List, out **lex.List, f *string) bool {
	if in.Value.Type == lex.Functor {
		*f = in.Value.Content
		*out = in.Next() // skip functor we just processed
		return true
	}
	return false
}
// consume an infix operator and indicate which one it was along with its priorities
func (r *TermReader) infix(op *string, opP, lap, rap *priority, i *lex.List, o **lex.List) bool {
	// fmt.Printf("seeking infix with %s\n", i.Value.Content)
	typ := i.Value.Type
	if typ != lex.Atom && typ != lex.Functor && typ != ',' {
		// fmt.Printf("  type mismatch: %s\n", lex.TokenString(i.Value.Type))
		return false
	}

	// is this an operator at all?
	name := i.Value.Content
	priorities, ok := r.operators[name]
	if !ok {
		// fmt.Printf("  no operator %s found\n", name)
		return false
	}

	// what class of operator is it?
	switch {
	case priorities[yfx] > 0:
		*opP = priorities[yfx]
		*lap = *opP
		*rap = *opP - 1
	case priorities[xfy] > 0:
		*opP = priorities[xfy]
		*lap = *opP - 1
		*rap = *opP
	case priorities[xfx] > 0:
		*opP = priorities[xfx]
		*lap = *opP - 1
		*rap = *opP - 1
	default: // wasn't an infix operator after all
		// fmt.Printf("  %s wasn't infix after all", name)
		return false
	}

	*op = name
	*o = i.Next()
	// fmt.Printf("  found %s %d %d %d\n", name, *lap, *opP, *rap)
	return true
}
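// Illustrative sketch (added for exposition, not part of the parser): the
// three infix classes above map an operator's priority to the maximum
// priorities allowed for its left and right arguments. For yfx 500 (ISO '-'),
// the left argument may reach 500 while the right is capped at 499, which is
// why "1 - 2 - 3" reads as "(1 - 2) - 3".
func exampleInfixArgPriorities(class string, opP priority) (lap, rap priority) {
	switch class {
	case "yfx": // left-associative: left argument may share the operator's priority
		return opP, opP - 1
	case "xfy": // right-associative: right argument may share the operator's priority
		return opP - 1, opP
	default: // "xfx": non-associative: both arguments must be strictly lower
		return opP - 1, opP - 1
	}
}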
// parse a single term
func (r *TermReader) term(p priority, i *lex.List, o **lex.List, t *term.Term) bool {
	var op, f string
	var t0, t1 term.Term
	var opP, argP priority

	// fmt.Printf("seeking term with %s\n", i.Value.Content)

	// prefix operator
	if r.prefix(&op, &opP, &argP, i, o) && opP <= p && r.term(argP, *o, o, &t0) {
		opT := term.NewCallable(op, t0)
		return r.restTerm(opP, p, *o, o, opT, t)
	}

	// list notation for compound terms §6.3.5
	if r.tok('[', i, o) && r.term(999, *o, o, &t0) && r.listItems(*o, o, &t1) {
		list := term.NewCallable(".", t0, t1)
		return r.restTerm(0, p, *o, o, list, t)
	}
	if r.tok('[', i, o) && r.tok(']', *o, o) {
		list := term.NewAtom("[]")
		return r.restTerm(0, p, *o, o, list, t)
	}

	// parenthesized terms
	if r.tok('(', i, o) && r.term(1200, *o, o, &t0) && r.tok(')', *o, o) {
		// fmt.Printf("open paren %s close paren\n", t0)
		return r.restTerm(0, p, *o, o, t0, t)
	}

	switch i.Value.Type {
	case lex.Int: // integer term §6.3.1.1
		n := term.NewInt(i.Value.Content)
		*o = i.Next()
		return r.restTerm(0, p, *o, o, n, t)
	case lex.Float: // float term §6.3.1.1
		f := term.NewFloat(i.Value.Content)
		*o = i.Next()
		return r.restTerm(0, p, *o, o, f, t)
	case lex.Atom: // atom term §6.3.1.3
		a := term.NewAtomFromLexeme(i.Value.Content)
		*o = i.Next()
		return r.restTerm(0, p, *o, o, a, t)
	case lex.String: // double quoted string §6.3.7
		cl := term.NewCodeListFromDoubleQuotedString(i.Value.Content)
		*o = i.Next()
		return r.restTerm(0, p, *o, o, cl, t)
	case lex.Variable: // variable term §6.3.2
		v := term.NewVar(i.Value.Content)
		*o = i.Next()
		return r.restTerm(0, p, *o, o, v, t)
	case lex.Void: // anonymous variable term §6.3.2
		v := term.NewVar("_")
		*o = i.Next()
		return r.restTerm(0, p, *o, o, v, t)
	case lex.Comment:
		*o = i.Next()              // skip the comment
		return r.term(p, *o, o, t) // ... and try again
	}

	// compound term - functional notation §6.3.3
	if r.functor(i, o, &f) && r.tok('(', *o, o) {
		var args []term.Term
		var arg term.Term
		for r.term(999, *o, o, &arg) { // 999 priority per §6.3.3.1
			args = append(args, arg)
			if r.tok(')', *o, o) {
				break
			}
			if r.tok(',', *o, o) {
				continue
			}
			panic("Unexpected content inside compound term arguments")
		}
		compound := term.NewTermFromLexeme(f, args...) // avoid shadowing the functor name
		return r.restTerm(0, p, *o, o, compound, t)
	}

	*t = term.NewError("Syntax error", i.Value)
	return false
}
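// Usage sketch (an assumption about the surrounding API, shown only as a
// comment): a caller holding a lexed token list would typically request the
// maximum term priority, 1200, and then expect the end-of-clause token,
// roughly:
//
//	var t term.Term
//	var rest *lex.List
//	if r.term(1200, tokens, &rest, &t) && r.tok('.', rest, &rest) {
//		// t holds the parsed term; rest points past the clause
//	}
//
// Here "tokens" and the trailing '.' handling are illustrative assumptions;
// the real entry point may differ.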