Example #1
func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, keyword.String()+"Decl"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Position
	var list vector.Vector
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			list.Push(f(p, p.leadComment))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list.Push(f(p, nil))
	}

	// convert vector
	specs := make([]ast.Spec, len(list))
	for i, x := range list {
		specs[i] = x.(ast.Spec)
	}

	return &ast.GenDecl{doc, pos, keyword, lparen, specs, rparen}
}
Example #2
func (p *Temple) addToken(tok token.Token, lit string) {
	if len(lit) > 0 {
		p.prnt.addData(" " + lit)
	} else {
		p.prnt.addData(" " + tok.String())
	}
}
Example #3
// intersperseComments consumes all comments that appear before the next token
// tok and prints them together with the buffered whitespace (i.e., the whitespace
// that needs to be written before the next token). A heuristic is used to mix
// the comments and whitespace. intersperseComments returns true if a pending
// formfeed was dropped from the whitespace buffer.
//
func (p *printer) intersperseComments(next token.Position, tok token.Token) (droppedFF bool) {
	var last *ast.Comment
	for ; p.commentBefore(next); p.cindex++ {
		for _, c := range p.comments[p.cindex].List {
			p.writeCommentPrefix(p.fset.Position(c.Pos()), next, last, tok.IsKeyword())
			p.writeComment(c)
			last = c
		}
	}

	if last != nil {
		if last.Text[1] == '*' && p.fset.Position(last.Pos()).Line == next.Line {
			// the last comment is a /*-style comment and the next item
			// follows on the same line: separate with an extra blank
			p.write([]byte{' '})
		}
		// ensure that there is a line break after a //-style comment,
		// before a closing '}' unless explicitly disabled, or at eof
		needsLinebreak :=
			last.Text[1] == '/' ||
				tok == token.RBRACE && p.mode&noExtraLinebreak == 0 ||
				tok == token.EOF
		return p.writeCommentSuffix(needsLinebreak)
	}

	// no comment was written - we should never reach here since
	// intersperseComments should not be called in that case
	p.internalError("intersperseComments called without pending comments")
	return false
}
Example #4
// emitArith emits to f code to compute the binary operation op(x, y)
// where op is an eager shift, logical or arithmetic operation.
// (Use emitCompare() for comparisons and Builder.logicalBinop() for
// non-eager operations.)
//
func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value {
	switch op {
	case token.SHL, token.SHR:
		x = emitConv(f, x, t)
		// y may be signed or an 'untyped' constant.
		// TODO(adonovan): whence signed values?
		if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 {
			y = emitConv(f, y, types.Typ[types.Uint64])
		}

	case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
		x = emitConv(f, x, t)
		y = emitConv(f, y, t)

	default:
		panic("illegal op in emitArith: " + op.String())

	}
	v := &BinOp{
		Op: op,
		X:  x,
		Y:  y,
	}
	v.setPos(pos)
	v.setType(t)
	return f.emit(v)
}
Example #5
// intersperseComments consumes all comments that appear before the next token
// tok and prints them together with the buffered whitespace (i.e., the whitespace
// that needs to be written before the next token). A heuristic is used to mix
// the comments and whitespace. intersperseComments returns true if a pending
// formfeed was dropped from the whitespace buffer.
//
func (p *printer) intersperseComments(next token.Position, tok token.Token) (droppedFF bool) {
	var last *ast.Comment
	for ; p.commentBefore(next); p.cindex++ {
		for _, c := range p.comments[p.cindex].List {
			p.writeCommentPrefix(c.Pos(), next, last == nil, tok.IsKeyword())
			p.writeComment(c)
			last = c
		}
	}

	if last != nil {
		if last.Text[1] == '*' && last.Pos().Line == next.Line {
			// the last comment is a /*-style comment and the next item
			// follows on the same line: separate with an extra blank
			p.write([]byte{' '})
		}
		// ensure that there is a newline after a //-style comment
		// or if we are before a closing '}' or at the end of a file
		return p.writeCommentSuffix(last.Text[1] == '/' || tok == token.RBRACE || tok == token.EOF)
	}

	// no comment was written - we should never reach here since
	// intersperseComments should not be called in that case
	p.internalError("intersperseComments called without pending comments")
	return false
}
Example #6
// intersperseComments consumes all comments that appear before the next token
// tok and prints them together with the buffered whitespace (i.e., the whitespace
// that needs to be written before the next token). A heuristic is used to mix
// the comments and whitespace. The intersperseComments result indicates if a
// newline was written or if a formfeed was dropped from the whitespace buffer.
//
func (p *printer) intersperseComments(next token.Position, tok token.Token) (wroteNewline, droppedFF bool) {
	var last *ast.Comment
	for p.commentBefore(next) {
		for _, c := range p.comment.List {
			p.writeCommentPrefix(p.posFor(c.Pos()), next, last, c, tok.IsKeyword())
			p.writeComment(c)
			last = c
		}
		p.nextComment()
	}

	if last != nil {
		// if the last comment is a /*-style comment and the next item
		// follows on the same line but is not a comma or a "closing"
		// token, add an extra blank for separation
		if last.Text[1] == '*' && p.lineFor(last.Pos()) == next.Line && tok != token.COMMA &&
			tok != token.RPAREN && tok != token.RBRACK && tok != token.RBRACE {
			p.writeByte(' ', 1)
		}
		// ensure that there is a line break after a //-style comment,
		// before a closing '}' unless explicitly disabled, or at eof
		needsLinebreak :=
			last.Text[1] == '/' ||
				tok == token.RBRACE && p.mode&noExtraLinebreak == 0 ||
				tok == token.EOF
		return p.writeCommentSuffix(needsLinebreak)
	}

	// no comment was written - we should never reach here since
	// intersperseComments should not be called in that case
	p.internalError("intersperseComments called without pending comments")
	return
}
Example #7
func (p *parser) expect(tok token.Token) token.Pos {
	pos := p.pos
	if p.tok != tok {
		p.errorExpected(pos, "'"+tok.String()+"'")
	}
	p.next() // make progress in any case
	return pos
}
Example #8
// kindToType transforms Go token kind to type name.
func kindToType(kind token.Token) string {
	switch kind.String() {
	case "STRING":
		return "string"
	case "INT":
		return "int"
	default:
		return ""
	}
}
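A minimal usage sketch (the typeNameFor helper is hypothetical, not part of the example above): kindToType only maps string and integer literal tokens, so callers need a fallback for everything else.

func typeNameFor(tok token.Token) string {
	// Fall back to "unknown" for tokens kindToType does not recognize.
	if t := kindToType(tok); t != "" {
		return t
	}
	return "unknown"
}

// typeNameFor(token.STRING) == "string"
// typeNameFor(token.INT)    == "int"
// typeNameFor(token.FLOAT)  == "unknown"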
Example #9
// Compares xv to yv using operator op
// Both xv and yv must be loaded and have a compatible type (as determined by negotiateType)
func compareOp(op token.Token, xv *Variable, yv *Variable) (bool, error) {
	switch xv.Kind {
	case reflect.Bool:
		fallthrough
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		fallthrough
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		fallthrough
	case reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128:
		return constantCompare(op, xv.Value, yv.Value)
	case reflect.String:
		if int64(len(constant.StringVal(xv.Value))) != xv.Len || int64(len(constant.StringVal(yv.Value))) != yv.Len {
			return false, fmt.Errorf("string too long for comparison")
		}
		return constantCompare(op, xv.Value, yv.Value)
	}

	if op != token.EQL && op != token.NEQ {
		return false, fmt.Errorf("operator %s not defined on %s", op.String(), xv.Kind.String())
	}

	var eql bool
	var err error

	switch xv.Kind {
	case reflect.Ptr:
		eql = xv.Children[0].Addr == yv.Children[0].Addr
	case reflect.Array:
		if int64(len(xv.Children)) != xv.Len || int64(len(yv.Children)) != yv.Len {
			return false, fmt.Errorf("array too long for comparison")
		}
		eql, err = equalChildren(xv, yv, true)
	case reflect.Struct:
		if len(xv.Children) != len(yv.Children) {
			return false, nil
		}
		if int64(len(xv.Children)) != xv.Len || int64(len(yv.Children)) != yv.Len {
			return false, fmt.Errorf("sturcture too deep for comparison")
		}
		eql, err = equalChildren(xv, yv, false)
	case reflect.Slice, reflect.Map, reflect.Func, reflect.Chan:
		if xv != nilVariable && yv != nilVariable {
			return false, fmt.Errorf("can not compare %s variables", xv.Kind.String())
		}

		eql = xv.base == yv.base
	default:
		return false, fmt.Errorf("unimplemented comparison of %s variables", xv.Kind.String())
	}

	if op == token.NEQ {
		return !eql, err
	}
	return eql, err
}
Example #10
func negotiateTypeNil(op token.Token, v *Variable) error {
	if op != token.EQL && op != token.NEQ {
		return fmt.Errorf("operator %s can not be applied to \"nil\"", op.String())
	}
	switch v.Kind {
	case reflect.Ptr, reflect.UnsafePointer, reflect.Chan, reflect.Map, reflect.Interface, reflect.Slice, reflect.Func:
		return nil
	default:
		return fmt.Errorf("can not compare %s to nil", v.Kind.String())
	}
}
Example #11
func evalUnaryBoolExpr(ctx *Ctx, x reflect.Value, op token.Token) (reflect.Value, error) {
	var err error
	var r bool

	xx := x.Bool()
	switch op {
	case token.NOT:
		r = !xx
	default:
		panic("eval: impossible unary op " + op.String())
	}
	return reflect.ValueOf(r).Convert(x.Type()), err
}
Example #12
func evalUnaryFloatExpr(ctx *Ctx, x reflect.Value, op token.Token) (reflect.Value, error) {
	var err error
	var r float64

	xx := x.Float()
	switch op {
	case token.ADD:
		r = +xx
	case token.SUB:
		r = -xx
	default:
		panic("eval: impossible unary op " + op.String())
	}
	return reflect.ValueOf(r).Convert(x.Type()), err
}
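A minimal call-site sketch (an assumption: the Ctx argument is unused by this helper, so nil is passed; imports: fmt, go/token, reflect; demoNegate is a hypothetical name):

func demoNegate() {
	v, err := evalUnaryFloatExpr(nil, reflect.ValueOf(3.5), token.SUB)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.Float()) // -3.5
}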
Example #13
// TokenKind returns a syntaxhighlight token kind value for the given tok and lit.
func TokenKind(tok token.Token, lit string) syntaxhighlight.Kind {
	switch {
	case tok.IsKeyword() || (tok.IsOperator() && tok <= token.ELLIPSIS):
		return syntaxhighlight.Keyword

	// Literals.
	case tok == token.INT || tok == token.FLOAT || tok == token.IMAG || tok == token.CHAR:
		return syntaxhighlight.Decimal
	case tok == token.STRING:
		return syntaxhighlight.String
	case lit == "true" || lit == "false" || lit == "iota" || lit == "nil":
		return syntaxhighlight.Literal

	case tok == token.COMMENT:
		return syntaxhighlight.Comment
	default:
		return syntaxhighlight.Plaintext
	}
}
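A sketch of where the (tok, lit) pairs typically come from: scanning source with go/scanner and classifying each token with TokenKind (imports: fmt, go/scanner, go/token; the classify driver is hypothetical):

func classify(src []byte) {
	fset := token.NewFileSet()
	file := fset.AddFile("example.go", fset.Base(), len(src))

	var s scanner.Scanner
	s.Init(file, src, nil, scanner.ScanComments)

	for {
		_, tok, lit := s.Scan()
		if tok == token.EOF {
			break
		}
		fmt.Println(tok, lit, TokenKind(tok, lit))
	}
}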
Example #14
func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction, getSemi bool) (decl *ast.GenDecl, gotSemi bool) {
	if p.trace {
		defer un(trace(p, keyword.String()+"Decl"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Position
	list := new(vector.Vector)
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			doc := p.leadComment
			spec, semi := f(p, doc, true) // consume semicolon if any
			list.Push(spec)
			if !semi {
				break
			}
		}
		rparen = p.expect(token.RPAREN)

		if getSemi && p.tok == token.SEMICOLON {
			p.next()
			gotSemi = true
		} else {
			p.optSemi = true
		}
	} else {
		spec, semi := f(p, nil, getSemi)
		list.Push(spec)
		gotSemi = semi
	}

	// convert vector
	specs := make([]ast.Spec, list.Len())
	for i := 0; i < list.Len(); i++ {
		specs[i] = list.At(i).(ast.Spec)
	}

	return &ast.GenDecl{doc, pos, keyword, lparen, specs, rparen}, gotSemi
}
Example #15
// emitArith emits to f code to compute the binary operation op(x, y)
// where op is an eager shift, logical or arithmetic operation.
// (Use emitCompare() for comparisons and Builder.logicalBinop() for
// non-eager operations.)
//
func emitArith(f *Function, op token.Token, x, y Value, t types.Type) Value {
	switch op {
	case token.SHL, token.SHR:
		x = emitConv(f, x, t)
		y = emitConv(f, y, types.Typ[types.Uint64])

	case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
		x = emitConv(f, x, t)
		y = emitConv(f, y, t)

	default:
		panic("illegal op in emitArith: " + op.String())

	}
	v := &BinOp{
		Op: op,
		X:  x,
		Y:  y,
	}
	v.setType(t)
	return f.emit(v)
}
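Example #16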
func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.GenDecl {
	if p.trace {
		defer un(trace(p, "GenDecl("+keyword.String()+")"))
	}

	doc := p.leadComment
	pos := p.expect(keyword)
	var lparen, rparen token.Pos
	var list []ast.Spec
	if p.tok == token.LPAREN {
		lparen = p.pos
		p.next()
		for p.tok != token.RPAREN && p.tok != token.EOF {
			list = append(list, f(p, p.leadComment))
		}
		rparen = p.expect(token.RPAREN)
		p.expectSemi()
	} else {
		list = append(list, f(p, nil))
	}

	return &ast.GenDecl{doc, pos, keyword, lparen, list, rparen}
}
Example #17
func tokenclass(tok token.Token) int {
	switch {
	case tok.IsLiteral():
		return literal
	case tok.IsOperator():
		return operator
	case tok.IsKeyword():
		return keyword
	}
	return special
}
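The literal, operator, keyword and special constants are not shown in the example; a plausible definition (an assumption) plus the classifications it yields:

const (
	special = iota
	literal
	operator
	keyword
)

// tokenclass(token.IDENT)   == literal  (identifiers count as literals in go/token)
// tokenclass(token.ADD)     == operator
// tokenclass(token.FUNC)    == keyword
// tokenclass(token.COMMENT) == special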
Example #18
func (self *HTMLStyler) Token(tok token.Token) ([]byte, printer.HTMLTag) {
	extra := ""

	if tok.IsKeyword() {
		extra += " go-keyword"
	}

	if tok.IsLiteral() {
		extra += " go-literal"
	}

	if tok.IsOperator() {
		extra += " go-operator"
	}

	self.prev = tok

	return []byte(tok.String()), printer.HTMLTag{
		Start: "<span class=\"go-token" + extra + "\">",
		End:   "</span>",
	}
}
Example #19
func getColor(tok token.Token) string {
	switch {
	case tok.IsKeyword():
		return Colors[Keyword]
	case tok.IsLiteral():
		if tok == token.IDENT {
			return Colors[Identifier]
		} else {
			return Colors[Literal]
		}
	case tok.IsOperator():
		return Colors[Operator]
	case tok == token.COMMENT:
		return Colors[Comment]
	case tok == token.ILLEGAL:
		return Colors[Illegal]
	default:
		panic(fmt.Sprintf("unknown token type: %v", tok))
	}
	return ""
}
Example #20
// getClass returns the CSS class name associated with tok.
func (h *Highlighter) getClass(tok token.Token) string {
	switch {
	case tok.IsKeyword():
		return h.KeywordClass
	case tok.IsLiteral():
		if tok == token.IDENT {
			return h.IdentClass
		} else {
			return h.LiteralClass
		}
	case tok.IsOperator():
		return h.OperatorClass
	case tok == token.COMMENT:
		return h.CommentClass
	case tok == token.ILLEGAL:
		break
	default:
		panic(fmt.Sprintf("unknown token type: %v", tok))
	}
	return ""
}
Example #21
func Evaluate(in string) (float64, error) {
	floats := NewFloatStack()
	ops := NewStringStack()
	s := initScanner(in)

	var prev token.Token = token.ILLEGAL
	var back int = -1

ScanLoop:
	for {
		_, tok, lit := s.Scan()

		if lit != "@" && back > -1 {
			floats.Push(getHistory(back))
			if prev == token.RPAREN || constants.IsConstant(prev.String()) {
				evalUnprecedenced("*", ops, floats)
			}
			back = -1
		}

		switch {
		case tok == token.EOF:
			break ScanLoop
		case lit == "@":
			back += 1
		case constants.IsConstant(lit):
			floats.Push(constants.GetValue(lit))
			if prev == token.RPAREN || isOperand(prev) {
				evalUnprecedenced("*", ops, floats)
			}
		case isOperand(tok):
			val, err := parseFloat(lit)
			if err != nil {
				return 0, err
			}
			floats.Push(val)
			if prev == token.RPAREN || constants.IsConstant(prev.String()) {
				evalUnprecedenced("*", ops, floats)
			}
		case functions.IsFunction(lit):
			if isOperand(prev) || prev == token.RPAREN {
				evalUnprecedenced("*", ops, floats)
			}
			ops.Push(lit)
		case isOperator(tok.String()):
			op := tok.String()
			if isNegation(tok, prev) {
				op = "neg"
			}
			evalUnprecedenced(op, ops, floats)
		case tok == token.LPAREN:
			if isOperand(prev) {
				evalUnprecedenced("*", ops, floats)
			}
			ops.Push(tok.String())
		case tok == token.RPAREN:
			for ops.Pos >= 0 && ops.SafeTop() != "(" {
				err := evalOp(ops.SafePop(), floats)
				if err != nil {
					return 0, err
				}
			}
			_, err := ops.Pop()
			if err != nil {
				return 0, errors.New("Can't find matching parenthesis!")
			}
			if ops.Pos >= 0 {
				if functions.IsFunction(ops.SafeTop()) {
					err := evalOp(ops.SafePop(), floats)
					if err != nil {
						return 0, err
					}
				}
			}
		case tok == token.SEMICOLON:
		default:
			inspect := tok.String()
			if strings.TrimSpace(lit) != "" {
				inspect += " (`" + lit + "`)"
			}
			return 0, errors.New("Unrecognized token " + inspect + " in expression")
		}
		prev = tok
	}

	for ops.Pos >= 0 {
		op, _ := ops.Pop()
		err := evalOp(op, floats)
		if err != nil {
			return 0, err
		}
	}

	res, err := floats.Top()
	if err != nil {
		return 0, errors.New("Expression could not be parsed!")
	}
	pushHistory(res)
	return res, nil
}
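A minimal call-site sketch (hypothetical; it relies on the stack, scanner, and history helpers used above behaving as their names suggest):

func demoEvaluate() {
	res, err := Evaluate("2 * (3 + 4)")
	if err != nil {
		panic(err)
	}
	fmt.Println(res) // expected: 14
}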
Example #22
func isNegation(tok token.Token, prev token.Token) bool {
	return tok == token.SUB &&
		(prev == token.ILLEGAL || isOperator(prev.String()) || prev == token.LPAREN)
}
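A small check (hypothetical test; assumes the package's isOperator helper reports false for non-operator token names) showing how isNegation separates unary minus from binary subtraction (imports: go/token, testing):

func TestIsNegation(t *testing.T) {
	if !isNegation(token.SUB, token.ILLEGAL) { // "-3" at the start of input
		t.Error("expected unary minus at the start of input")
	}
	if !isNegation(token.SUB, token.LPAREN) { // "(-3"
		t.Error("expected unary minus after '('")
	}
	if isNegation(token.SUB, token.INT) { // "2-3"
		t.Error("expected binary minus after an operand")
	}
}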
Example #23
// Writes variables for both declarations and assignments.
func (tr *transform) writeVar(names interface{}, values []ast.Expr, type_ interface{}, operator token.Token, isGlobal, isMultipleLine bool) {
	var sign string
	var isNewVar, isBitClear bool

	tr.isVar = true
	defer func() { tr.isVar = false }()

	if !isGlobal && isMultipleLine {
		tr.WriteString(strings.Repeat(TAB, tr.tabLevel))
	}

	// === Operator
	switch operator {
	case token.DEFINE:
		isNewVar = true
		tr.WriteString("var ")
		sign = "="
	case token.ASSIGN,
		token.ADD_ASSIGN, token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
		token.REM_ASSIGN,
		token.AND_ASSIGN, token.OR_ASSIGN, token.XOR_ASSIGN, token.SHL_ASSIGN,
		token.SHR_ASSIGN:

		sign = operator.String()
	case token.AND_NOT_ASSIGN:
		sign = "&="
		isBitClear = true

	default:
		panic(fmt.Sprintf("operator unimplemented: %s", operator.String()))
	}

	// === Names
	var _names []string
	var idxValidNames []int // indexes of variables which are not blank
	var nameIsPointer []bool

	switch t := names.(type) {
	case []*ast.Ident:
		_names = make([]string, len(t))
		nameIsPointer = make([]bool, len(t))

		for i, v := range t {
			expr := tr.getExpression(v)

			_names[i] = expr.String()
			nameIsPointer[i] = expr.isPointer
		}
	case []ast.Expr: // like above
		_names = make([]string, len(t))
		nameIsPointer = make([]bool, len(t))

		for i, v := range t {
			expr := tr.getExpression(v)

			_names[i] = expr.String()
			nameIsPointer[i] = expr.isPointer
		}
	default:
		panic("unreachable")
	}

	// Check if there is any variable to use, and whether it is exported
	for i, v := range _names {
		if v != BLANK {
			idxValidNames = append(idxValidNames, i)

			if isGlobal {
				tr.addIfExported(v)
			}
		}
	}
	if len(idxValidNames) == 0 {
		return
	}

	if values != nil {
		// === Function
		if call, ok := values[0].(*ast.CallExpr); ok {

			// Function literal
			if _, ok := call.Fun.(*ast.SelectorExpr); ok {
				goto _noFunc
			}

			// Declaration of slice/array
			fun := call.Fun.(*ast.Ident).Name
			if fun == "make" || fun == "new" {
				goto _noFunc
			}

			// === Assign variable to the output of a function
			fun = tr.getExpression(call).String()

			if len(_names) == 1 {
				tr.WriteString(_names[0] + SP + sign + SP + fun + ";")
				return
			}
			if len(idxValidNames) == 1 {
				i := idxValidNames[0]
				tr.WriteString(fmt.Sprintf("%s[%d];",
					_names[i]+SP+sign+SP+fun, i))
				return
			}

			// multiple variables
			str := fmt.Sprintf("_%s", SP+sign+SP+fun)

			for _, i := range idxValidNames {
				str += fmt.Sprintf(",%s_[%d]", SP+_names[i]+SP+sign+SP, i)
			}

			tr.WriteString(str + ";")
			return
		}
	}

_noFunc:
	expr := tr.newExpression(nil)
	typeIs := otherType
	isFuncLit := false
	isZeroValue := false
	isFirst := true
	value := ""

	if values == nil { // initialization explicit
		value, typeIs = tr.zeroValue(true, type_)
		isZeroValue = true
	}

	for _, i := range idxValidNames {
		name := _names[i]
		nameExpr := ""

		tr.lastVarName = name

		// === Name
		if isFirst {
			nameExpr += name
			isFirst = false
		} else {
			nameExpr += "," + SP + name
		}

		if !isNewVar {
			nameExpr += tagPointer(false, 'P', tr.funcId, tr.blockId, name)
		}

		// === Value
		if isZeroValue {
			if typeIs == sliceType {
				tr.slices[tr.funcId][tr.blockId][name] = void
			}
		} else {
			var valueOfValidName ast.Expr

			// _, ok = m[k]
			if len(values) == 1 && i == 1 {
				valueOfValidName = values[0]
			} else {
				valueOfValidName = values[i]
			}

			// If the expression is an anonymous function then, when transforming,
			// it is written in the main buffer.
			expr = tr.newExpression(name)
			expr.isValue = true

			if _, ok := valueOfValidName.(*ast.FuncLit); !ok {
				expr.transform(valueOfValidName)
				exprStr := expr.String()

				if isBitClear {
					exprStr = "~(" + exprStr + ")"
				}
				value = exprStr

				_, typeIs = tr.zeroValue(false, type_)

				if expr.isAddress {
					tr.addr[tr.funcId][tr.blockId][name] = true
					if !isNewVar {
						nameExpr += ADDR
					}
				} /*else {
					tr.addr[tr.funcId][tr.blockId][name] = false
				}*/

				// == Map: v, ok := m[k]
				if len(values) == 1 && tr.isType(mapType, expr.mapName) {
					value = value[:len(value)-3] // remove '[0]'

					if len(idxValidNames) == 1 {
						tr.WriteString(fmt.Sprintf("%s%s%s[%d];",
							_names[idxValidNames[0]],
							SP+sign+SP,
							value, idxValidNames[0]))
					} else {
						tr.WriteString(fmt.Sprintf("_%s,%s_[%d],%s_[%d];",
							SP+sign+SP+value,
							SP+_names[0]+SP+sign+SP, 0,
							SP+_names[1]+SP+sign+SP, 1))
					}

					return
				}
				// ==
			} else {
				isFuncLit = true

				tr.WriteString(nameExpr)
				expr.transform(valueOfValidName)
			}

			// Check if new variables assigned to another ones are slices or maps.
			if isNewVar && expr.isIdent {
				if tr.isType(sliceType, value) {
					tr.slices[tr.funcId][tr.blockId][name] = void
				}
				if tr.isType(mapType, value) {
					tr.maps[tr.funcId][tr.blockId][name] = void
				}
			}
		}

		if isNewVar {
			typeIsPointer := false
			if typeIs == pointerType {
				typeIsPointer = true
			}

			tr.vars[tr.funcId][tr.blockId][name] = typeIsPointer

			// The value could be a pointer, so this new variable has to be one too.
			if tr.vars[tr.funcId][tr.blockId][value] {
				tr.vars[tr.funcId][tr.blockId][name] = true
			}

			// Could be addressed ahead
			if value != "" && !expr.isPointer && !expr.isAddress && !typeIsPointer {
				value = tagPointer(isZeroValue, 'L', tr.funcId, tr.blockId, name) +
					value +
					tagPointer(isZeroValue, 'R', tr.funcId, tr.blockId, name)
			}
		}

		if !isFuncLit {
			tr.WriteString(nameExpr)

			if expr.isSlice {
				if isNewVar {
					tr.WriteString(fmt.Sprintf("%sg.NewSlice(%s)", SP+sign+SP, value))
				} else {
					tr.WriteString(".set(" + value + ")")
				}
			} else if expr.isMake {
				tr.WriteString(fmt.Sprintf("%sg.MakeSlice(%s)", SP+sign+SP, value))

			} else if value != "" {
				tr.WriteString(SP + sign + SP + value)
			}
		}
	}

	if !isFirst && !expr.skipSemicolon && !tr.skipSemicolon {
		tr.WriteString(";")
	}
	if tr.skipSemicolon {
		tr.skipSemicolon = false
	}
}
Example #24
func (s *Styler) Token(tok token.Token) (text []byte, tag printer.HTMLTag) {
	text = strings.Bytes(tok.String())
	return
}
Example #25
// writeVar translates variables for both declarations and assignments.
func (tr *translation) writeVar(names interface{}, values []ast.Expr, type_ interface{}, operator token.Token, isGlobal, isMultipleLine bool) {
	var sign string
	var signIsAssign, signIsDefine, isBitClear bool

	tr.isVar = true
	defer func() { tr.isVar = false }()

	if !isGlobal && isMultipleLine {
		tr.WriteString(strings.Repeat(TAB, tr.tabLevel))
	}

	// == Operator
	switch operator {
	case token.DEFINE:
		tr.WriteString("var ")
		sign = "="
		signIsDefine = true
	case token.ASSIGN:
		sign = operator.String()
		signIsAssign = true
	case token.ADD_ASSIGN, token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
		token.REM_ASSIGN,
		token.AND_ASSIGN, token.OR_ASSIGN, token.XOR_ASSIGN, token.SHL_ASSIGN,
		token.SHR_ASSIGN:

		sign = operator.String()
	case token.AND_NOT_ASSIGN:
		sign = "&="
		isBitClear = true

	default:
		panic(fmt.Sprintf("operator unimplemented: %s", operator.String()))
	}

	// == Names
	// TODO: use this struct
	/*var Name = []struct {
		str      string
		idxValid int
		expr     *expression
	}{}*/

	var _names []string
	var idxValidNames []int // indexes of variables which are not blank
	var name_expr []*expression

	switch t := names.(type) {
	case []*ast.Ident:
		_names = make([]string, len(t))
		name_expr = make([]*expression, len(t))

		for i, v := range t {
			expr := tr.getExpression(v)

			_names[i] = validIdent(expr.String())
			name_expr[i] = expr
		}
	case []ast.Expr: // like above
		_names = make([]string, len(t))
		name_expr = make([]*expression, len(t))

		for i, v := range t {
			expr := tr.getExpression(v)

			_names[i] = expr.String()
			name_expr[i] = expr
		}
	default:
		panic("unreachable")
	}

	// Check if there is any variable to use, and whether it is exported
	for i, v := range _names {
		if v != BLANK {
			idxValidNames = append(idxValidNames, i)

			if isGlobal {
				tr.addIfExported(v)
			}
		}
	}
	if len(idxValidNames) == 0 {
		return
	}

	if values != nil {
		// == Function
		if call, ok := values[0].(*ast.CallExpr); ok {

			// Function literal
			if _, ok := call.Fun.(*ast.SelectorExpr); ok {
				goto _noFunc
			}

			// Declaration of slice/array
			fun := call.Fun.(*ast.Ident).Name
			if fun == "make" || fun == "new" {
				goto _noFunc
			}

			// == Assign variable to the output of a function
			fun = tr.getExpression(call).String()

			if len(_names) == 1 {
				if tr.resultUseFunc[0] {
					_names[0] = stripField(_names[0])
				}
				tr.WriteString(_names[0] + SP + sign + SP + fun + ";")
				return
			}
			if len(idxValidNames) == 1 {
				i := idxValidNames[0]
				if tr.resultUseFunc[i] {
					_names[i] = stripField(_names[i])
				}
				tr.WriteString(fmt.Sprintf("%s%s%s[%d];", _names[i], SP+sign+SP, fun, i))
				return
			}

			// multiple variables
			str := fmt.Sprintf("_%s", SP+sign+SP+fun)

			for _, i := range idxValidNames {
				if tr.resultUseFunc[i] {
					_names[i] = stripField(_names[i])
				}
				str += fmt.Sprintf(",%s%s_[%d]", SP+_names[i], SP+sign+SP, i)
			}

			tr.WriteString(str + ";")
			return
		}
	}

_noFunc:
	expr := tr.newExpression(nil)
	typeIs := otherType
	isFuncLit := false
	isZeroValue := false
	isFirst := true
	value := ""
	numericFunc := ""

	if values == nil { // initialization explicit
		value, typeIs = tr.zeroValue(true, type_)
		isZeroValue = true
	}

	for iValidNames, idxName := range idxValidNames {
		name := _names[idxName]
		nameExpr := ""

		tr.lastVarName = name

		// == Name
		if isFirst {
			nameExpr += name
			isFirst = false
		} else {
			nameExpr += "," + SP + name
		}

		if !signIsDefine && len(name_expr[idxName].index) == 0 {
			nameExpr += tagPointer(false, 'P', tr.funcId, tr.blockId, name)
		}

		// == Value
		if isZeroValue {
			if typeIs == sliceType {
				tr.slices[tr.funcId][tr.blockId][name] = void
			}
		} else {
			var valueOfValidName ast.Expr

			// _, ok = m[k]
			if len(values) == 1 && idxName == 1 {
				valueOfValidName = values[0]
			} else {
				valueOfValidName = values[idxName]
			}

			// If the expression is an anonymous function then, when translating,
			// it is written in the main buffer.
			expr = tr.newExpression(name)
			expr.isValue = true

			if _, ok := valueOfValidName.(*ast.FuncLit); !ok {
				expr.translate(valueOfValidName)
				exprStr := expr.String()

				if isBitClear {
					exprStr = "~(" + exprStr + ")"
				}
				value = exprStr

				_, typeIs = tr.zeroValue(false, type_)

				if expr.isVarAddress {
					tr.addr[tr.funcId][tr.blockId][name] = true
					if !signIsDefine {
						nameExpr += ADDR
					}
				} /*else {
					tr.addr[tr.funcId][tr.blockId][name] = false
				}*/

				// == Map: v, ok := m[k]
				if len(values) == 1 && tr.isType(mapType, expr.mapName) {
					value = value[:len(value)-3] // remove '[0]'

					if len(idxValidNames) == 1 {
						tr.WriteString(fmt.Sprintf("%s%s%s[%d];",
							_names[idxValidNames[0]],
							SP+sign+SP,
							value, idxValidNames[0]))
					} else {
						tr.WriteString(fmt.Sprintf("_%s,%s_[%d],%s_[%d];",
							SP+sign+SP+value,
							SP+_names[0]+SP+sign+SP, 0,
							SP+_names[1]+SP+sign+SP, 1))
					}

					return
				}
				// ==
			} else {
				isFuncLit = true

				tr.WriteString(nameExpr)
				expr.translate(valueOfValidName)
			}

			// Check if new variables assigned to another ones are slices or maps.
			if signIsDefine && expr.isIdent {
				if tr.isType(sliceType, value) {
					tr.slices[tr.funcId][tr.blockId][name] = void
				}
				if tr.isType(mapType, value) {
					tr.maps[tr.funcId][tr.blockId][name] = void
				}
			}
		}

		if signIsDefine {
			typeIsPointer := false
			if typeIs == pointerType {
				typeIsPointer = true
			}

			tr.vars[tr.funcId][tr.blockId][name] = typeIsPointer

			// The value could be a pointer, so this new variable has to be one too.
			if tr.vars[tr.funcId][tr.blockId][value] {
				tr.vars[tr.funcId][tr.blockId][name] = true
			}

			// Could be addressed ahead
			if value != "" && !expr.isPointer && !expr.isVarAddress && !typeIsPointer {
				value = tagPointer(isZeroValue, 'L', tr.funcId, tr.blockId, name) +
					value +
					tagPointer(isZeroValue, 'R', tr.funcId, tr.blockId, name)
			}
		}

		if !isFuncLit {
			// Insert "var" to variable of anonymous struct.
			if tr.insertVar && tr.isType(structType, name) {
				tr.WriteString("var ")
				tr.insertVar = false
			}
			tr.WriteString(nameExpr)

			/*switch expr.kind {
			case sliceKind:
			}*/

			if name_expr[idxName].addSet {
				tr.WriteString(SP + value + ")")

			} else if expr.kind == sliceKind || expr.isSliceExpr {
				if signIsDefine || signIsAssign {
					tr.slices[tr.funcId][tr.blockId][nameExpr] = void

					if value == "" {
						tr.WriteString(fmt.Sprintf("%sg.MkSlice(0,%s0)", SP+sign+SP, SP))
					} else {
						if expr.isSliceExpr {
							tr.WriteString(fmt.Sprintf("%sg.SliceFrom(%s)", SP+sign+SP, value))
						} else {
							tr.WriteString(fmt.Sprintf("%sg.Slice(%s)", SP+sign+SP, value))
						}
					}
				}
			} else if expr.isMake {
				tr.WriteString(fmt.Sprintf("%sg.MkSlice(%s)", SP+sign+SP, value))
				tr.slices[tr.funcId][tr.blockId][nameExpr] = void

			} else {
				if value != "" {
					// Get the numeric function
					if iValidNames == 0 {
						if ident, ok := type_.(*ast.Ident); ok {
							switch ident.Name {
							case "uint", "uint8", "uint16", "uint32",
								"int", "int8", "int16", "int32",
								"float32", "float64",
								"byte", "rune":
								numericFunc = "g." + strings.Title(ident.Name)
							}
						}
					}
					if numericFunc != "" {
						tr.WriteString(fmt.Sprintf("%s%s(%s)",
							SP+sign+SP, numericFunc, value))
					} else {
						tr.WriteString(SP + sign + SP + value)
					}
				}

				if tr.isArray {
					tr.WriteString(")")
					tr.isArray = false
				}
			}
		}
	}

	if !isFirst {
		tr.WriteString(";")
	}
}
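Example #26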
func (a *exprInfo) compileBinaryExpr(op token.Token, l, r *expr) *expr {
	// Save the original types of l.t and r.t for error messages.
	origlt := l.t
	origrt := r.t

	// XXX(Spec) What is the exact definition of a "named type"?

	// XXX(Spec) Arithmetic operators: "Integer types" apparently
	// means all types compatible with basic integer types, though
	// this is never explained.  Likewise for float types, etc.
	// This relates to the missing explanation of named types.

	// XXX(Spec) Operators: "If both operands are ideal numbers,
	// the conversion is to ideal floats if one of the operands is
	// an ideal float (relevant for / and %)."  How is that
	// relevant only for / and %?  If I add an ideal int and an
	// ideal float, I get an ideal float.

	if op != token.SHL && op != token.SHR {
		// Except in shift expressions, if one operand has
		// numeric type and the other operand is an ideal
		// number, the ideal number is converted to match the
		// type of the other operand.
		if (l.t.isInteger() || l.t.isFloat()) && !l.t.isIdeal() && r.t.isIdeal() {
			r = r.convertTo(l.t)
		} else if (r.t.isInteger() || r.t.isFloat()) && !r.t.isIdeal() && l.t.isIdeal() {
			l = l.convertTo(r.t)
		}
		if l == nil || r == nil {
			return nil
		}

		// Except in shift expressions, if both operands are
		// ideal numbers and one is an ideal float, the other
		// is converted to ideal float.
		if l.t.isIdeal() && r.t.isIdeal() {
			if l.t.isInteger() && r.t.isFloat() {
				l = l.convertTo(r.t)
			} else if l.t.isFloat() && r.t.isInteger() {
				r = r.convertTo(l.t)
			}
			if l == nil || r == nil {
				return nil
			}
		}
	}

	// Useful type predicates
	// TODO(austin) CL 33668 mandates identical types except for comparisons.
	compat := func() bool { return l.t.compat(r.t, false) }
	integers := func() bool { return l.t.isInteger() && r.t.isInteger() }
	floats := func() bool { return l.t.isFloat() && r.t.isFloat() }
	strings := func() bool {
		// TODO(austin) Deal with named types
		return l.t == StringType && r.t == StringType
	}
	booleans := func() bool { return l.t.isBoolean() && r.t.isBoolean() }

	// Type check
	var t Type
	switch op {
	case token.ADD:
		if !compat() || (!integers() && !floats() && !strings()) {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		t = l.t

	case token.SUB, token.MUL, token.QUO:
		if !compat() || (!integers() && !floats()) {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		t = l.t

	case token.REM, token.AND, token.OR, token.XOR, token.AND_NOT:
		if !compat() || !integers() {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		t = l.t

	case token.SHL, token.SHR:
		// XXX(Spec) Is it okay for the right operand to be an
		// ideal float with no fractional part?  "The right
		// operand in a shift operation must be always be of
		// unsigned integer type or an ideal number that can
		// be safely converted into an unsigned integer type
		// (§Arithmetic operators)" suggests so and 6g agrees.

		if !l.t.isInteger() || !(r.t.isInteger() || r.t.isIdeal()) {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}

		// The right operand in a shift operation must always
		// be of unsigned integer type or an ideal number that
		// can be safely converted into an unsigned integer type.
		if r.t.isIdeal() {
			r2 := r.convertTo(UintType)
			if r2 == nil {
				return nil
			}

			// If the left operand is not ideal, convert
			// the right to not ideal.
			if !l.t.isIdeal() {
				r = r2
			}

			// If both are ideal, but the right side isn't
			// an ideal int, convert it to simplify things.
			if l.t.isIdeal() && !r.t.isInteger() {
				r = r.convertTo(IdealIntType)
				if r == nil {
					log.Panicf("conversion to uintType succeeded, but conversion to idealIntType failed")
				}
			}
		} else if _, ok := r.t.lit().(*uintType); !ok {
			a.diag("right operand of shift must be unsigned")
			return nil
		}

		if l.t.isIdeal() && !r.t.isIdeal() {
			// XXX(Spec) What is the meaning of "ideal >>
			// non-ideal"?  Russ says the ideal should be
			// converted to an int.  6g propagates the
			// type down from assignments as a hint.

			l = l.convertTo(IntType)
			if l == nil {
				return nil
			}
		}

		// At this point, we should have one of three cases:
		// 1) uint SHIFT uint
		// 2) int SHIFT uint
		// 3) ideal int SHIFT ideal int

		t = l.t

	case token.LOR, token.LAND:
		if !booleans() {
			return nil
		}
		// XXX(Spec) There's no mention of *which* boolean
		// type the logical operators return.  From poking at
		// 6g, it appears to be the named boolean type, NOT
		// the type of the left operand, and NOT an unnamed
		// boolean type.

		t = BoolType

	case token.ARROW:
		// The operands in channel sends differ in type: one
		// is always a channel and the other is a variable or
		// value of the channel's element type.
		log.Panic("Binary op <- not implemented")
		t = BoolType

	case token.LSS, token.GTR, token.LEQ, token.GEQ:
		// XXX(Spec) It's really unclear what types which
		// comparison operators apply to.  I feel like the
		// text is trying to paint a Venn diagram for me,
		// when it's really pretty simple: <, <=, >, >= apply
		// only to numeric types and strings.  == and != apply
		// to everything except arrays and structs, and there
		// are some restrictions on when it applies to slices.

		if !compat() || (!integers() && !floats() && !strings()) {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		t = BoolType

	case token.EQL, token.NEQ:
		// XXX(Spec) The rules for type checking comparison
		// operators are spread across three places that all
		// partially overlap with each other: the Comparison
		// Compatibility section, the Operators section, and
		// the Comparison Operators section.  The Operators
		// section should just say that operators require
		// identical types (as it does currently) except that
		// there a few special cases for comparison, which are
		// described in section X.  Currently it includes just
		// one of the four special cases.  The Comparison
		// Compatibility section and the Comparison Operators
		// section should either be merged, or at least the
		// Comparison Compatibility section should be
		// exclusively about type checking and the Comparison
		// Operators section should be exclusively about
		// semantics.

		// XXX(Spec) Comparison operators: "All comparison
		// operators apply to basic types except bools."  This
		// is very difficult to parse.  It's explained much
		// better in the Comparison Compatibility section.

		// XXX(Spec) Comparison compatibility: "Function
		// values are equal if they refer to the same
		// function." is rather vague.  It should probably be
		// similar to the way the rule for map values is
		// written: Function values are equal if they were
		// created by the same execution of a function literal
		// or refer to the same function declaration.  This is
		// *almost* but not quite what 6g implements.  If a
		// function literal does not capture any variables,
		// then multiple executions of it will result in the
		// same closure.  Russ says he'll change that.

		// TODO(austin) Deal with remaining special cases

		if !compat() {
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		// Arrays and structs may not be compared to anything.
		switch l.t.(type) {
		case *ArrayType, *StructType:
			a.diagOpTypes(op, origlt, origrt)
			return nil
		}
		t = BoolType

	default:
		log.Panicf("unknown binary operator %v", op)
	}

	desc, ok := binOpDescs[op]
	if !ok {
		desc = op.String() + " expression"
		binOpDescs[op] = desc
	}

	// Check for ideal divide by zero
	switch op {
	case token.QUO, token.REM:
		if r.t.isIdeal() {
			if (r.t.isInteger() && r.asIdealInt()().Sign() == 0) ||
				(r.t.isFloat() && r.asIdealFloat()().Sign() == 0) {
				a.diag("divide by zero")
				return nil
			}
		}
	}

	// Compile
	expr := a.newExpr(t, desc)
	switch op {
	case token.ADD:
		expr.genBinOpAdd(l, r)

	case token.SUB:
		expr.genBinOpSub(l, r)

	case token.MUL:
		expr.genBinOpMul(l, r)

	case token.QUO:
		expr.genBinOpQuo(l, r)

	case token.REM:
		expr.genBinOpRem(l, r)

	case token.AND:
		expr.genBinOpAnd(l, r)

	case token.OR:
		expr.genBinOpOr(l, r)

	case token.XOR:
		expr.genBinOpXor(l, r)

	case token.AND_NOT:
		expr.genBinOpAndNot(l, r)

	case token.SHL:
		if l.t.isIdeal() {
			lv := l.asIdealInt()()
			rv := r.asIdealInt()()
			const maxShift = 99999
			if rv.Cmp(big.NewInt(maxShift)) > 0 {
				a.diag("left shift by %v; exceeds implementation limit of %v", rv, maxShift)
				expr.t = nil
				return nil
			}
			val := new(big.Int).Lsh(lv, uint(rv.Int64()))
			expr.eval = func() *big.Int { return val }
		} else {
			expr.genBinOpShl(l, r)
		}

	case token.SHR:
		if l.t.isIdeal() {
			lv := l.asIdealInt()()
			rv := r.asIdealInt()()
			val := new(big.Int).Rsh(lv, uint(rv.Int64()))
			expr.eval = func() *big.Int { return val }
		} else {
			expr.genBinOpShr(l, r)
		}

	case token.LSS:
		expr.genBinOpLss(l, r)

	case token.GTR:
		expr.genBinOpGtr(l, r)

	case token.LEQ:
		expr.genBinOpLeq(l, r)

	case token.GEQ:
		expr.genBinOpGeq(l, r)

	case token.EQL:
		expr.genBinOpEql(l, r)

	case token.NEQ:
		expr.genBinOpNeq(l, r)

	case token.LAND:
		expr.genBinOpLogAnd(l, r)

	case token.LOR:
		expr.genBinOpLogOr(l, r)

	default:
		log.Panicf("Compilation of binary op %v not implemented", op)
	}

	return expr
}
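Example #27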
func (a *exprInfo) compileUnaryExpr(op token.Token, v *expr) *expr {
	// Type check
	var t Type
	switch op {
	case token.ADD, token.SUB:
		if !v.t.isInteger() && !v.t.isFloat() {
			a.diagOpType(op, v.t)
			return nil
		}
		t = v.t

	case token.NOT:
		if !v.t.isBoolean() {
			a.diagOpType(op, v.t)
			return nil
		}
		t = BoolType

	case token.XOR:
		if !v.t.isInteger() {
			a.diagOpType(op, v.t)
			return nil
		}
		t = v.t

	case token.AND:
		// The unary prefix address-of operator & generates
		// the address of its operand, which must be a
		// variable, pointer indirection, field selector, or
		// array or slice indexing operation.
		if v.evalAddr == nil {
			a.diag("cannot take the address of %s", v.desc)
			return nil
		}

		// TODO(austin) Implement "It is illegal to take the
		// address of a function result variable" once I have
		// function result variables.

		t = NewPtrType(v.t)

	case token.ARROW:
		log.Panicf("Unary op %v not implemented", op)

	default:
		log.Panicf("unknown unary operator %v", op)
	}

	desc, ok := unaryOpDescs[op]
	if !ok {
		desc = "unary " + op.String() + " expression"
		unaryOpDescs[op] = desc
	}

	// Compile
	expr := a.newExpr(t, desc)
	switch op {
	case token.ADD:
		// Just compile it out
		expr = v
		expr.desc = desc

	case token.SUB:
		expr.genUnaryOpNeg(v)

	case token.NOT:
		expr.genUnaryOpNot(v)

	case token.XOR:
		expr.genUnaryOpXor(v)

	case token.AND:
		vf := v.evalAddr
		expr.eval = func(t *Thread) Value { return vf(t) }

	default:
		log.Panicf("Compilation of unary op %v not implemented", op)
	}

	return expr
}