Example #1
0
// Getoperator records the operator token beginning at bookmark, preferring a
// two-character operator over a single-character one when the pair also
// lexes as an operator. It advances bookmark past the consumed characters
// and marks typeofword as Operator; it returns early (leaving state as-is)
// when Check reports -1.
func Getoperator(value int) {
	if Check() == -1 {
		return
	}

	// Provisionally take the single character at bookmark.
	t.value = string(book[bookmark])
	t.enum = value

	// Try to extend to a two-character operator when another byte remains.
	if bookmark+1 != booklength {
		nextEnum, nextLen, nextOK := lexer.Operator(string(book[bookmark+1]))
		if nextOK && nextEnum != -1 && nextLen >= 1 {
			pair := string(book[bookmark]) + string(book[bookmark+1])
			pairEnum, pairLen, pairOK := lexer.Operator(pair)
			if pairOK && pairLen > 1 {
				// The pair is itself an operator: consume both bytes.
				t.value = pair
				t.enum = pairEnum
				bookmark += 2
				typeofword = Operator
				return
			}
		}
	}

	// Single-character operator: consume one byte.
	bookmark++
	typeofword = Operator
}
Example #2
0
// Getnextword resets the current token state and classifies the next lexeme
// in book starting at bookmark, dispatching to the matching reader
// (identifier/keyword, operator, string, or number). typeofword stays Error
// when no reader was invoked; each early return leaves the reset state.
func Getnextword() {
	t.value = ""
	t.enum = 0
	typeofword = Error
	value := Check()
	if value == -1 {
		return
	}
	Skipwhitespace()
	value3 := Check()
	if value3 == -1 {
		return
	}
	// Check for a "(*" comment opener. Guard bookmark+1 against the end of
	// the input (the same bound Getoperator uses) so a trailing '(' cannot
	// index past the buffer.
	if bookmark+1 != booklength && book[bookmark] == '(' && book[bookmark+1] == '*' {
		result := Skipcomment()
		if result == -1 {
			return
		}
	}
	Skipwhitespace()
	if unicode.IsLetter(rune(book[bookmark])) { //check for Identifier and Keyword
		Getidentifier()
		return
	}
	value, length, valid := lexer.Operator(string(book[bookmark])) //check for Operator
	if valid && length >= 1 {
		Getoperator(value)
		return
	}
	if book[bookmark] == '"' { // Check for string
		Getstring()
		return
	}
	if unicode.IsDigit(rune(book[bookmark])) { // Check for Number
		Getnumber()
		return
	}
	// Nothing matched; Check runs once more for its error reporting.
	value2 := Check()
	if value2 == -1 {
		return
	}

}
Example #3
0
// Lexer scans input and sends one Token per lexeme on out, finishing with an
// "<EOF>" token (or stopping early after an ERROR token). Line and column
// counters are 1-based; a newline resets column to 1.
// NOTE(review): token has local type Token but out carries lexer.Token —
// this compiles only if the two are the same/alias type; confirm.
func Lexer(input string, out chan<- lexer.Token) {
	var output string
	var token Token
	follow := 0
	ext := 0
	match_star := false
	length := len(input)
	lines := 1
	columns := 1
	// Empty input: emit EOF immediately.
	if length == 0 {
		token = Token{value: "<EOF>", location: 0, line: 1, column: 1, enum: lexer.EOF}
		out <- token
		return
	}

	i := 0
	for i < length {
		// Skip whitespace — LF (10), space, TAB (9), CR (13) — updating
		// line/column as we go.
		for input[i] == 10 || input[i] == ' ' || input[i] == 9 || input[i] == 13 {
			if input[i] == 10 {
				lines++
				columns = 1
				i++
			} else {
				columns++
				i++
			}
			// Whitespace ran to the end of input: emit EOF and stop.
			if !(i < length) {
				token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
				out <- token
				return
			}
		}
		if !(i < length) {
			token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
			out <- token
			return
		}
		// Probe for an operator at the current position; consumed by the
		// operator/comment branch below.
		tok, size, match := lexer.Operator(input[i:])
		if IsALetter(input[i]) {
			// Identifier or keyword.
			output = Reading_Identifier(input[i:])
			lexed, paired := lexer.Keyword(output)
			if paired {
				token = Token{value: output, location: 0, line: lines, column: columns, enum: lexed}
				out <- token
			} else {
				token = Token{value: output, location: 0, line: lines, column: columns, enum: lexer.IDENT}
				out <- token
			}
			columns += len(output)
			i += len(output)
			continue
		} else if IsADigit(input[i]) {
			// Number literal. This output value is only used by the
			// lone-trailing-digit case below; otherwise it is
			// overwritten by Reading_Number.
			output = IsAnId(input[i])
			if len(input[i:]) < 2 {
				// Single digit at the very end of input: emit it as an
				// INTEGER followed by EOF.
				token = Token{value: output, location: 0, line: lines, column: columns, enum: lexer.INTEGER}
				out <- token
				columns++
				token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
				out <- token
				return
			}
			number, bel := Reading_Number(input[i:])
			output = number
			token = Token{value: output, location: 0, line: lines, column: columns, enum: bel}
			out <- token
			columns += len(output)
			i += len(output)
			continue
		} else if input[i] == '"' {
			//we are reading a string
			// A quote as the final character cannot open a complete
			// string literal.
			if len(input[i:]) < 2 {
				token = Token{value: "\"", location: 0, line: lines, column: columns, enum: lexer.ERROR}
				out <- token
				return
			}
			output = Reading_String(input[i+1:])
			// Count double quotes in the returned text.
			j := 0
			error_flag := 0
			for j < len(output) {
				if output[j] == '"' {
					error_flag++
				}
				j++
			}

			// NOTE(review): exactly one '"' in the returned text is
			// treated as an unterminated string — verify this against
			// Reading_String's contract.
			if error_flag == 1 {
				token = Token{value: output, location: 0, line: lines, column: columns, enum: lexer.ERROR}
				out <- token
				return
			}
			token = Token{value: output, location: 0, line: lines, column: columns, enum: lexer.STRING}
			out <- token
			// Advance line/column over the consumed string text.
			j = 0
			for j < len(output) {
				if output[j] == 10 {
					lines++
					columns = 1
				} else {
					columns++
				}
				j++
			}
			i += len(output)
			continue
		} else if match {
			// Operator. Peek at the next position to detect a "(*"
			// comment opener.
			// NOTE(review): when i is the last index, follow/match_star
			// keep their values from an earlier iteration, so a stale
			// STAR match could mis-trigger the comment branch for a
			// trailing "(" — confirm whether that input can reach here.
			if i < (len(input) - 1) {
				follow, ext, match_star = lexer.Operator(input[i+1:])
			}
			if (tok == lexer.LPAREN) && follow == lexer.STAR {
				// NOTE(review): ext is incremented here but never read
				// afterwards — looks like dead code.
				if match_star {
					ext++
				}
				output = Reading_Comment(input[i:])
				if output == "bad comment" {
					token = Token{value: output, location: 0, line: lines, column: columns, enum: lexer.ERROR}
					out <- token
					return
				}
				// Advance line/column over the consumed comment text;
				// no token is emitted for a comment.
				j := 0
				for j < len(output) {
					if output[j] == 10 {
						lines++
						columns = 1
					} else {
						columns++
					}
					j++
				}
				i += len(output)
				if i == length {
					token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
					out <- token
					return
				}
			} else {
				// Plain operator token of length size.
				output = input[i : i+size]
				token = Token{value: output, location: 0, line: lines, column: columns, enum: tok}
				out <- token
				columns += size
				i += size
				if i == length {
					token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
					out <- token
					return
				}
				continue
			}

		} else {
			// No lexeme class matched: report an error token and stop.
			token = Token{value: "-1", location: 0, line: lines, column: columns, enum: lexer.ERROR}
			out <- token
			return
		}

	}

	token = Token{value: "<EOF>", location: 0, line: lines, column: columns, enum: lexer.EOF}
	out <- token
}