Example #1
/*
FixPrematureTransform - Used by clients to fix incoming and outgoing transforms when local changes
have been applied to a document before being routed through the server.

In order for a client UI to be non-blocking it must apply local changes as the user types them,
before knowing the correct order of those changes. Therefore, it is possible to apply a local change
before receiving incoming transforms that were meant to be applied beforehand.

As a solution to those situations this function allows a client to alter an incoming transform such
that, if it were applied to the local document after our local change, it would result in the same
document. The outgoing transform is also modified before being sent out to the server.

It is possible that the local change has already been dispatched to the server, in which case it is
the server's responsibility to fix the transform so that other clients end up with the same result.

NOTE: These fixes do not regard or alter the versions of either transform.
*/
func FixPrematureTransform(unapplied, unsent *OTransform) {
	var before, after *OTransform

	// Order the OTs by position in the document.
	if unapplied.Position < unsent.Position {
		before = unapplied
		after = unsent
	} else {
		before = unsent
		after = unapplied
	}

	// Get insertion lengths (codepoints)
	bInsert, aInsert := bytes.Runes([]byte(before.Insert)), bytes.Runes([]byte(after.Insert))
	bLength, aLength := len(bInsert), len(aInsert)

	if before.Delete == 0 {
		after.Position += bLength
	} else if (before.Delete + before.Position) <= after.Position {
		after.Position += (bLength - before.Delete)
	} else {
		posGap := after.Position - before.Position
		excess := intMax(0, before.Delete-posGap)

		if excess > after.Delete {
			before.Delete += (aLength - after.Delete)
			before.Insert = before.Insert + after.Insert
		} else {
			before.Delete = posGap
		}

		after.Delete = intMax(0, after.Delete-excess)
		after.Position = before.Position + bLength
	}
}
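A minimal usage sketch for the function above. The OTransform literal below only assumes the Position, Delete and Insert fields used in the snippet, and that FixPrematureTransform is in the same package; it is an illustration, not part of the original project.

// Hypothetical demo; assumes FixPrematureTransform and an OTransform with
// Position, Delete and Insert fields (as used above) are in this package.
func demoFixPremature() {
	// The local document was "hello"; the user appended "X" locally before the
	// incoming change (insert "Y" at position 0) arrived from the server.
	unsent := &OTransform{Position: 5, Insert: "X"}    // local, not yet acknowledged
	unapplied := &OTransform{Position: 0, Insert: "Y"} // incoming from the server

	FixPrematureTransform(unapplied, unsent)

	// unapplied still targets position 0, so the local doc "helloX" becomes
	// "YhelloX"; unsent now targets position 6, so remote documents ("Yhello")
	// converge on "YhelloX" as well.
	fmt.Println(unapplied.Position, unsent.Position) // 0 6
}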
Example #2
File: io.go Project: haldean/xt
func NewBuffer(r io.Reader) (Buffer, error) {
	var b Buffer
	b.Lines = make([]Line, 0)

	// bytes are read into this from the reader
	rb := make([]byte, READ_BUFFER_SIZE)
	// bytes are queued here until we find EOL
	lb := make([]byte, 0, READ_BUFFER_SIZE)
	for {
		n, err := r.Read(rb)
		for i := 0; i < n; i++ {
			lb = append(lb, rb[i])
			if rb[i] == '\n' {
				l := Line(bytes.Runes(lb))
				b.Lines = append(b.Lines, l)
				lb = make([]byte, 0, READ_BUFFER_SIZE)
			}
		}
		if err == io.EOF {
			lb = append(lb, '\n')
			l := Line(bytes.Runes(lb))
			b.Lines = append(b.Lines, l)
			lb = make([]byte, 0, READ_BUFFER_SIZE)
			break
		} else if err != nil {
			return b, err
		}
	}
	return b, nil
}
Example #3
/*
FixOutOfDateTransform - When a transform created for a specific version is later determined to come
after one or more other transforms it can be fixed. This fix translates the transform such that
being applied in the correct order will preserve the original intention.

In order to apply these fixes this function should be called, in order, with the target transform
and each versioned transform the target has missed, starting from the version the target currently
'believes' it is at. So, for example, if the transform was written for version 7 but the current
version is actually 10, you would call FixOutOfDateTransform in this order:

FixOutOfDateTransform(target, version7)
FixOutOfDateTransform(target, version8)
FixOutOfDateTransform(target, version9)

Once the transform is adjusted through this fix it can be harmlessly dispatched to all other clients
which will end up with the same document as the client that submitted this transform.

NOTE: These fixes do not regard or alter the versions of either transform.
*/
func FixOutOfDateTransform(sub, pre *OTransform) {
	// Get insertion lengths (codepoints)
	subInsert, preInsert := bytes.Runes([]byte(sub.Insert)), bytes.Runes([]byte(pre.Insert))
	subLength, preLength := len(subInsert), len(preInsert)

	if pre.Position <= sub.Position {
		if preLength > 0 && pre.Delete == 0 {
			sub.Position += preLength
		} else if pre.Delete > 0 && (pre.Position+pre.Delete) <= sub.Position {
			sub.Position += (preLength - pre.Delete)
		} else if pre.Delete > 0 && (pre.Position+pre.Delete) > sub.Position {
			overhang := intMin(sub.Delete, (pre.Position+pre.Delete)-sub.Position)
			sub.Delete -= overhang
			sub.Position = pre.Position + preLength
		}
	} else if sub.Delete > 0 && (sub.Position+sub.Delete) > pre.Position {
		posGap := pre.Position - sub.Position
		excess := intMax(0, (sub.Delete - posGap))

		if excess > pre.Delete {
			sub.Delete += (preLength - pre.Delete)

			newInsert := make([]rune, subLength+preLength)
			copy(newInsert[:], subInsert)
			copy(newInsert[subLength:], preInsert)

			sub.Insert = string(newInsert)
		} else {
			sub.Delete = posGap
		}
	}
}
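A hypothetical sketch of a single fix step, under the same OTransform assumptions as in the previous example.

// Hypothetical demo; assumes FixOutOfDateTransform and OTransform are in this package.
func demoFixOutOfDate() {
	// sub was written against version 7, but the version-7 transform (pre)
	// was applied first and inserted three runes at position 2.
	sub := &OTransform{Position: 5, Insert: "!"}
	pre := &OTransform{Position: 2, Insert: "abc"}

	FixOutOfDateTransform(sub, pre)

	// pre's three inserted runes push sub's target three places to the right.
	fmt.Println(sub.Position) // 8
}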
Example #4
func makeEncoding(decode bool) (encoding map[rune]rune) {
	base64Runes := bytes.Runes([]byte("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="))
	encojiRunes := bytes.Runes([]byte("😀😬😁😂😃😄😅😆😇😉😊😋😌😍😘😗😙😚😜😝😛😎😏💩👻😺😸😹😻🐶🐱🐭🐹🐰🐻🐼🐨🐯🐮🐷🐽🐸🐙🐵🙈🙉🙊🐒🐔🐧🐦🐤🐣🐥🐺🐗🐴🐝🐛🐌🐞🐜🐘🐬🐳"))

	if len(base64Runes) != len(encojiRunes) {
		panic("Charsets must be of same length")
	}

	encoding = make(map[rune]rune)

	var from, to []rune

	if decode {
		from = encojiRunes
		to = base64Runes
	} else {
		from = base64Runes
		to = encojiRunes
	}

	for i := 0; i < len(from); i++ {
		encoding[from[i]] = to[i]
	}
	return encoding
}
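A hypothetical round trip through the two maps, assuming makeEncoding above is in package scope.

// Hypothetical demo; assumes makeEncoding above is in this package.
func demoEncoji() {
	encode := makeEncoding(false) // base64 rune -> emoji rune
	decode := makeEncoding(true)  // emoji rune  -> base64 rune

	var emoji, back []rune
	for _, r := range "SGVsbG8=" { // base64 for "Hello"
		emoji = append(emoji, encode[r])
	}
	for _, r := range emoji {
		back = append(back, decode[r])
	}
	fmt.Println(string(back) == "SGVsbG8=") // true
}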
Example #5
func (self *Scanner) scanIdentifier() (*Token, error) {
	var (
		buf  bytes.Buffer
		stop bool
	)

	for !stop {
		char := self.read()
		switch {
		case isLowerCaseLetter(char):
			buf.WriteRune(char)
		case char == eof:
			stop = true
		default:
			self.unread()
			stop = true
		}
	}

	lit := buf.String()
	switch lit {
	case "null":
		return &Token{NULL, lit, bytes.Runes(buf.Bytes())}, nil
	case "true":
		return &Token{TRUE, lit, bytes.Runes(buf.Bytes())}, nil
	case "false":
		return &Token{FALSE, lit, bytes.Runes(buf.Bytes())}, nil
	default:
		return &Token{ILLEGAL, lit, bytes.Runes(buf.Bytes())}, nil
	}
}
Example #6
func (self *Scanner) scanString() (*Token, error) {
	var (
		buf    bytes.Buffer
		screen bool
		stop   bool
	)

	// skip first quote
	char := self.read()
	if char != '"' {
		// todo err here?
		self.unread()
		return &Token{STRING, buf.String(), bytes.Runes(buf.Bytes())}, nil
	}
	buf.WriteRune(char)

	for !stop {
		char := self.read()

		if unicode.IsControl(char) {
			self.unread()
			stop = true
			continue
		}

		if screen {
			screen = false

			switch char {
			case '"', '\\', '/', 'b', 'f', 'n', 'r', 't', 'u':
				buf.WriteRune(char)
			default:
				return nil, fmt.Errorf("invalid escape character: %q", string(char))
			}

			continue
		}

		switch {
		case char == '\\':
			screen = true
			buf.WriteRune(char)
		case char == '"':
			buf.WriteRune(char)
			stop = true
		case char == eof:
			stop = true
		default:
			buf.WriteRune(char)
		}
	}

	return &Token{STRING, buf.String(), bytes.Runes(buf.Bytes())}, nil
}
Example #7
func (fs *FungeSpace) Load(reader io.Reader) error {
	b, err := ioutil.ReadAll(reader)
	if err != nil {
		return err
	}

	address := fs.funge.Origin()

	for _, r := range bytes.Runes(b) {
		switch r {
		case 012, 015: // LF, CR
			if fs.funge > 1 {
				address = address.Add(fs.funge.Delta(YAxis, Forward))
				address.Set(XAxis, 0)
			}
		case 014: // FF (form feed)
			if fs.funge > 2 {
				address = address.Add(fs.funge.Delta(ZAxis, Forward))
				address.Set(XAxis, 0)
				address.Set(YAxis, 0)
			} else if fs.funge == 2 {
				address = address.Add(fs.funge.Delta(YAxis, Forward))
				address.Set(XAxis, 0)
			}
		default:
			fs.Put(address, r)
			address = address.Add(fs.funge.Delta(XAxis, Forward))
		}
	}

	return nil
}
Example #8
func main() {
	h := []byte("Hello, I think you are right.你是对的!")

	fmt.Println(`Say you have a string:`, "\n\t", string(h))
	fmt.Println(`Whose bytes in decimal are:`, "\n\t", h)
	fmt.Println(`So in hex: `, "\n\t", fmt.Sprintf("% x", h))
	hex := fmt.Sprintf("%x", h)
	fmt.Println(`So you see this form a lot:`, "\n\t", hex)

	buffer := new(bytes.Buffer)
	encoder := base64.NewEncoder(base64.URLEncoding, buffer)
	encoder.Write(h)
	encoder.Close()

	fmt.Println(`And base64 of the same data:`, "\n\t", buffer.String())

	var bs []byte
	fmt.Sscanf(hex, "%x", &bs)

	fmt.Println(`And you can parse`, "\n\t", hex, "\n", "into bytes", "\n\t", bs)

	fmt.Println(`Whose runes are:`, "\n\t", bytes.Runes(bs))
	fmt.Println(`And each rune matches one character, whether it is Chinese or English:`, "\n\t", string(bs))

}
Example #9
// Write appends a byte slice into the view's internal buffer. Because
// View implements the io.Writer interface, it can be passed as a parameter
// to functions like fmt.Fprintf, fmt.Fprintln, io.Copy, etc. Clear must
// be called to clear the view's buffer.
func (v *View) Write(p []byte) (n int, err error) {
	v.tainted = true

	for _, ch := range bytes.Runes(p) {
		switch ch {
		case '\n':
			v.lines = append(v.lines, nil)
		case '\r':
			nl := len(v.lines)
			if nl > 0 {
				v.lines[nl-1] = nil
			} else {
				v.lines = make([][]cell, 1)
			}
		default:
			cells := v.parseInput(ch)
			if cells == nil {
				continue
			}

			nl := len(v.lines)
			if nl > 0 {
				v.lines[nl-1] = append(v.lines[nl-1], cells...)
			} else {
				v.lines = append(v.lines, cells)
			}
		}
	}
	return len(p), nil
}
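Because View satisfies io.Writer, a view is normally filled by printing into it from a layout function. A sketch, assuming the jroimartin/gocui API (SetView and ErrUnknownView); the view name and coordinates are made up.

// Sketch only; assumes the jroimartin/gocui API.
func layout(g *gocui.Gui) error {
	if v, err := g.SetView("log", 0, 0, 40, 5); err != nil {
		if err != gocui.ErrUnknownView {
			return err
		}
		// The view was just created: anything printed here lands in v.lines
		// through the Write method above.
		fmt.Fprintln(v, "hello via io.Writer")
	}
	return nil
}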
Example #10
func (b *Buffer) Load(q0 uint, fd *os.File, nulls *int) int {
	if q0 > b.nc {
		panic("internal error: buffer.Load")
	}

	p := make([]byte, MaxBlock+utf8.UTFMax+1)

	m := 0
	n := 1
	q1 := q0
	var err error

	for n > 0 {
		n, err = fd.Read(p[m:])
		if err != nil && err != io.EOF {
			panic(err)
		}
		m += n
		// Convert the longest prefix that ends on a complete rune; while more
		// data may follow, hold back a trailing partial rune for the next read.
		l := m
		if n > 0 {
			s := m - 1
			for s > 0 && !utf8.RuneStart(p[s]) {
				s--
			}
			if !utf8.FullRune(p[s:m]) {
				l = s
			}
		}
		r := bytes.Runes(p[:l])
		nr := len(r)
		// Shift any held-back bytes to the front of the buffer.
		copy(p, p[l:m])
		m -= l
		b.Insert(q1, uint(nr), r)
		q1 += uint(nr)
	}
	return int(q1 - q0)
}
Example #11
// Encode writes the transliteration encoding of data to the stream.
func (enc *Encoder) Encode(data []byte) error {
	r := bytes.Runes(data)
	l := len(r)
	var rPrev, rNext rune
	for i, rCurr := range r {
		if i+1 < l {
			rNext = r[i+1]
		} else {
			rNext = 0
		}
		var err error
		if s, ok := enc.tbl[rCurr]; ok {
			if sFix, ok := fixRuleRune(rPrev, rCurr, rNext, enc.sys); ok {
				s = sFix
			}
			_, err = enc.WriteString(s)
			if err != nil {
				return err
			}
		} else {
			_, err = enc.WriteRune(rCurr)
			if err != nil {
				return err
			}
		}
		rPrev = rCurr
	}
	return enc.Flush()
}
Example #12
File: string.go Project: kego/ke
func truncate(s string, length int) string {
	runes := bytes.Runes([]byte(s))
	if len(runes) > length {
		return string(runes[:length-3]) + "..."
	}
	return string(runes)
}
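Because the cut is made on runes rather than bytes, multi-byte characters are never split. Two hypothetical calls, assuming the helper above is in scope:

// Hypothetical calls; assumes truncate above is in this package.
func demoTruncate() {
	fmt.Println(truncate("héllo wörld", 8)) // "héllo..." (5 runes kept + "...")
	fmt.Println(truncate("short", 10))      // "short" (returned unchanged)
}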
Example #13
func (s *CJKWidthFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		runeCount := utf8.RuneCount(token.Term)
		runes := bytes.Runes(token.Term)
		for i := 0; i < runeCount; i++ {
			ch := runes[i]
			if ch >= 0xFF01 && ch <= 0xFF5E {
				// fullwidth ASCII variants
				runes[i] -= 0xFEE0
			} else if ch >= 0xFF65 && ch <= 0xFF9F {
				// halfwidth Katakana variants
				if (ch == 0xFF9E || ch == 0xFF9F) && i > 0 && combine(runes, i, ch) {
					runes = analysis.DeleteRune(runes, i)
					i--
					runeCount = len(runes)
				} else {
					runes[i] = kanaNorm[ch-0xFF65]
				}
			}
		}
		token.Term = analysis.BuildTermFromRunes(runes)
	}

	return input
}
Example #14
func redirToWs(fd int, ws *websocket.Conn) {
	defer func() {
		if r := recover(); r != nil {
			fmt.Fprintf(os.Stderr, "Error occurred: %v\n", r)
			runtime.Goexit()
		}
	}()

	var buf [8192]byte
	start, end, buflen := 0, 0, 0
	for {
		switch nr, er := syscall.Read(fd, buf[start:]); {
		case nr < 0:
			fmt.Fprintf(os.Stderr, "error reading from fd %d: %v\n", fd, er)
			return
		case nr == 0: // EOF
			return
		case nr > 0:
			buflen = start + nr
			for end = buflen - 1; end >= 0; end-- {
				if utf8.RuneStart(buf[end]) {
					ch, width := utf8.DecodeRune(buf[end:buflen])
					if ch != utf8.RuneError {
						end += width
					}
					break
				}

				if buflen-end >= 6 {
					fmt.Fprintf(os.Stderr, "Invalid UTF-8 sequence in output\n")
					end = nr
					break
				}
			}

			runes := bytes.Runes(buf[0:end])
			buf_clean := []byte(string(runes))

			nw, ew := ws.Write(buf_clean[:])
			if ew != nil {
				fmt.Fprintf(os.Stderr, "error writing to websocket with code %s\n", ew)
				return
			}

			if nw != len(buf_clean) {
				fmt.Fprintf(os.Stderr, "Written %d instead of expected %d\n", nw, end)
			}

			start = buflen - end

			if start > 0 {
				// copy remaining read bytes from the end to the beginning of the buffer
				// so that the next read can complete the partial rune
				for i := 0; i < start; i++ {
					buf[i] = buf[end+i]
				}
			}
		}
	}
}
Example #15
func encodeStringAsASCII(str string) string {
	var output bytes.Buffer
	output.WriteByte('"')
	for _, b := range bytes.Runes([]byte(str)) {
		if b < utf8.RuneSelf {
			switch b {
			case '\\', '"':
				output.WriteByte('\\')
				output.WriteByte(byte(b))
			case '\n':
				output.WriteByte('\\')
				output.WriteByte('n')
			case '\r':
				output.WriteByte('\\')
				output.WriteByte('r')
			case '\t':
				output.WriteByte('\\')
				output.WriteByte('t')
			default:
				if b < 0x20 || b == '<' || b == '>' || b == '&' {
					output.WriteString(`\u00`)
					output.WriteByte(hex[b>>4])
					output.WriteByte(hex[b&0xF])
				} else {
					output.WriteByte(byte(b))
				}
			}
		} else {
			output.WriteString(fmt.Sprintf("\\u%04x", b))
		}
	}
	output.WriteByte('"')
	return output.String()
}
Example #16
func (s *NgramFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	rv := make(analysis.TokenStream, 0, len(input))

	for _, token := range input {
		runeCount := utf8.RuneCount(token.Term)
		runes := bytes.Runes(token.Term)
		for i := 0; i < runeCount; i++ {
			// index of the starting rune for this token
			for ngramSize := s.minLength; ngramSize <= s.maxLength; ngramSize++ {
				// build an ngram of this size starting at i
				if i+ngramSize <= runeCount {
					ngramTerm := buildTermFromRunes(runes[i : i+ngramSize])
					token := analysis.Token{
						Position: token.Position,
						Start:    token.Start,
						End:      token.End,
						Type:     token.Type,
						Term:     ngramTerm,
					}
					rv = append(rv, &token)
				}
			}
		}
	}

	return rv
}
Example #17
func (t T) Rmo(n N) sequence {
	rv := bytes.Runes([]byte(t))
	a := rv[:n]
	b := rv[n+1:]
	rv = append(a, b...)
	return T(rv)
}
Example #18
// Write appends a byte slice into the view's internal buffer. Because
// View implements the io.Writer interface, it can be passed as a parameter
// to functions like fmt.Fprintf, fmt.Fprintln, io.Copy, etc. Clear must
// be called to clear the view's buffer.
func (v *View) Write(p []byte) (n int, err error) {
	v.tainted = true

	for _, ch := range bytes.Runes(p) {
		switch ch {
		case '\n':
			v.lines = append(v.lines, nil)
		case '\r':
			nl := len(v.lines)
			if nl > 0 {
				v.lines[nl-1] = nil
			} else {
				v.lines = make([][]rune, 1)
			}
		default:
			nl := len(v.lines)
			if nl > 0 {
				v.lines[nl-1] = append(v.lines[nl-1], ch)
			} else {
				v.lines = append(v.lines, []rune{ch})
			}
		}
	}
	return len(p), nil
}
Example #19
// decode transforms punycode input bytes (representing a DNS label) into a Unicode byte stream
func decode(b []byte) []byte {
	src := b // b would move and we need to keep it

	n, bias := _N, _BIAS
	if !bytes.HasPrefix(b, []byte(_PREFIX)) {
		return b
	}
	out := make([]rune, 0, len(b))
	b = b[len(_PREFIX):]
	for pos := len(b) - 1; pos >= 0; pos-- {
		// only the last delimiter is of interest
		if b[pos] == _DELIMITER {
			out = append(out, bytes.Runes(b[:pos])...)
			b = b[pos+1:] // trim source string
			break
		}
	}
	if len(b) == 0 {
		return src
	}
	var (
		i, oldi, w rune
		ch         byte
		t, digit   rune
		ln         int
	)

	for i = 0; len(b) > 0; i++ {
		oldi, w = i, 1
		for k := _BASE; len(b) > 0; k += _BASE {
			ch, b = b[0], b[1:]
			digit = digitval(rune(ch))
			if digit == errdigit {
				return src
			}
			i += digit * w

			t = tfunc(k, bias)
			if digit < t {
				break
			}

			w *= _BASE - t
		}
		ln = len(out) + 1
		bias = adapt(i-oldi, ln, oldi == 0)
		n += i / rune(ln)
		i = i % rune(ln)
		// insert
		out = append(out, 0)
		copy(out[i+1:], out[i:])
		out[i] = n
	}

	var ret bytes.Buffer
	for _, r := range out {
		ret.WriteRune(r)
	}
	return ret.Bytes()
}
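A hypothetical call, assuming decode above is in package scope and that _PREFIX is the standard "xn--" ACE prefix (the snippet's constants are not shown here).

// Hypothetical demo; assumes decode above is in this package and that
// _PREFIX is the standard "xn--" ACE prefix.
func demoDecode() {
	// "xn--bcher-kva" is a commonly cited punycode label for "bücher",
	// so this should print "bücher" if the constants follow RFC 3492.
	fmt.Println(string(decode([]byte("xn--bcher-kva"))))
	// Labels without the prefix are returned untouched.
	fmt.Println(string(decode([]byte("example")))) // example
}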
Example #20
func (f *DictionaryCompoundFilter) decompose(token *analysis.Token) []*analysis.Token {
	runes := bytes.Runes(token.Term)
	rv := make([]*analysis.Token, 0)
	rlen := len(runes)
	for i := 0; i <= (rlen - f.minSubWordSize); i++ {
		var longestMatchToken *analysis.Token
		for j := f.minSubWordSize; j <= f.maxSubWordSize; j++ {
			if i+j > rlen {
				break
			}
			_, inDict := f.dict[string(runes[i:i+j])]
			if inDict {
				newtoken := analysis.Token{
					Term:     []byte(string(runes[i : i+j])),
					Position: token.Position,
					Start:    token.Start + i,
					End:      token.Start + i + j,
					Type:     token.Type,
					KeyWord:  token.KeyWord,
				}
				if f.onlyLongestMatch {
					if longestMatchToken == nil || utf8.RuneCount(longestMatchToken.Term) < j {
						longestMatchToken = &newtoken
					}
				} else {
					rv = append(rv, &newtoken)
				}
			}
		}
		if f.onlyLongestMatch && longestMatchToken != nil {
			rv = append(rv, longestMatchToken)
		}
	}
	return rv
}
Example #21
func main() {
	in := os.Stdin

	b := make([]byte, 1)

	for {
		r := make([]byte, 0, utf8.UTFMax)

		for {
			n, err := in.Read(b)

			if err != nil {
				if err == io.EOF {
					os.Exit(0)
				}
				panic(err)
			}
			if n < 1 {
				panic("read less than 1 byte")
			}

			r = append(r, b[0])
			if utf8.Valid(r) {
				ru := bytes.Runes(r)[0]
				fmt.Printf("%U %c\n", ru, ru)
				break
			}
		}
	}
}
Example #22
/*
MergeTransforms - Takes two transforms (the next to be sent, and the one that follows) and attempts
to merge them into one transform. This will not be possible with some combinations, and the function
returns a boolean to indicate whether the merge was successful.
*/
func MergeTransforms(first, second *OTransform) bool {
	var overlap, remainder int

	// Get insertion lengths (codepoints)
	fInsert := bytes.Runes([]byte(first.Insert))
	fLength := len(fInsert)

	if first.Position+fLength == second.Position {
		first.Insert = first.Insert + second.Insert
		first.Delete += second.Delete
		return true
	}
	if second.Position == first.Position {
		remainder = intMax(0, second.Delete-fLength)
		first.Delete += remainder
		first.Insert = second.Insert + first.Insert[second.Delete:]
		return true
	}
	if second.Position > first.Position && second.Position < (first.Position+fLength) {
		overlap = second.Position - first.Position
		remainder = intMax(0, second.Delete-(fLength-overlap))
		first.Delete += remainder
		first.Insert = first.Insert[0:overlap] + second.Insert +
			first.Insert[intMin(fLength, overlap+second.Delete):]
		return true
	}
	return false
}
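A hypothetical sketch, under the same OTransform assumptions as in the earlier examples.

// Hypothetical demo; assumes MergeTransforms and OTransform are in this package.
func demoMerge() {
	first := &OTransform{Position: 0, Insert: "abc"}
	second := &OTransform{Position: 3, Insert: "def"} // starts right after first's insert

	fmt.Println(MergeTransforms(first, second)) // true
	fmt.Println(first.Insert)                   // abcdef

	far := &OTransform{Position: 42, Insert: "x"} // no adjacency or overlap
	fmt.Println(MergeTransforms(first, far))      // false
}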
Example #23
func (s *PortugueseLightStemmerFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		runes := bytes.Runes(token.Term)
		runes = stem(runes)
		token.Term = analysis.BuildTermFromRunes(runes)
	}
	return input
}
Example #24
func (b *FixedWidthBuffer) WriteString(s string) (n int, err error) {
	runes := bytes.Runes([]byte(s))
	if b.maxwidth != 0 && len(runes) > int(b.maxwidth) {
		return b.Buffer.WriteString(string(runes[:b.maxwidth]))
	} else {
		return b.Buffer.WriteString(s)
	}
}
Example #25
func (s *IndicNormalizeFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		runes := bytes.Runes(token.Term)
		runes = normalize(runes)
		token.Term = analysis.BuildTermFromRunes(runes)
	}
	return input
}
Example #26
func loadFile(filename string) []rune {
	contents, err := ioutil.ReadFile(filename)
	if err != nil {
		panic(err)
	}

	return bytes.Runes(contents)
}
Example #27
func (s *FrenchMinimalStemmerFilter) Filter(input analysis.TokenStream) analysis.TokenStream {
	for _, token := range input {
		runes := bytes.Runes(token.Term)
		runes = minstem(runes)
		token.Term = analysis.BuildTermFromRunes(runes)
	}
	return input
}
Example #28
func Test_IntToBytes_itShouldCreateBytesFromInts(t *testing.T) {
	g := New(Config{})

	res := g.IntToBytes(20)
	expected := make([]int32, 1)
	expected[0] = 20

	assert.Equal(t, bytes.Runes(res), expected)
}
Example #29
File: raku.go Project: beoran/woe
func (me *Lexer) ReadReader() error {
	me.buffer = make([]byte, 0)
	more, err := me.ReadReaderOnce()
	for err == nil && more {
		more, err = me.ReadReaderOnce()
	}
	me.runes = bytes.Runes(me.buffer)

	return err
}
Example #30
func (ante *Ante) parse(program []byte) *Ante {
	// Split program blob into lines.
	lines := bytes.Split(program, []byte("\n"))

	// Get rid of comments and whitespaces.
	comments := regexp.MustCompile(`#.*$`)
	whitespace := regexp.MustCompile(`^\s+|\s+$`)
	for i := 0; i < len(lines); i++ {
		lines[i] = comments.ReplaceAllLiteral(lines[i], []byte(``))
		lines[i] = whitespace.ReplaceAllLiteral(lines[i], []byte(``))
	}

	// Parse lines to turn them into array of cards.
	re := regexp.MustCompile(`(10|[2-9JQKA])([♦♥♠♣])`)
	for i := 0; i < len(lines); i++ {
		// Line number cards have 0 rank.
		ante.code = append(ante.code, Card{0, rune(i + 1)})
		cards := re.FindAllSubmatch(lines[i], -1)
		for _, c := range cards {
			ante.code = append(ante.code, Card{bytes.Runes(c[1])[0], bytes.Runes(c[2])[0]})
		}
	}

	// A pass to convert ranks to integers and extract labels.
	for pc := 0; pc < len(ante.code)-1; {
		card := ante.code[pc]
		pc++
		if card.rank == '1' {
			ante.code[pc-1].rank = 10
		} else if card.rank >= '2' && card.rank <= '9' {
			ante.code[pc-1].rank = card.rank - '0'
		} else if card.rank == 'Q' {
			queen := card.suit
			for pc < len(ante.code) && ante.code[pc].rank == 'Q' && ante.code[pc].suit == card.suit {
				queen += card.suit
				pc++
			}
			ante.labels[queen] = pc
		}
	}

	return ante
}