Beispiel #1
0
// format expands %-directives in str into a concrete string.
// Supported verbs: %% literal percent, %n database name, %Y/%y year,
// %m month, %d day, and %H/%M/%S hour/minute/second padded via twodig.
// An unknown verb after '%' produces no output (both runes are dropped).
func format(str, dbname string, tm time.Time) string {
	buf := bytes.Buffer{}

	var s scanner.Scanner
	s.Init(strings.NewReader(str))
	s.Mode = 0       // no token classification: deliver every rune
	s.Whitespace = 0 // keep whitespace runes verbatim
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if tok != '%' {
			buf.WriteRune(tok)
			continue
		}

		// Read the verb rune following '%'. (Previously this shadowed the
		// scanner variable s with a rune, which was easy to misread.)
		switch verb := s.Scan(); verb {
		case '%':
			buf.WriteRune('%')
		case 'n':
			buf.WriteString(dbname)
		case 'Y', 'y':
			buf.WriteString(strconv.Itoa(tm.Year()))
		case 'm':
			buf.WriteString(strconv.Itoa(int(tm.Month())))
		case 'd':
			buf.WriteString(strconv.Itoa(tm.Day()))
		case 'H':
			buf.WriteString(twodig(strconv.Itoa(tm.Hour())))
		case 'M':
			buf.WriteString(twodig(strconv.Itoa(tm.Minute())))
		case 'S':
			buf.WriteString(twodig(strconv.Itoa(tm.Second())))
		}
	}

	return buf.String()
}
Beispiel #2
0
// parseKey reads a private key from r. It returns a map[string]string,
// with the key-value pairs, or an error when the file is not correct.
// file is only used to label ParseErrors. Tokenization is delegated to
// klexer, which runs in its own goroutine and streams lex items over c.
func parseKey(r io.Reader, file string) (map[string]string, error) {
	var s scanner.Scanner
	m := make(map[string]string)
	c := make(chan lex)
	k := ""
	s.Init(r)
	s.Mode = 0       // raw runes: klexer does its own classification
	s.Whitespace = 0 // klexer must see blanks too
	// Start the lexer
	go klexer(s, c)
	for l := range c {
		// It should alternate: a _KEY item, then its _VALUE.
		switch l.value {
		case _KEY:
			k = l.token
		case _VALUE:
			// A value with no preceding key is malformed input.
			if k == "" {
				return nil, &ParseError{file, "No key seen", l}
			}
			// Keys are stored lower-cased so lookups are case-insensitive.
			m[strings.ToLower(k)] = l.token
			k = ""
		}
	}
	return m, nil
}
Beispiel #3
0
// NewScanner returns a text scanner over src that reports every input
// rune: whitespace is not skipped, comments are delivered as tokens, and
// scan errors are silently discarded.
func NewScanner(src []byte) *scanner.Scanner {
	sc := new(scanner.Scanner)
	sc.Init(bytes.NewReader(src))
	sc.Error = func(*scanner.Scanner, string) {}
	sc.Whitespace = 0
	sc.Mode ^= scanner.SkipComments
	return sc
}
Beispiel #4
0
// NewScannerReader takes a reader src and creates a Scanner.
func NewScannerReader(src io.Reader) *scanner.Scanner {
	var s scanner.Scanner
	s.Init(src)
	s.Error = func(_ *scanner.Scanner, _ string) {}
	s.Whitespace = 0
	s.Mode = s.Mode ^ scanner.SkipComments
	return &s
}
Beispiel #5
0
// loadBamInfo parses the ".bamd" companion file for this button's BAM.
// Lines of the form `frame <name> "<path>" [[-]x [-]y]` register a
// name -> image-path mapping (the optional signed center coordinates are
// parsed and validated but not returned), and `sequence <f1> <f2> ...`
// lines collect the paths of the named frames up to end-of-line.
// One slice of paths per sequence is returned.
func (c *chuControlButton) loadBamInfo() ([][]string, error) {
	bamIn, err := os.Open(c.Bam.String() + ".bamd")
	if err != nil {
		return nil, fmt.Errorf("Error loading bamd: %s %s", c.Bam.String()+".bamd", err)
	}
	defer bamIn.Close()

	var s scanner.Scanner
	s.Init(bamIn)
	// Only tab and space are whitespace, so newlines arrive as tokens and
	// act as line terminators below.
	s.Whitespace = 1<<'\t' | 1<<' '

	frames := map[string]string{}
	sequences := make([][]string, 0)

	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if strings.ToLower(s.TokenText()) == "frame" {
			center_x, center_y := 0, 0
			tok = s.Scan()
			name := strings.TrimSpace(s.TokenText())
			tok = s.Scan()
			path := strings.Trim(s.TokenText(), "\"")
			tok = s.Scan()
			if s.TokenText() != "\n" {
				// Optional center coordinates; a standalone '-' token
				// negates the number that follows it.
				neg_x := 1
				neg_y := 1
				if s.TokenText() == "-" {
					neg_x = -1
					s.Scan()
				}
				if center_x, err = strconv.Atoi(s.TokenText()); err != nil {
					return nil, fmt.Errorf("Error converting %s to an int: %v", s.TokenText(), err)
				}
				center_x *= neg_x
				tok = s.Scan()
				if s.TokenText() == "-" {
					neg_y = -1
					s.Scan()
				}
				if center_y, err = strconv.Atoi(s.TokenText()); err != nil {
					return nil, fmt.Errorf("Error converting %s to an int: %v", s.TokenText(), err)
				}
				center_y *= neg_y
			}

			frames[name] = path
		} else if strings.ToLower(s.TokenText()) == "sequence" {
			seqframes := make([]string, 0)
			// Collect frame names until end-of-line or EOF; unknown names
			// silently map to the empty string.
			for tok = s.Scan(); !(s.TokenText() == "\n" || s.TokenText() == "\r" || tok == scanner.EOF); tok = s.Scan() {
				frame := strings.TrimSpace(s.TokenText())
				seqframes = append(seqframes, frames[frame])
			}

			sequences = append(sequences, seqframes)
		}
	}
	return sequences, nil
}
Beispiel #6
0
// newLexer wraps stream in a lexer whose scanner skips the usual blank
// characters (space, tab, CR, LF) and reports positions under filename fn.
func newLexer(stream io.Reader, fn string) *lexer {
	var sc scanner.Scanner
	sc.Init(stream)
	sc.Filename = fn
	sc.Whitespace = 1<<' ' | 1<<'\r' | 1<<'\n' | 1<<'\t'
	return &lexer{scanner: sc}
}
Beispiel #7
0
// prepareLayout tokenizes a time layout string (e.g. "2006-01-02") into a
// sequence of span matchers, caches the result in this.layoutsMap, and
// returns it. Identifier tokens become variable-length text spans, integer
// tokens become ranged number spans, '+'/'-' flag a sign on the following
// number, and any other rune becomes a fixed delimiter span.
func (this *LocaleDetector) prepareLayout(layout string) layoutDef {
	s := scanner.Scanner{}
	s.Init(strings.NewReader(layout))
	s.Whitespace = 0 // blanks are significant: they become delimiter spans
	result := make([]layoutSpanI, 0)
	var tok rune
	var span layoutSpanI
	var sign bool = false
	for tok != scanner.EOF {
		tok = s.Scan()
		// Use the named scanner token constants instead of their raw
		// numeric values (-2 == scanner.Ident, -3 == scanner.Int).
		switch tok {
		case scanner.Ident: // run of letters: variable-length text
			span = initLimitedStringSpan(1, -1)
		case scanner.Int: // run of digits: variable-length number
			span = initRangeIntSpan(-1, -1, 1, -1)
			if sign {
				sign = false
			}
		case '-': // sign marker for the next numeric span
			sign = true
			continue
		case '+':
			sign = true
			continue
		case scanner.EOF:
			continue
		default: // any other rune is a literal delimiter
			span = initDelimiterSpan(s.TokenText(), 1, 1)
		}
		result = append(result, span)
	}
	if debugLayoutDef {
		fmt.Printf("layout:'%s'\n", layout)
		fmt.Printf("layout:%v\n", result)
	}
	ld := layoutDef{spans: result}
	this.layoutsMap[layout] = ld
	return ld
}
Beispiel #8
0
// parseCode renders Go-like source as syntax-highlighted HTML: a gutter
// div holding line numbers next to a code div whose tokens are wrapped in
// per-class <span>s, all inside a .shaban-syntax container. All token text
// is HTML-escaped; tabs become double &nbsp; and each line ends in <br/>.
func parseCode(code string) string {
	b := bytes.NewBufferString(code)
	var scan scanner.Scanner

	scan.Init(b)
	// Comments are scanned (no SkipComments) so they can be styled.
	// (The original flag set listed ScanRawStrings twice.)
	scan.Mode = scanner.ScanIdents | scanner.ScanRawStrings | scanner.ScanChars |
		scanner.ScanFloats | scanner.ScanStrings | scanner.ScanComments
	scan.Whitespace = 0 // keep newlines/tabs so line structure survives

	// Build incrementally with a Builder instead of quadratic string +=.
	var out strings.Builder
	for tok := scan.Scan(); tok != scanner.EOF; tok = scan.Scan() {
		escaped := html.EscapeString(scan.TokenText())
		switch scanner.TokenString(tok) {
		case "Ident":
			// Only keywords get the "ident" class; other identifiers are
			// emitted as plain escaped text.
			if isKeyword(scan.TokenText()) {
				out.WriteString("<span class=\"ident\">" + escaped + "</span>")
			} else {
				out.WriteString(escaped)
			}
		case "String":
			out.WriteString("<span class=\"string\">" + escaped + "</span>")
		case "Char":
			out.WriteString("<span class=\"char\">" + escaped + "</span>")
		case "RawString":
			out.WriteString("<span class=\"rawstring\">" + escaped + "</span>")
		case "Int":
			out.WriteString("<span class=\"int\">" + escaped + "</span>")
		case "Comment":
			out.WriteString("<span class=\"comment\">" + escaped + "</span>")
		default:
			out.WriteString(escaped)
		}
	}

	body := strings.Replace(out.String(), "\t", "&nbsp;&nbsp;", -1)
	lines := strings.Split(body, "\n")
	var highlighted, linenumbers strings.Builder
	for k, v := range lines[:len(lines)-1] {
		fmt.Fprintf(&highlighted, "\n%s<br/>", v)
		fmt.Fprintf(&linenumbers, "%v<br/>", k+1)
	}
	gutter := "<div class=\"gutter\">" + linenumbers.String() + "</div>"
	parsedcode := "<div class=\"code\">" + highlighted.String() + "</div>"
	return "<div class=\"shaban-syntax\">" + gutter + parsedcode + "</div>"
}
Beispiel #9
0
// funcFromFormula translates a plural formula (a chain of C-style ternary
// expressions, e.g. "n != 1 ? 1 : 0") into the body of a Go function:
// every '?' opens an "if <cond> {" block and every ':' closes one with
// "return <value>". A formula that is a bare boolean expression becomes
// "if <expr> { return 1 } return 0" (the constant "0" instead yields a
// plain "return 0"). The formula text is first isolated from form by
// extractFormula; scanner errors are surfaced through err.
func funcFromFormula(form string) (string, error) {
	f, _, err := extractFormula(form)
	if err != nil {
		return "", err
	}
	var s scanner.Scanner
	s.Init(strings.NewReader(f))
	// The Error hook writes into err, which both the scan loop and the
	// final check below observe.
	s.Error = func(s *scanner.Scanner, msg string) {
		err = fmt.Errorf("error parsing plural formula %s: %s", s.Pos(), msg)
	}
	s.Mode = scanner.ScanIdents | scanner.ScanInts
	s.Whitespace = 0
	tok := s.Scan()
	// buf accumulates the current expression; code collects emitted lines.
	var code []string
	var buf bytes.Buffer
	for tok != scanner.EOF && err == nil {
		switch tok {
		case scanner.Ident, scanner.Int:
			buf.WriteString(s.TokenText())
		case '?':
			code = append(code, fmt.Sprintf("if %s {\n", buf.String()))
			buf.Reset()
		case ':':
			code = append(code, fmt.Sprintf("return %s\n}\n", buf.String()))
			buf.Reset()
		default:
			// Operators and other punctuation pass through verbatim.
			buf.WriteRune(tok)
		}
		tok = s.Scan()
	}
	if err != nil {
		return "", err
	}
	// No ternaries seen: treat the whole formula as a boolean expression.
	if len(code) == 0 && buf.Len() > 0 && buf.String() != "0" {
		code = append(code, fmt.Sprintf("if %s {\nreturn 1\n}\nreturn 0\n", buf.String()))
		buf.Reset()
	}
	if buf.Len() > 0 {
		code = append(code, fmt.Sprintf("\nreturn %s\n", buf.String()))
	}
	return strings.Join(code, ""), nil
}
Beispiel #10
0
// NewTokenizer returns a Tokenizer reading from r. name is recorded as the
// scanner's position filename and kept as the Tokenizer's fileName; file
// may be nil and is stored on the Tokenizer for the caller's later use.
// When file is non-nil, name is also pushed onto the global line history
// (linkCtxt.LineHist).
func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer {
	var s scanner.Scanner
	s.Init(r)
	// Newline is like a semicolon; other space characters are fine.
	s.Whitespace = 1<<'\t' | 1<<'\r' | 1<<' '
	// Don't skip comments: we need to count newlines.
	s.Mode = scanner.ScanChars |
		scanner.ScanFloats |
		scanner.ScanIdents |
		scanner.ScanInts |
		scanner.ScanStrings |
		scanner.ScanComments
	s.Position.Filename = name
	// Custom identifier rules — see isIdentRune for what counts as an ident.
	s.IsIdentRune = isIdentRune
	if file != nil {
		linkCtxt.LineHist.Push(histLine, name)
	}
	return &Tokenizer{
		s:        &s,
		line:     1,
		fileName: name,
		file:     file,
	}
}
Beispiel #11
0
// parsePo parses a gettext .po stream from r into a *Po. filename is used
// only to label error positions. '#' comment lines are skipped;
// msgctxt/msgid/msgid_plural/msgstr entries are accumulated into
// Translation values, and the header entry (empty msgid) has its
// "Key: value" lines lifted into po.Attrs.
func parsePo(r io.Reader, filename string) (*Po, error) {
	comment := false
	s := new(scanner.Scanner)
	var err error
	s.Init(r)
	s.Filename = filename
	s.Error = func(s *scanner.Scanner, msg string) {
		// Scan errors raised while skipping a comment are ignored.
		if !comment {
			err = fmt.Errorf("error parsing %s: %s", s.Pos(), msg)
		}
	}
	s.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanInts
	tok := s.Scan()
	po := &Po{Attrs: make(map[string]string)}
	var trans *Translation
	for tok != scanner.EOF && err == nil {
		if tok == '#' {
			// Skip until EOL: temporarily shrink the whitespace set so the
			// terminating '\n' is visible as a token.
			comment = true
			s.Whitespace = whitespace
			for tok != '\n' && tok != scanner.EOF {
				tok = s.Scan()
			}
			s.Whitespace = scanner.GoWhitespace
			comment = false
			tok = s.Scan()
			continue
		}
		if tok != scanner.Ident {
			err = unexpected(s, tok)
			break
		}
		text := s.TokenText()
		switch text {
		case "msgctxt":
			if trans != nil {
				if len(trans.Translations) == 0 {
					err = unexpected(s, tok)
					break
				}
				po.addTranslation(trans)
			}
			trans = &Translation{Context: readString(s, &tok, &err)}
		case "msgid":
			if trans != nil {
				if len(trans.Translations) > 0 || trans.Singular != "" {
					po.addTranslation(trans)
				} else if trans.Context != "" {
					// A msgctxt was just seen: attach this msgid to it.
					trans.Singular = readString(s, &tok, &err)
					break
				}
			}
			trans = &Translation{Singular: readString(s, &tok, &err)}
		case "msgid_plural":
			if trans == nil || trans.Plural != "" {
				err = unexpected(s, tok)
				break
			}
			trans.Plural = readString(s, &tok, &err)
		case "msgstr":
			// A msgstr with no preceding msgid/msgctxt would dereference a
			// nil trans below; report it instead of panicking.
			if trans == nil {
				err = unexpected(s, tok)
				break
			}
			str := readString(s, &tok, &err)
			if tok == '[' {
				// Indexed plural form: msgstr[N] must arrive in order.
				tok = s.Scan()
				if tok != scanner.Int {
					err = unexpected(s, tok)
					break
				}
				ii, _ := strconv.Atoi(s.TokenText())
				if ii != len(trans.Translations) {
					err = unexpected(s, tok)
					break
				}
				if tok = s.Scan(); tok != ']' {
					err = unexpected(s, tok)
					break
				}
				str = readString(s, &tok, &err)
			}
			trans.Translations = append(trans.Translations, str)
		default:
			err = unexpected(s, tok)
		}
	}
	if trans != nil {
		po.addTranslation(trans)
	}
	if err != nil {
		return nil, err
	}
	// The header entry (empty context and msgid) carries "Key: value"
	// lines in its first translation; copy them into po.Attrs.
	for _, v := range po.Messages {
		if v.Context == "" && v.Singular == "" {
			if len(v.Translations) > 0 {
				meta := v.Translations[0]
				for _, line := range strings.Split(meta, "\n") {
					colon := strings.Index(line, ":")
					if colon > 0 {
						key := strings.TrimSpace(line[:colon])
						value := strings.TrimSpace(line[colon+1:])
						po.Attrs[key] = value
					}
				}
			}
			break
		}
	}
	return po, nil
}
Beispiel #12
0
// parseZone parses a DNS master file from r and delivers each resource
// record (or a ParseError) as a Token on channel t. f is the filename used
// in error messages. include is the current $INCLUDE nesting depth: only
// the outermost call (include == 0) closes t, and nesting beyond 7 levels
// aborts with an error. Lexing is delegated to zlexer; the st state
// machine below tracks which element of a record line is expected next.
func parseZone(r io.Reader, f string, t chan Token, include int) {
	defer func() {
		if include == 0 {
			close(t)
		}
	}()
	var s scanner.Scanner
	c := make(chan lex)
	s.Init(r)
	s.Mode = 0
	s.Whitespace = 0
	// Start the lexer
	go zlexer(s, c)
	// 5 possible beginnings of a line, _ is a space
	// 1. _OWNER _ _RRTYPE                     -> class/ttl omitted
	// 2. _OWNER _ _STRING _ _RRTYPE           -> class omitted
	// 3. _OWNER _ _STRING _ _CLASS  _ _RRTYPE -> ttl/class
	// 4. _OWNER _ _CLASS  _ _RRTYPE           -> ttl omitted
	// 5. _OWNER _ _CLASS  _ _STRING _ _RRTYPE -> class/ttl (reversed)
	// After detecting these, we know the _RRTYPE so we can jump to functions
	// handling the rdata for each of these types.
	st := _EXPECT_OWNER_DIR
	var h RR_Header
	var ok bool
	var defttl uint32 = DefaultTtl
	var origin string = "."
	for l := range c {
		if _DEBUG {
			fmt.Printf("[%v]\n", l)
		}
		// Lexer spotted an error already
		if l.err != "" {
			t <- Token{Error: &ParseError{f, l.err, l}}
			return

		}
		switch st {
		case _EXPECT_OWNER_DIR:
			// We can also expect a directive, like $TTL or $ORIGIN
			h.Ttl = defttl
			h.Class = ClassINET
			switch l.value {
			case _NEWLINE: // Empty line
				st = _EXPECT_OWNER_DIR
			case _OWNER:
				h.Name = l.token
				if _, ok := IsDomainName(l.token); !ok {
					t <- Token{Error: &ParseError{f, "bad owner name", l}}
					return
				}
				// Relative names get the current origin appended.
				if !IsFqdn(h.Name) {
					h.Name += origin
				}
				st = _EXPECT_OWNER_BL
			case _DIRTTL:
				st = _EXPECT_DIRTTL_BL
			case _DIRORIGIN:
				st = _EXPECT_DIRORIGIN_BL
			case _DIRINCLUDE:
				st = _EXPECT_DIRINCLUDE_BL
			default:
				t <- Token{Error: &ParseError{f, "Error at the start", l}}
				return
			}
		case _EXPECT_DIRINCLUDE_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank after $INCLUDE-directive", l}}
				return
			}
			st = _EXPECT_DIRINCLUDE
		case _EXPECT_DIRINCLUDE:
			if l.value != _STRING {
				t <- Token{Error: &ParseError{f, "Expecting $INCLUDE value, not this...", l}}
				return
			}
			// Start with the new file
			r1, e1 := os.Open(l.token)
			if e1 != nil {
				t <- Token{Error: &ParseError{f, "Failed to open `" + l.token + "'", l}}
				return
			}
			if include+1 > 7 {
				t <- Token{Error: &ParseError{f, "Too deeply nested $INCLUDE", l}}
				return
			}
			// Recurse into the included file, feeding the same channel.
			parseZone(r1, l.token, t, include+1)
			st = _EXPECT_OWNER_DIR
		case _EXPECT_DIRTTL_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank after $TTL-directive", l}}
				return
			}
			st = _EXPECT_DIRTTL
		case _EXPECT_DIRTTL:
			if l.value != _STRING {
				t <- Token{Error: &ParseError{f, "Expecting $TTL value, not this...", l}}
				return
			}
			if ttl, ok := stringToTtl(l, f, t); !ok {
				return
			} else {
				defttl = ttl
			}
			st = _EXPECT_OWNER_DIR
		case _EXPECT_DIRORIGIN_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank after $ORIGIN-directive", l}}
				return
			}
			st = _EXPECT_DIRORIGIN
		case _EXPECT_DIRORIGIN:
			if l.value != _STRING {
				t <- Token{Error: &ParseError{f, "Expecting $ORIGIN value, not this...", l}}
				return
			}
			if !IsFqdn(l.token) {
				origin = l.token + origin // Append old origin if the new one isn't a fqdn
			} else {
				origin = l.token
			}
		case _EXPECT_OWNER_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank after owner", l}}
				return
			}
			st = _EXPECT_ANY
		case _EXPECT_ANY:
			switch l.value {
			case _RRTYPE:
				h.Rrtype, _ = Str_rr[strings.ToUpper(l.token)]
				st = _EXPECT_RDATA
			case _CLASS:
				h.Class, ok = Str_class[strings.ToUpper(l.token)]
				if !ok {
					t <- Token{Error: &ParseError{f, "Unknown class", l}}
					return
				}
				st = _EXPECT_ANY_NOCLASS_BL
			case _STRING: // TTL is this case
				if ttl, ok := stringToTtl(l, f, t); !ok {
					return
				} else {
					h.Ttl = ttl
				}
				st = _EXPECT_ANY_NOTTL_BL
			default:
				t <- Token{Error: &ParseError{f, "Expecting RR type, TTL or class, not this...", l}}
				return
			}
		case _EXPECT_ANY_NOCLASS_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank before NOCLASS", l}}
				return
			}
			st = _EXPECT_ANY_NOCLASS
		case _EXPECT_ANY_NOTTL_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank before NOTTL", l}}
				return
			}
			st = _EXPECT_ANY_NOTTL
		case _EXPECT_ANY_NOTTL:
			switch l.value {
			case _CLASS:
				h.Class, ok = Str_class[strings.ToUpper(l.token)]
				if !ok {
					t <- Token{Error: &ParseError{f, "Unknown class", l}}
					return
				}
				st = _EXPECT_RRTYPE_BL
			case _RRTYPE:
				h.Rrtype, _ = Str_rr[strings.ToUpper(l.token)]
				st = _EXPECT_RDATA
			}
		case _EXPECT_ANY_NOCLASS:
			switch l.value {
			case _STRING: // TTL
				if ttl, ok := stringToTtl(l, f, t); !ok {
					return
				} else {
					h.Ttl = ttl
				}
				st = _EXPECT_RRTYPE_BL
			case _RRTYPE:
				h.Rrtype, _ = Str_rr[strings.ToUpper(l.token)]
				st = _EXPECT_RDATA
			default:
				t <- Token{Error: &ParseError{f, "Expecting RR type or TTL, not this...", l}}
				return
			}
		case _EXPECT_RRTYPE_BL:
			if l.value != _BLANK {
				t <- Token{Error: &ParseError{f, "No blank after", l}}
				return
			}
			st = _EXPECT_RRTYPE
		case _EXPECT_RRTYPE:
			if l.value != _RRTYPE {
				t <- Token{Error: &ParseError{f, "Unknown RR type", l}}
				return
			}
			h.Rrtype, _ = Str_rr[strings.ToUpper(l.token)]
			st = _EXPECT_RDATA
		case _EXPECT_RDATA:
			// The current token could be saved here if setRR ever needs it.
			r, e := setRR(h, c, origin, f)
			if e != nil {
				// If e.lex is zero-valued we encountered an unknown RR type;
				// in that case substitute our current lex token.
				if e.lex.token == "" && e.lex.value == 0 {
					e.lex = l // Uh, dirty
				}
				t <- Token{Error: e}
				return
			}
			t <- Token{RR: r}
			st = _EXPECT_OWNER_DIR
		}
	}
}
Beispiel #13
0
// Parses a tab-separated playlist of "Song - Artist - Album" lines,
// printing each labelled field as the '-' separators are reached.
func main() {
	const playlist = `Ghost Writing Pt.2 - Tim Hecker - Haunt Me, Haunt Me Do It Again		
		Ghost Writing Pt.3 - Tim Hecker - Haunt Me, Haunt Me Do It Again`
	// info labels the field currently being accumulated; it advances via
	// nextStage each time a '-' separator is seen.
	var info string = "Song"
	var s scanner.Scanner
	// artist accumulates the text of the current field.
	var artist bytes.Buffer
	s.Init(strings.NewReader(playlist))
	// Only tab counts as whitespace, so spaces and newlines are delivered
	// as tokens and survive into the accumulated output.
	s.Whitespace = 1 << '\t'

	var token rune
	for token != scanner.EOF {
		// Loop until the scanner reaches its End Of File token.
		token = s.Scan()

		// A '-' separator ends the current field: print it, clear the
		// buffer, and advance the label.
		if s.TokenText() == "-" {
			fmt.Println(info, ": ", artist.String())
			artist.Reset()
			info = nextStage(info)
			continue
		}

		// A newline ends the whole entry: print the last field and start
		// the next line over at "Song".
		if s.TokenText() == "\n" {
			fmt.Println(info, ": ", artist.String(), "\n")
			artist.Reset()
			info = "Song"
			continue
		}

		// Accumulate after the separator checks so separators themselves
		// never reach the buffer.
		artist.WriteString(s.TokenText())
	}
	// Print the final field once EOF breaks the loop.
	fmt.Println(info, ": ", artist.String())
}
Beispiel #14
0
// decode_bamd parses a textual ".bamd" description from r. Lines of the
// form `frame <name> "<path>" [[-]x [-]y]` decode one image per frame and
// record its size and optional signed center point; `sequence <f1> ...`
// lines extend the frame lookup table and the sequence list. Afterwards
// every frame is redrawn into a shared 256-color palette: the first
// frame's palette is reused when it matches bgPalette, otherwise a palette
// is generated by median-cut quantization over a contact sheet of all
// frames.
func (d *decoder) decode_bamd(r io.Reader) error {
	var s scanner.Scanner
	var err error
	var imgFrames []image.Image
	s.Init(r)
	// Tabs and spaces are whitespace; newlines remain tokens so they can
	// terminate frame and sequence lines.
	s.Whitespace = 1<<'\t' | 1<<' '
	frameNames := map[string]int{}
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		if strings.ToLower(s.TokenText()) == "frame" {
			center_x, center_y := 0, 0
			tok = s.Scan()
			name := strings.TrimSpace(s.TokenText())
			tok = s.Scan()
			path := strings.Trim(s.TokenText(), "\"")
			tok = s.Scan()
			if s.TokenText() != "\n" {
				// Optional signed center coordinates: a standalone '-'
				// token negates the number that follows it.
				neg_x := 1
				neg_y := 1
				if s.TokenText() == "-" {
					neg_x = -1
					s.Scan()
				}
				if center_x, err = strconv.Atoi(s.TokenText()); err != nil {
					return fmt.Errorf("Error converting %s to an int: %v", s.TokenText(), err)
				}
				center_x *= neg_x
				tok = s.Scan()
				if s.TokenText() == "-" {
					neg_y = -1
					s.Scan()
				}
				if center_y, err = strconv.Atoi(s.TokenText()); err != nil {
					return fmt.Errorf("Error converting %s to an int: %v", s.TokenText(), err)
				}
				center_y *= neg_y
			}
			imgFile, err := os.Open(filepath.Clean(path))
			if err != nil {
				return fmt.Errorf("Unable to open %s: %v", filepath.Clean(path), err)
			}
			img, _, err := image.Decode(imgFile)
			if err != nil {
				return fmt.Errorf("Unable to decode png %s: %v", filepath.Clean(path), err)
			}
			imgFile.Close()
			imgFrames = append(imgFrames, img)

			frame := BamFrame{uint16(img.Bounds().Size().X), uint16(img.Bounds().Size().Y), int16(center_x), int16(center_y), 0}

			frameNames[name] = len(d.Frames)
			d.Frames = append(d.Frames, frame)
		} else if strings.ToLower(s.TokenText()) == "sequence" {
			frames := make([]string, 0)
			sequences := make([]uint16, 0)
			// Stop at end-of-line OR EOF; without the EOF check a sequence
			// on the final, newline-less line would loop forever.
			for tok = s.Scan(); !(s.TokenText() == "\n" || s.TokenText() == "\r" || tok == scanner.EOF); tok = s.Scan() {
				frame := strings.TrimSpace(s.TokenText())
				frames = append(frames, frame)
				sequences = append(sequences, uint16(frameNames[frame]))
			}
			var sequence BamSequence

			if len(frames) > 0 {
				newlut := make([]int16, len(d.FrameLUT)+len(frames))
				sequence.Start = len(d.FrameLUT)
				sequence.Count = len(frames)
				copy(newlut, d.FrameLUT)
				for i, f := range frames {
					newlut[sequence.Start+i] = int16(frameNames[f])
				}
				d.FrameLUT = newlut
			} else {
				// An empty sequence is recorded with a sentinel start.
				sequence.Start = -1
				sequence.Count = 0
			}

			d.sequences = append(d.sequences, sequence)
		}

	}

	// Guard: the imgFrames[0] access below would panic on an input that
	// defines no frames.
	if len(imgFrames) == 0 {
		return fmt.Errorf("bamd input defines no frames")
	}

	paletteImg, ok := imgFrames[0].(*image.Paletted)
	quantizeImage := false
	if ok {
		// if we dont have a ranged palette, quantize the image
		for idx, c := range bgPalette {
			if paletteImg.Palette[idx] != c {
				log.Printf("1st frame palette entry: %d does not match: %v, %v\n", idx, paletteImg.Palette[idx], c)
				quantizeImage = true
			}
		}
	} else {
		quantizeImage = true
	}
	if quantizeImage {
		log.Printf("Generating palette")
		// Lay every frame side by side on one RGBA contact sheet so a
		// single palette can be computed for all of them.
		maxHeight := 0
		width := 0
		for _, i := range imgFrames {
			if i.Bounds().Dy() > maxHeight {
				maxHeight = i.Bounds().Dy()
			}
			width += i.Bounds().Dx()
		}
		contactSheet := image.NewRGBA(image.Rect(0, 0, width, maxHeight))
		x := 0
		for _, i := range imgFrames {
			r := image.Rect(x, 0, x+i.Bounds().Dx(), i.Bounds().Dy())
			draw.Draw(contactSheet, r, i, image.Pt(0, 0), draw.Over)
			x += i.Bounds().Dx()
		}

		// Palette entry 0 is forced to pure green both before and after
		// quantization — presumably the transparency key color; confirm.
		palette := make([]color.Color, 256)
		palette[0] = color.RGBA{0, 255, 0, 255}
		paletteImg = image.NewPaletted(image.Rect(0, 0, width, maxHeight), palette)

		mcq := MedianCutQuantizer{255}
		mcq.Quantize(paletteImg, image.Rect(0, 0, width, maxHeight), contactSheet, image.Pt(0, 0))

		log.Printf("palette size: %d", len(paletteImg.Palette))
		paletteImg.Palette[0] = color.RGBA{0, 255, 0, 255}
	}

	// Redraw every frame with the shared palette.
	for _, i := range imgFrames {
		img := image.NewPaletted(i.Bounds(), paletteImg.Palette)
		draw.Draw(img, i.Bounds(), i, image.Pt(0, 0), draw.Over)

		d.image = append(d.image, *img)
	}

	d.colorMap = d.image[0].Palette

	return nil
}
Beispiel #15
0
//TODO Use YACC tool

// FromFile reads a matrix of complex numbers from nameFile — one row per
// line, entries separated by spaces/tabs, each entry shaped like "a",
// "-a", "a+bi", "a-bi" or "bi" — and returns it as a *Matrix. An error is
// returned when the file cannot be opened or when two rows end up with
// different column counts.
//
// Parsing is a hand-written state machine:
//
//	state 0 — expecting a sign, a number, or a row separator
//	state 1 — a sign was seen; expecting the number it applies to
//	state 2 — a number was read; expecting 'i', a sign, or a separator
//	state 3 — an imaginary part was read; expecting a sign or a separator
func FromFile(nameFile string) (*Matrix, error) {
	ff, errfile := os.Open(nameFile)

	cout := make([]complex128, 0)
	if errfile != nil {
		// Report the actual filename (previously the literal text
		// "nameFile" was printed instead of the variable's value).
		return nil, errors.New("Error to open file: " + nameFile)
	}

	f := bufio.NewReader(ff)
	var s scanner.Scanner
	s.Init(f)
	s.Whitespace = 0 // blanks are value separators, so keep them as tokens

	sign := 1.0
	state := 0
	tok := s.Scan()

	// NOTE: "real" shadows the builtin real() within this function.
	real := 0.0
	img := 0.0
	numb := 0.0

	ncolumnlast := -1
	ncolumn := 0
	nrow := 0

	for tok != scanner.EOF {

		if state == 0 {
			if s.TokenText() == "-" {
				sign = -1.0
				state = 1
			} else if s.TokenText() == "+" {
				state = 1
			} else if tok == scanner.Float || tok == scanner.Int {
				t, _ := strconv.ParseFloat(s.TokenText(), 64)
				numb = t
				state = 2
			} else if s.TokenText() == "\n" {
				// End of a row: verify the column count is consistent.
				if ncolumnlast != ncolumn && ncolumnlast != -1 {

					return nil, errors.New(" Malformed File, columns don't match ")
				}

				ncolumnlast = ncolumn
				ncolumn = 0

				nrow++
			}
		}

		if state == 1 {
			if tok == scanner.Float || tok == scanner.Int {
				t, _ := strconv.ParseFloat(s.TokenText(), 64)
				numb = sign * t
				state = 2
			}
		}

		if state == 2 {
			if tok == scanner.Ident && s.TokenText() == "i" {
				// The number just read was the imaginary part.
				img = numb
				numb = 0
				sign = 1.0
				state = 3
			} else if s.TokenText() == "-" {
				sign = -1.0
				real = numb
				state = 1
			} else if s.TokenText() == "+" {
				sign = 1.0
				real = numb
				state = 1
			} else if s.TokenText() == " " || s.TokenText() == "\t" {
				// Separator: commit the completed entry.
				if numb != 0 {
					real = numb
				}
				sign = 1.0

				cout = append(cout, complex(real, img))

				img = 0
				real = 0
				numb = 0

				ncolumn++

				state = 0
			} else if s.TokenText() == "\n" {
				// End of row: commit the entry, then check column count.
				if numb != 0 {
					real = numb
				}
				sign = 1.0

				cout = append(cout, complex(real, img))

				img = 0
				real = 0
				numb = 0

				ncolumn++

				state = 0

				if ncolumnlast != ncolumn && ncolumnlast != -1 {

					return nil, errors.New(" Malformed File, columns don't match ")
				}
				ncolumnlast = ncolumn
				ncolumn = 0

				nrow++
			}
		}
		if state == 3 {
			if s.TokenText() == "-" {
				sign = -1.0
				numb = 0
				state = 1
			} else if s.TokenText() == "+" {
				// (A dead "state = 1.0" assignment was removed here; it was
				// immediately overwritten by "state = 1".)
				numb = 0
				state = 1
			} else if s.TokenText() == " " || s.TokenText() == "\t" {

				if numb != 0 {
					real = numb
				}
				sign = 1.0

				cout = append(cout, complex(real, img))

				img = 0
				real = 0
				numb = 0

				ncolumn++

				state = 0
			} else if s.TokenText() == "\n" {
				if numb != 0 {
					real = numb
				}
				sign = 1.0

				cout = append(cout, complex(real, img))

				img = 0
				real = 0
				numb = 0

				ncolumn++
				state = 0

				if ncolumnlast != ncolumn && ncolumnlast != -1 {

					return nil, errors.New(" Malformed File, columns don't match ")
				}
				ncolumnlast = ncolumn
				ncolumn = 0

				nrow++
			}
		}
		tok = s.Scan()
		if tok == scanner.EOF {
			// Flush a final entry that was not followed by a newline.
			if numb != 0 {
				real = numb
			}
			cout = append(cout, complex(real, img))

			ncolumn++
			nrow++
			state = 0

			if ncolumnlast != ncolumn && ncolumnlast != -1 {

				return nil, errors.New(" Malformed File, columns don't match ")
			}
			ncolumnlast = ncolumn

		}
	}
	out := NullMatrixP(nrow, ncolumn)
	out.A = cout
	return out, nil
}
Beispiel #16
0
// parseMarkdown scans a markdown-like note document: a line starting with
// "#" becomes the header, a top-level line starting with "*" starts a new
// fact (its front text), and a "*" line indented by 4+ spaces supplies the
// back text of the most recent fact. It returns the last header seen and
// the collected facts; the error result is currently always nil.
func parseMarkdown(str string) (string, []*Fact, error) {
	var s scanner.Scanner
	var tok rune

	// result fields
	var facts []*Fact
	var header string

	// current parsing state
	var buf bytes.Buffer
	var fact *Fact
	inHeader := false
	inNote := false
	indentCount := 0
	whitespaceCount := 0

	debug := func(format string, a ...interface{}) {
		if debugEnabled {
			info := "(indent=%v header=%v note=%v)"
			a = append(a, indentCount, boolToString(inHeader), boolToString(inNote))
			fmt.Printf("scanner: "+format+"\t\t"+info+"\n", a...)
		}
	}

	debugPlain := func(format string, a ...interface{}) {
		if debugEnabled {
			fmt.Printf("scanner: "+format+"\n", a...)
		}
	}

	s.Init(strings.NewReader(str))

	// set mode to 0 to avoid any attempt at string reading, which may cause
	// problems when we run into apostrophes
	s.Mode = scanner.ScanFloats | scanner.ScanIdents | scanner.ScanInts
	// Only '\r' is skipped; spaces and newlines drive the state machine.
	s.Whitespace = 1 << '\r'

	for tok != scanner.EOF {
		tok = s.Scan()

		switch s.TokenText() {
		case " ":
			debug("[SPACE]")

			// Outside header/note text, every 4 leading spaces count as
			// one level of indentation.
			if inHeader || inNote {
				buf.WriteString(s.TokenText())
			} else {
				whitespaceCount += 1
				if whitespaceCount == 4 {
					indentCount += 1
					whitespaceCount = 0
				}
			}

		// reset everything and handle a completed string
		case "\n":
			debug("\\n")

			// string is likely left with leading and possibly trailing
			// whitespace
			trimmed := strings.TrimSpace(buf.String())

			if inNote && trimmed != "" {
				if indentCount == 0 {
					debugPlain("FACT (front): %v\n", trimmed)
					fact = &Fact{Front: trimmed}
					facts = append(facts, fact)
				} else {
					// An indented note is the back of the previous fact.
					if fact != nil {
						debugPlain("FACT (back): %v\n", trimmed)
						fact.Back = trimmed
					}
				}
			}

			if inHeader {
				debugPlain("HEADER: %v\n", trimmed)
				header = trimmed
			}

			buf.Reset()
			inHeader = false
			inNote = false
			indentCount = 0
			whitespaceCount = 0

		case "#":
			debug("#")

			// '#' opens a header unless we are already inside text.
			if !inHeader && !inNote {
				inHeader = true
			} else {
				buf.WriteString(s.TokenText())
			}

		case "*":
			debug("*")

			// '*' opens a note unless we are already inside text.
			if !inHeader && !inNote {
				inNote = true
			} else {
				buf.WriteString(s.TokenText())
			}

		default:
			debug("%s", s.TokenText())

			if inHeader || inNote {
				buf.WriteString(s.TokenText())
			}

			// any non-whitespace character resets whitespace count to zero
			whitespaceCount = 0
		}
	}

	return header, facts, nil
}