//Skip skip func (t *TTFParser) Skip(fd *bytes.Reader, length int) error { _, err := fd.Seek(int64(length), 1) if err != nil { return err } return nil }
// degob converts a gob into a new copy of a subtree. func degob(buf *bytes.Reader) Node { var tree Node buf.Seek(0, 0) dec := gob.NewDecoder(buf) CkErr(dec.Decode(&tree)) return tree }
// UnmarshalStream reads an entire feed document from reader, detects its
// dialect (RSS1/RDF, RSS2, or Atom) and unmarshals it into a normalized
// *Feed tagged with url. The stream is buffered in memory because it must
// be decoded twice: once generically to sniff the dialect, once with the
// dialect-specific unmarshaler. If the first decode fails, the content is
// run through fixEntities once and retried before giving up.
func UnmarshalStream(url string, reader io.Reader) (feed *Feed, err error) {
	// Read the stream into memory (we'll need to parse it twice)
	var contentReader *bytes.Reader
	var content []byte
	if content, err = ioutil.ReadAll(reader); err == nil {
		contentReader = bytes.NewReader(content)
		genericFeed := GenericFeed{}
		decoder := xml.NewDecoder(contentReader)
		decoder.CharsetReader = charset.NewReader
		// First pass - parse the feed as-is
		if err = decoder.Decode(&genericFeed); err != nil {
			// Error - check for invalid entities and correct as appropriate
			if fixed, fixedContent := fixEntities(content); fixed {
				// At least one replacement was made. Retry
				contentReader = bytes.NewReader(fixedContent)
				decoder = xml.NewDecoder(contentReader)
				decoder.CharsetReader = charset.NewReader
				// Try decoding again
				err = decoder.Decode(&genericFeed)
			}
		}
		if err != nil {
			return
		}
		// Pick the concrete unmarshaler from the document's root element.
		var xmlFeed FeedMarshaler
		if genericFeed.XMLName.Space == "http://www.w3.org/1999/02/22-rdf-syntax-ns#" && genericFeed.XMLName.Local == "RDF" {
			xmlFeed = &rss1Feed{}
		} else if genericFeed.XMLName.Local == "rss" {
			xmlFeed = &rss2Feed{}
		} else if genericFeed.XMLName.Space == "http://www.w3.org/2005/Atom" && genericFeed.XMLName.Local == "feed" {
			xmlFeed = &atomFeed{}
		} else {
			err = errors.New("Unsupported type of feed (" + genericFeed.XMLName.Space + ":" + genericFeed.XMLName.Local + ")")
			return
		}
		// Second pass: rewind and decode with the dialect-specific type.
		contentReader.Seek(0, 0)
		decoder = xml.NewDecoder(contentReader)
		decoder.CharsetReader = charset.NewReader
		if err = decoder.Decode(xmlFeed); err == nil {
			if feed, err = xmlFeed.Marshal(); err == nil {
				feed.URL = url
			}
		}
	}
	return
}
func copyResp(code int, headers http.Header, body *bytes.Reader, newRw http.ResponseWriter) { h := newRw.Header() for k, v := range headers { h[k] = append(h[k], v...) } newRw.WriteHeader(code) body.WriteTo(newRw) body.Seek(0, 0) }
//Seek seek by tag func (t *TTFParser) Seek(fd *bytes.Reader, tag string) error { table, ok := t.tables[tag] if !ok { return ErrTableNotFound } val := table.Offset _, err := fd.Seek(int64(val), 0) if err != nil { return err } return nil }
// processQueries runs a two-stage CaBLAST search over the translated
// queries: (1) a coarse BLAST pass against the compressed database into
// searchBuf, (2) expansion of the coarse hits into full sequences, and
// (3) a fine BLAST pass against a temporary database built from those
// expanded sequences. Each stage's error goes through handleFatalError.
func processQueries(
	db *cablastp.DB, transQueries *bytes.Reader, searchBuf *bytes.Buffer) error {
	// now we will read from queryBuf!
	// I think we create a NewReader from queryBuf?
	// this now needs to become the replacement for inputFastaQuery
	// so must use a different buffer for that.
	// we need a buffer for the query trans/reduce
	// and a buffer for coarse blast results
	cablastp.Vprintln("\nBlasting query on coarse database...")
	err := blastCoarse(db, transQueries, searchBuf)
	handleFatalError("Error blasting coarse database", err)
	cablastp.Vprintln("Decompressing blast hits...")
	expandedSequences, err := expandBlastHits(db, searchBuf)
	handleFatalError("Error decompressing blast hits", err)
	if len(expandedSequences) == 0 {
		cablastp.Vprintln("No results from coarse search")
	} else {
		// Write the contents of the expanded sequences to a fasta file.
		// It is then indexed using makeblastdb.
		// searchBuf is reused for the fine-database FASTA input.
		searchBuf.Reset()
		err = writeFasta(expandedSequences, searchBuf)
		handleFatalError("Could not create FASTA input from coarse hits", err)
		// Create the fine blast db in a temporary directory
		cablastp.Vprintln("Building fine BLAST database...")
		tmpDir, err := makeFineBlastDB(db, searchBuf)
		handleFatalError("Could not create fine database to search on", err)
		// retrieve the cluster members for the original representative query seq
		// pass them to blastx on the expanded (fine) db
		// Finally, run the query against the fine fasta database and pass on the
		// stdout and stderr...
		cablastp.Vprintln("Blasting query on fine database...")
		// Rewind the query reader: it was fully consumed by blastCoarse.
		_, err = transQueries.Seek(0, 0) // First 0 is amount to offset, Second 0
		// is code for absolute
		handleFatalError("Could not seek to start of query fasta input", err)
		err = blastFine(db, tmpDir, transQueries)
		handleFatalError("Error blasting fine database", err)
		// Delete the temporary fine database.
		if !flagNoCleanup {
			err := os.RemoveAll(tmpDir)
			handleFatalError("Could not delete fine BLAST database", err)
		}
	}
	return nil
}
func UnmarshalStream(reader io.Reader) (*Feed, string, error) { format := "" // Read the stream into memory (we'll need to parse it twice) var contentReader *bytes.Reader if buffer, err := ioutil.ReadAll(reader); err == nil { contentReader = bytes.NewReader(buffer) } else { return nil, format, err } genericFeed := GenericFeed{} decoder := xml.NewDecoder(contentReader) decoder.CharsetReader = charsetReader if err := decoder.Decode(&genericFeed); err != nil { return nil, format, err } var xmlFeed FeedMarshaler if genericFeed.XMLName.Space == "http://www.w3.org/1999/02/22-rdf-syntax-ns#" && genericFeed.XMLName.Local == "RDF" { xmlFeed = &rss1Feed{} format = "RSS1" } else if genericFeed.XMLName.Local == "rss" { xmlFeed = &rss2Feed{} format = "RSS2" } else if genericFeed.XMLName.Space == "http://www.w3.org/2005/Atom" && genericFeed.XMLName.Local == "feed" { xmlFeed = &atomFeed{} format = "Atom" } else { return nil, format, errors.New("Unsupported type of feed (" + genericFeed.XMLName.Space + ":" + genericFeed.XMLName.Local + ")") } contentReader.Seek(0, 0) decoder = xml.NewDecoder(contentReader) decoder.CharsetReader = charsetReader if err := decoder.Decode(xmlFeed); err != nil { return nil, format, err } feed, err := xmlFeed.Marshal() if err != nil { return nil, format, err } return &feed, format, nil }
func testZipArc(f *bytes.Reader) { za, err := New(&reader{f}, f.Size()) if err != nil { panic(err) } compareFile(za, "a.txt", "This is a file.\n") compareFile(za, "k/l/m/a.txt", "k-l-m-a!\n") compareFile(za, "z.txt", "This is also a file.\n") d, err := za.Open("a") if err != nil { panic(err) } dfi, err := d.Readdir(0) if err != nil { panic(err) } h := sha256.New() for _, x := range dfi { s := fmt.Sprintf("%v\t%v\t%v\t%v\n", x.Name(), x.IsDir(), x.Mode(), x.Size()) h.Write([]byte(s)) } hs := hex.EncodeToString(h.Sum(nil)) if hs != "93d5c5b0a7a5205b5ad687a8726dbec1a355a3937151ca96f7e704675ae1e536" { panic("hash mismatch: " + hs) } }
// HandleLog is the default http log handler func HandleLog(h HTTP, entries <-chan *log.Entry) { var e *log.Entry var b []byte var reader *bytes.Reader formatter := h.FormatFunc()(h) remoteHost := h.RemoteHost() httpClient := stdhttp.Client{} req, _ := stdhttp.NewRequest(h.Method(), remoteHost, nil) req.Header = h.Headers() for e = range entries { b = formatter(e) reader = bytes.NewReader(b) req.Body = ioutil.NopCloser(reader) req.ContentLength = int64(reader.Len()) resp, err := httpClient.Do(req) if err != nil { log.Error("Could not post data to %s: %v\n", remoteHost, err) goto END } if resp.StatusCode < 200 || resp.StatusCode >= 299 { bt, _ := ioutil.ReadAll(resp.Body) log.Error("Received HTTP %d during POST request to %s body: %s\n", resp.StatusCode, remoteHost, string(bt)) } END: e.Consumed() } }
func _readLong3(file *bytes.Reader, offset int64) int64 { buf := make([]byte, 4) file.ReadAt(buf, offset) buf[3] = 0x00 return int64(binary.LittleEndian.Uint32(buf)) }
func readRune(r *bytes.Reader) rune { n, s, err := r.ReadRune() if n == _EOF_ || s == 0 || err != nil { return _EOF_ } return n }
func installZip(source *bytes.Reader, dest string) error { zr, err := zip.NewReader(source, int64(source.Len())) if err != nil { return err } for _, f := range zr.File { fileCopy, err := os.OpenFile(dest, installFlag, f.Mode()) if err != nil { return err } defer fileCopy.Close() rc, err := f.Open() if err != nil { return err } defer rc.Close() _, err = io.Copy(fileCopy, rc) if err != nil { return err } } return nil }
func (k *KeenIoMetrics) AuthedRequest(method, path string, body *bytes.Reader) (resp *http.Response, err error) { path = fmt.Sprintf("https://api.keen.io/3.0/projects/%s%s", k.ProjectToken, path) req, err := http.NewRequest(method, path, body) if err != nil { return } req.Header.Add("Authorization", k.ApiKey) if body != nil { req.Header.Add("Content-Type", "application/json") req.ContentLength = int64(body.Len()) } resp, err = k.HttpClient.Do(req) if err != nil { k.Error("Failed to send metric event to keen.io %v", err) } else { defer resp.Body.Close() if resp.StatusCode != 201 { bytes, _ := ioutil.ReadAll(resp.Body) k.Error("Got %v response from keen.io: %s", resp.StatusCode, bytes) } } return }
func (this *HSPerfData) readEntryValueAsLong(reader *bytes.Reader, StartOfs int64, entry *PerfDataEntry) error { reader.Seek(StartOfs+int64(entry.DataOffset), os.SEEK_SET) reader.Read(this.globalbuf[:8]) entry.LongValue = int64(this.byteOrder.Uint64(this.globalbuf[:8])) return nil }
func readBytes(f *bytes.Reader, len int) ([]byte, error) { b := make([]byte, len) _, err := f.Read(b) if err != nil { return nil, err } return b, nil }
func ReadMac(reader *bytes.Reader) uint64 { mac_byte := make([]byte, 6) reader.Read(mac_byte) mac := []byte{0, 0} mac = append(mac, mac_byte...) return binary.BigEndian.Uint64(mac) }
func UnpackString(in *bytes.Reader, n uint16) (Text, error) { buf := make([]byte, n) _, e := in.Read(buf) if e != nil { return "", e } return Text(string(buf)), nil }
func UnpackBytes(in *bytes.Reader, n uint16) (Bytes, error) { ret := make([]byte, n) if _, e := in.Read([]byte(ret)); e != nil { return nil, e } return Bytes(ret), nil }
func (section_header *Section_Header) Verify(datar *bytes.Reader) bool { var buf []byte datar.Read(buf) fmt.Println(section_header.Checksum, len(buf)) return section_header.Checksum == adler32.Checksum(buf[:72]) }
func (sitemap *SitemapPage) parseSitemapPageNoGzip(mem_seek *bytes.Reader) (byte_reader *bytes.Reader, err error) { // validate uncompressed size if mem_seek.Len() > max_sitemap_page_size { return mem_seek, max_sitemap_page_size_error } err = sitemap.determineIteratorFormat(mem_seek) return mem_seek, err }
func render_line(line *bytes.Reader, h int) { m_x, _ := termbox.Size() for x := 0; x <= m_x; x++ { if r, _, err := line.ReadRune(); err == nil { termbox.SetCell(x, h, r, termbox.ColorBlack, termbox.ColorWhite) } else { termbox.SetCell(x, h, ' ', termbox.ColorBlack, termbox.ColorWhite) } } }
func (self *Question) unpackFlags(in *bytes.Reader) error { buf := make([]byte, 4) if _, e := in.Read(buf); e != nil { return e } self.Type = enc.Uint16(buf[0:2]) self.Class = enc.Uint16(buf[2:4]) return nil }
func ReadString(r *bytes.Reader) string { length, _ := binary.ReadUvarint(r) if length < 1 { return "" } else { buf := make([]byte, length) r.Read(buf) return string(buf) } }
func (self *RR) unpackFlags(in *bytes.Reader) error { var buf [8]byte if _, e := in.Read(buf[:]); e != nil { return e } self.Type = enc.Uint16(buf[0:2]) self.Class = enc.Uint16(buf[2:4]) self.TTL = enc.Uint32(buf[4:8]) return nil }
func UnpackIPv6(in *bytes.Reader, n uint16) (IPv6, error) { if n != 16 { return nil, fmt.Errorf("IPv6 with %d bytes", n) } buf := make([]byte, 16) _, e := in.Read(buf) if e != nil { return nil, e } return IPv6(buf), nil }
//Read read func (t *TTFParser) Read(fd *bytes.Reader, length int) ([]byte, error) { buff := make([]byte, length) readlength, err := fd.Read(buff) if err != nil { return nil, err } if readlength != length { return nil, errors.New("file out of length") } //fmt.Printf("%d,%s\n", readlength, string(buff)) return buff, nil }
func (self *Entity) Store( storageObj *data.Storage, objectObj *data.Object, input *bytes.Reader, ) error { ctx := self.Ctx() closer := ctx.LogMark("[Entity.Store]") defer closer() uri := strings.Join([]string{storageObj.Uri, objectObj.InternalName}, "/") cl := input.Len() ctx.Debugf("Going to store %d bytes in %s", cl, uri) req, err := http.NewRequest("PUT", uri, input) if err != nil { ctx.Debugf("Failed to create request: %s", err) return err } // XXX Need to check if this vanilla http client is ok client := &http.Client{} resp, err := client.Do(req) if err != nil { ctx.Debugf("Failed to send PUT request to %s (storage = %d): %s", uri, storageObj.Id, err) return err } if resp.StatusCode != 201 { err = errors.New( fmt.Sprintf( "Expected response 201 for PUT request, but did not get it: %s", resp.Status, ), ) ctx.Debugf("Failed to store PUT request to %s (storage = %d): %s", uri, storageObj.Id, err) return err } ctx.Debugf("Successfully stored object in %s", uri) err = self.Create( objectObj.Id, storageObj.Id, ) if err != nil { return err } return nil }
func UnpackIPv4(in *bytes.Reader, n uint16) (IPv4, error) { if n != 4 { return nil, fmt.Errorf("IPv4 with %d bytes", n) } buf := make([]byte, 4) _, e := in.Read(buf) if e != nil { return nil, e } return IPv4(net.IPv4(buf[0], buf[1], buf[2], buf[3])), nil }
func (p *Pattern) parseTracks(r *bytes.Reader) error { for r.Len() > 0 { t := &Track{} err := t.parse(r) if err != nil { fmt.Println("parse Track failed:", err) return err } p.Tracks = append(p.Tracks, *t) } return nil }
func (h *HTTPSender) buildBaseRequest(contextPath string, method string, headers map[string][]string, bodyReader *bytes.Reader) *http.Request { var req http.Request req.Method = h.method req.ProtoMajor = 1 req.ProtoMinor = 1 req.Close = false req.Header = h.headers req.URL = h.parsedContextPath req.URL.Host = h.hosts[h.currentHost] req.Body = ioutil.NopCloser(bodyReader) req.ContentLength = int64(bodyReader.Len()) return &req }