// ReadRawRecord reads a raw record from the provided file. // // On error, err will no non-nil. Expected error values are io.EOF // when the end of the file has been reached or io.ErrUnexpectedEOF if // a complete record was unable to be read. // // In the case of io.ErrUnexpectedEOF the file offset will be reset // back to where it was upon entering this function so it is ready to // be read from again if it is expected more data will be written to // the file. func ReadRawRecord(file io.ReadWriteSeeker) (*RawRecord, error) { var header RawHeader /* Get the current offset so we can seek back to it. */ offset, _ := file.Seek(0, 1) /* Now read in the header. */ err := binary.Read(file, binary.BigEndian, &header) if err != nil { file.Seek(offset, 0) return nil, err } /* Create a buffer to hold the raw record data and read the /* record data into it */ data := make([]byte, header.Len) n, err := file.Read(data) if err != nil { file.Seek(offset, 0) return nil, err } if uint32(n) != header.Len { file.Seek(offset, 0) return nil, io.ErrUnexpectedEOF } return &RawRecord{header.Type, data}, nil }
// hashCopyN - Calculates Md5sum and SHA256sum for upto partSize amount of bytes. func (c Client) hashCopyN(writer io.ReadWriteSeeker, reader io.Reader, partSize int64) (md5Sum, sha256Sum []byte, size int64, err error) { // MD5 and SHA256 hasher. var hashMD5, hashSHA256 hash.Hash // MD5 and SHA256 hasher. hashMD5 = md5.New() hashWriter := io.MultiWriter(writer, hashMD5) if c.signature.isV4() { hashSHA256 = sha256.New() hashWriter = io.MultiWriter(writer, hashMD5, hashSHA256) } // Copies to input at writer. size, err = io.CopyN(hashWriter, reader, partSize) if err != nil { // If not EOF return error right here. if err != io.EOF { return nil, nil, 0, err } } // Seek back to beginning of input, any error fail right here. if _, err := writer.Seek(0, 0); err != nil { return nil, nil, 0, err } // Finalize md5shum and sha256 sum. md5Sum = hashMD5.Sum(nil) if c.signature.isV4() { sha256Sum = hashSHA256.Sum(nil) } return md5Sum, sha256Sum, size, err }
func Compress(infile, outfile io.ReadWriteSeeker, passwd string, cb Callback) (err error) { waveHdr := WaveHeader{} var dataSize uint32 if dataSize, err = waveHdr.Read(infile); err != nil { err = errRead return } else if dataSize >= 0x7FFFFFFF { err = fmt.Errorf("incorrect data size info in wav file: %x", dataSize) return } if (waveHdr.chunkId != riffSign) || (waveHdr.format != waveSign) || (waveHdr.numChannels == 0) || (waveHdr.numChannels > maxNCH) || (waveHdr.bitsPerSample == 0) || (waveHdr.bitsPerSample > maxBPS) { err = errFormat return } encoder := NewEncoder(outfile) smpSize := uint32(waveHdr.numChannels * ((waveHdr.bitsPerSample + 7) / 8)) info := Info{ nch: uint32(waveHdr.numChannels), bps: uint32(waveHdr.bitsPerSample), sps: waveHdr.sampleRate, format: formatSimple, samples: dataSize / smpSize, } if len(passwd) > 0 { encoder.SetPassword(passwd) info.format = formatEncrypted } bufSize := pcmBufferLength * smpSize buffer := make([]byte, bufSize) if err = encoder.SetInfo(&info, 0); err != nil { return } var readLen int for dataSize > 0 { if bufSize >= dataSize { bufSize = dataSize } if readLen, err = infile.Read(buffer[:bufSize]); err != nil || readLen != int(bufSize) { err = errRead return } encoder.ProcessStream(buffer[:bufSize], cb) dataSize -= bufSize } encoder.Close() return }
func (geneMap GeneMap) Save(file io.ReadWriteSeeker) (err os.Error) { //go to the begging of the file file.Seek(0, 0) enc := gob.NewEncoder(file) err = enc.Encode(lens{len(geneMap)}) for key, item := range geneMap { enc.Encode(keyVal{key, item}) if err != nil { panic(err.String()) } } return }
func newIndex(rw io.ReadWriteSeeker) (*storeindex, error) { off, err := rw.Seek(0, os.SEEK_END) if err != nil { return nil, errwrap.Err(err, "cannot seek") } if off%8 != 0 { return nil, fmt.Errorf("corrupted index store: %d", off) } x := &storeindex{ count: uint64(off / 8), rw: rw, } return x, nil }
func AppendAdder(genome io.ReadWriteSeeker, gene genome.Gene, geneMap genome.GeneMap) (pos int64, err os.Error) { //go to the end of the file pos, err = genome.Seek(0, 2) length, err := genome.Write(gene) if err != nil { panic(err.String()) } genome.Seek(pos, 0) test := make([]byte, length) genome.Read(test) if !bytes.Equal(gene.GetData(), test) { panic("apperder broken") } return }
func Decompress(infile, outfile io.ReadWriteSeeker, passwd string, cb Callback) (err error) { decoder := NewDecoder(infile) if len(passwd) > 0 { decoder.SetPassword(passwd) } info := Info{} if err = decoder.GetInfo(&info, 0); err != nil { return } smpSize := info.nch * ((info.bps + 7) / 8) dataSize := info.samples * smpSize waveHdr := WaveHeader{ chunkId: riffSign, chunkSize: dataSize + 36, format: waveSign, subchunkId: fmtSign, subchunkSize: 16, audioFormat: 1, numChannels: uint16(info.nch), sampleRate: info.sps, bitsPerSample: uint16(info.bps), byteRate: info.sps * smpSize, blockAlign: uint16(smpSize), } if err = waveHdr.Write(outfile, dataSize); err != nil { return } bufSize := pcmBufferLength * smpSize buffer := make([]byte, bufSize) var writeLen int for { if writeLen = int(uint32(decoder.ProcessStream(buffer, cb)) * smpSize); writeLen == 0 { break } buf := buffer[:writeLen] if writeLen, err = outfile.Write(buf); err != nil { return } else if writeLen != len(buf) { err = errPartialWritten return } } return }
// insertSample walks the on-disk octree stored in readWriter, adding
// sample's color contribution to every node on the path from the node
// at the current file offset down to voxel resolution 1, allocating
// child nodes at the end of the file as needed. header's node/leaf
// counters are updated in place.
//
// NOTE(review): assumes readWriter is positioned at the start of a
// node record on entry — confirm against callers.
func insertSample(cfg *BuildConfig, header *OctreeHeader, readWriter io.ReadWriteSeeker, sample Sample, bounds Box, voxelRes int) error {
	var node accNode
	for {
		// Load the node at the current offset...
		if err := binary.Read(readWriter, binary.BigEndian, &node); err != nil {
			return err
		}
		// ...then seek back to its start so it can be updated in place.
		if _, err := readWriter.Seek(int64(-mipR64G64B64A64S64UnpackUI32.NodeSize()), 1); err != nil {
			return err
		}

		// Accumulate the sample's color; Color[4] counts samples,
		// presumably so an average can be reconstructed later — verify.
		color := sample.Color()
		node.Color[0] += uint64(color.R * 255)
		node.Color[1] += uint64(color.G * 255)
		node.Color[2] += uint64(color.B * 255)
		node.Color[3] += uint64(color.A * 255)
		node.Color[4]++

		// Write the updated color back. This leaves the offset just past
		// Color — the later Children write assumes Children immediately
		// follows Color in accNode's on-disk layout (layout not visible
		// here; confirm).
		if err := binary.Write(readWriter, binary.BigEndian, node.Color); err != nil {
			return err
		}

		// Finest resolution reached: this node is a leaf.
		if voxelRes == 1 {
			header.NumLeafs++
			return nil
		}

		var (
			childBounds Box
			newVoxelRes = voxelRes
		)
		for i, child := range node.Children {
			// Compute this child's octant bounds.
			childBounds.Size = bounds.Size * 0.5
			childOffset := childPositions[i].scale(childBounds.Size)
			childBounds.Pos = bounds.Pos.add(&childOffset)

			if childBounds.Intersect(sample.Position()) == true {
				if child == 0 {
					// Child does not exist yet: allocate a zeroed node at
					// the end of the file. Remember the current offset so
					// the parent's Children field can be patched first.
					currentPos, err := readWriter.Seek(0, 1)
					if err != nil {
						return err
					}
					newPos, err := readWriter.Seek(0, 2)
					if err != nil {
						return err
					}
					if _, err = readWriter.Seek(currentPos, 0); err != nil {
						return err
					}
					// Store the child's node index (relative to the end of
					// the header, in node-size units) in the parent.
					node.Children[i] = uint32((newPos - int64(header.Size())) / int64(mipR64G64B64A64S64UnpackUI32.NodeSize()))
					if err := binary.Write(readWriter, binary.BigEndian, node.Children); err != nil {
						return err
					}
					// Append the zeroed child node itself.
					if _, err = readWriter.Seek(newPos, 0); err != nil {
						return err
					}
					header.NumNodes++
					var newNode accNode
					if err := binary.Write(readWriter, binary.BigEndian, newNode); err != nil {
						return err
					}
					// Rewind to the child's start for the next iteration.
					if _, err := readWriter.Seek(int64(-mipR64G64B64A64S64UnpackUI32.NodeSize()), 1); err != nil {
						return err
					}
				} else {
					// Child exists: seek to its node record.
					if _, err := readWriter.Seek(int64(int(child)*mipR64G64B64A64S64UnpackUI32.NodeSize()+header.Size()), 0); err != nil {
						return err
					}
				}
				newVoxelRes = voxelRes / 2
				break
			}
		}

		// No child octant contained the sample position: stop here.
		if newVoxelRes == voxelRes {
			return nil
		} else {
			// Descend into the selected child.
			bounds = childBounds
			voxelRes = newVoxelRes
		}
	}
}