Example #1
// getFile downloads a single key from the S3 bucket to the local file dst
func (remote *S3Remote) getFile(dst string, key *keyDef) error {
	fmt.Printf("pulling key %s (%s)\n", key.key, utils.HumanSize(key.s3Key.Size))

	srcKey := remote.remoteKey(key.key)

	from, err := remote.getBucket().GetReader(srcKey)
	if err != nil {
		return err
	}
	defer from.Close()
	bufFrom := bufio.NewReader(from)

	if err := os.MkdirAll(filepath.Dir(dst), 0700); err != nil {
		return err
	}

	to, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer to.Close()

	// wrap the buffered reader so download progress is reported on stdout
	progressReaderFrom := utils.NewProgressReader(bufFrom, key.s3Key.Size, os.Stdout)

	_, err = io.Copy(to, progressReaderFrom)
	if err != nil {
		return err
	}

	// TODO validate against sum

	return nil
}
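
The checksum validation that getFile leaves as a TODO could be handled by a small helper like the one sketched below. This is an illustration only: verifyFileSum is a hypothetical name, and it assumes the stored sum is a hex-encoded SHA-256 digest of the file contents, which these snippets do not confirm (the real algorithm lives behind keyDef.Sum()).

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// verifyFileSum recomputes the digest of the file at path and compares it
// with the expected hex-encoded sum. Hypothetical helper; assumes SHA-256.
func verifyFileSum(path, expectedSum string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return err
	}

	actual := hex.EncodeToString(h.Sum(nil))
	if actual != expectedSum {
		return fmt.Errorf("checksum mismatch for %s: got %s, want %s", path, actual, expectedSum)
	}
	return nil
}

With such a helper in place, getFile could call verifyFileSum(dst, key.Sum()) after the copy completes instead of carrying the TODO.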
Example #2
// putFile uploads the file at src from the imageRoot to the S3 bucket under key
func (remote *S3Remote) putFile(src string, key *keyDef) error {
	dstKey := remote.remoteKey(key.key)

	f, err := os.Open(src)
	if err != nil {
		return err
	}
	defer f.Close()

	finfo, err := f.Stat()
	if err != nil {
		return err
	}

	progressReader := utils.NewProgressReader(f, finfo.Size(), os.Stdout)

	// XXX Compression is disabled for now: PutReader needs the content length
	// up front, and we don't know how big the compressed file will be ahead of time!
	// compressorReader, err := remote.compressor.CompressReader(progressReader)
	// if err != nil {
	// 	return err
	// }

	err = remote.getBucket().PutReader(dstKey, progressReader, finfo.Size(), "application/octet-stream", s3.Private)
	if err != nil {
		return err
	}

	// store the checksum alongside the object so downloads can be validated
	return remote.getBucket().Put(dstKey+".sum", []byte(key.Sum()), "text/plain", s3.Private)
}
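
The XXX note above points at why compression is commented out: PutReader needs the content length up front, and the compressed size is unknown until compression finishes. One common workaround is to compress into a staging buffer (or a temporary file for large images) and upload that. The sketch below is an assumption, not project code: putCompressed is a hypothetical helper meant to sit in the same package as putFile, with "bytes" and "compress/gzip" added to that file's imports.

// putCompressed gzips src into an in-memory buffer first, so the final
// size is known before calling PutReader. Hypothetical helper; for very
// large files a temporary file would be a better staging area than a
// bytes.Buffer.
func putCompressed(remote *S3Remote, dstKey string, src io.Reader) error {
	var buf bytes.Buffer

	gz := gzip.NewWriter(&buf)
	if _, err := io.Copy(gz, src); err != nil {
		return err
	}
	if err := gz.Close(); err != nil {
		return err
	}

	return remote.getBucket().PutReader(dstKey, &buf, int64(buf.Len()), "application/octet-stream", s3.Private)
}

putFile could then pass its progressReader through a helper like this instead of uploading the raw file.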