// getFile gets a single file from the S3 bucket and writes it to dst.
func (remote *S3Remote) getFile(dst string, key *keyDef) error {
	log.Printf("Pulling key %s (%s)\n", key.key, utils.HumanSize(key.s3Key.Size))

	from, _, err := remote.getUploadDownloadBucket().GetReader(key.key, nil)
	if err != nil {
		return err
	}
	defer from.Close()

	// ensure the destination directory exists
	if err := os.MkdirAll(filepath.Dir(dst), 0700); err != nil {
		return err
	}

	to, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer to.Close()

	// stream the object to disk, reporting progress as it downloads
	progressReader := utils.NewProgressReader(from, key.s3Key.Size, key.key)
	if _, err := io.Copy(to, progressReader); err != nil {
		return err
	}

	return nil
}
// createFileFromTar extracts a single regular-file tar entry under root.
func (cli *DogestryCli) createFileFromTar(root string, header *tar.Header, tarball io.Reader) error {
	// only handle files (directories are implicit)
	if header.Typeflag == tar.TypeReg {
		fmt.Printf(" tar: extracting file: %s\n", header.Name)

		// special case - repositories file
		if filepath.Base(header.Name) == "repositories" {
			if err := createRepositoriesJsonFile(root, tarball); err != nil {
				return err
			}
		} else {
			barename := strings.TrimPrefix(header.Name, "./")
			dest := filepath.Join(root, "images", barename)

			if err := os.MkdirAll(filepath.Dir(dest), os.ModeDir|0700); err != nil {
				return err
			}

			destFile, err := os.Create(dest)
			if err != nil {
				return err
			}
			defer destFile.Close()

			wrote, err := io.Copy(destFile, tarball)
			if err != nil {
				return err
			}
			fmt.Printf(" tar: file created. Size: %s\n", utils.HumanSize(wrote))
		}
	}

	return nil
}