Example #1
func copyToTemp(reader io.Reader, fileSize int64, cb progress.CopyCallback) (oid string, size int64, tmp *os.File, err error) {
	tmp, err = TempFile("")
	if err != nil {
		return
	}

	defer tmp.Close()

	// Hash the content while writing it to the temp file.
	oidHash := sha256.New()
	writer := io.MultiWriter(oidHash, tmp)

	if fileSize == 0 {
		// No meaningful total size, so skip progress reporting.
		cb = nil
	}

	// If the content already decodes as a pointer-sized LFS pointer, signal
	// that it is already clean instead of hashing it.
	by, ptr, err := DecodeFrom(reader)
	if err == nil && len(by) < 512 {
		err = errutil.NewCleanPointerError(err, ptr, by)
		return
	}

	// Prepend the bytes DecodeFrom already consumed, then copy the rest with
	// progress reporting.
	multi := io.MultiReader(bytes.NewReader(by), reader)
	size, err = tools.CopyWithCallback(writer, multi, fileSize, cb)

	if err != nil {
		return
	}

	oid = hex.EncodeToString(oidHash.Sum(nil))
	return
}
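
Every example here pushes its bytes through tools.CopyWithCallback so progress can be reported while the copy runs. The sketch below shows one way such a helper can be built; the callback signature func(totalSize, readSoFar int64, readSinceLast int) error is taken from the examples themselves, but the package name, the callbackReader type, and its fields are illustrative assumptions rather than the library's actual implementation.

// Package progresscopy is a hypothetical stand-in for the tools package.
package progresscopy

import "io"

// CopyCallback matches the callback signature used in the examples above.
type CopyCallback func(totalSize int64, readSoFar int64, readSinceLast int) error

// callbackReader reports progress on every Read before handing the data on.
type callbackReader struct {
	r         io.Reader
	cb        CopyCallback
	totalSize int64
	readSoFar int64
}

func (c *callbackReader) Read(p []byte) (int, error) {
	n, err := c.r.Read(p)
	if n > 0 {
		c.readSoFar += int64(n)
		if cbErr := c.cb(c.totalSize, c.readSoFar, n); cbErr != nil && err == nil {
			err = cbErr
		}
	}
	return n, err
}

// CopyWithCallback copies reader to writer, invoking cb as data flows.
func CopyWithCallback(writer io.Writer, reader io.Reader, totalSize int64, cb CopyCallback) (int64, error) {
	if cb == nil {
		return io.Copy(writer, reader)
	}
	return io.Copy(writer, &callbackReader{r: reader, cb: cb, totalSize: totalSize})
}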
func performDownload(oid string, size int64, a *action, writer, errWriter *bufio.Writer) {
	// We just use the URLs we're given, so we're just a proxy for the direct method
	// but this is enough to test intermediate custom adapters
	req, err := httputil.NewHttpRequest("GET", a.Href, a.Header)
	if err != nil {
		sendTransferError(oid, 2, err.Error(), writer, errWriter)
		return
	}
	res, err := httputil.DoHttpRequest(cfg, req, true)
	if err != nil {
		sendTransferError(oid, res.StatusCode, err.Error(), writer, errWriter)
		return
	}
	defer res.Body.Close()

	dlFile, err := ioutil.TempFile("", "lfscustomdl")
	if err != nil {
		sendTransferError(oid, 3, err.Error(), writer, errWriter)
		return
	}
	defer dlFile.Close()
	dlfilename := dlFile.Name()
	// Turn callback into progress messages
	cb := func(totalSize int64, readSoFar int64, readSinceLast int) error {
		sendProgress(oid, readSoFar, readSinceLast, writer, errWriter)
		return nil
	}
	_, err = tools.CopyWithCallback(dlFile, res.Body, res.ContentLength, cb)
	if err != nil {
		sendTransferError(oid, 4, fmt.Sprintf("cannot write data to tempfile %q: %v", dlfilename, err), writer, errWriter)
		os.Remove(dlfilename)
		return
	}
	if err := dlFile.Close(); err != nil {
		sendTransferError(oid, 5, fmt.Sprintf("can't close tempfile %q: %v", dlfilename, err), writer, errWriter)
		os.Remove(dlfilename)
		return
	}

	// completed
	complete := &transferResponse{"complete", oid, dlfilename, nil}
	err = sendResponse(complete, writer, errWriter)
	if err != nil {
		writeToStderr(fmt.Sprintf("Unable to send completion message: %v\n", err), errWriter)
	}
}
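
performDownload leans on a handful of helpers (sendProgress, sendTransferError, sendResponse, writeToStderr) that serialize messages for the parent git-lfs process. The sketch below shows plausible implementations: one JSON object per line, in the style of the git-lfs custom transfer adapter protocol. The type definitions, field tags, and package name are assumptions made for illustration, not the test program's real source.

// Hypothetical package name; illustrative only.
package customadapter

import (
	"bufio"
	"encoding/json"
	"fmt"
)

type transferError struct {
	Code    int    `json:"code"`
	Message string `json:"message"`
}

// Fields in the order used by &transferResponse{"complete", oid, path, nil}.
type transferResponse struct {
	Event string         `json:"event"`
	Oid   string         `json:"oid"`
	Path  string         `json:"path,omitempty"`
	Error *transferError `json:"error,omitempty"`
}

type progressResponse struct {
	Event          string `json:"event"`
	Oid            string `json:"oid"`
	BytesSoFar     int64  `json:"bytesSoFar"`
	BytesSinceLast int    `json:"bytesSinceLast"`
}

func writeToStderr(msg string, errWriter *bufio.Writer) {
	errWriter.WriteString(msg)
	errWriter.Flush()
}

// sendResponse marshals any message and writes it as a single line.
func sendResponse(r interface{}, writer, errWriter *bufio.Writer) error {
	b, err := json.Marshal(r)
	if err != nil {
		return err
	}
	if _, err := writer.Write(append(b, '\n')); err != nil {
		return err
	}
	return writer.Flush()
}

func sendTransferError(oid string, code int, message string, writer, errWriter *bufio.Writer) {
	if err := sendResponse(&transferResponse{"complete", oid, "", &transferError{code, message}}, writer, errWriter); err != nil {
		writeToStderr(fmt.Sprintf("Unable to send transfer error: %v\n", err), errWriter)
	}
}

func sendProgress(oid string, bytesSoFar int64, bytesSinceLast int, writer, errWriter *bufio.Writer) {
	if err := sendResponse(&progressResponse{"progress", oid, bytesSoFar, bytesSinceLast}, writer, errWriter); err != nil {
		writeToStderr(fmt.Sprintf("Unable to send progress update: %v\n", err), errWriter)
	}
}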
// download starts or resumes a download. Always closes dlFile if non-nil.
func (a *basicDownloadAdapter) download(t *Transfer, cb TransferProgressCallback, authOkFunc func(), dlFile *os.File, fromByte int64, hash hash.Hash) error {

	if dlFile != nil {
		// ensure dlFile is always closed. This does not conflict with the early
		// close below: closing an already-closed file only returns an error,
		// which the deferred call ignores.
		defer dlFile.Close()
	}

	rel, ok := t.Object.Rel("download")
	if !ok {
		return errors.New("Object not found on the server.")
	}

	req, err := httputil.NewHttpRequest("GET", rel.Href, rel.Header)
	if err != nil {
		return err
	}

	if fromByte > 0 {
		if dlFile == nil || hash == nil {
			return fmt.Errorf("Cannot restart %v from %d without a file & hash", t.Object.Oid, fromByte)
		}
		// We could just use a start byte, but since we know the length, be specific
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-%d", fromByte, t.Object.Size-1))
	}

	res, err := httputil.DoHttpRequest(req, true)
	if err != nil {
		// Special-case status code 416 (Requested Range Not Satisfiable) - fall back
		if fromByte > 0 && dlFile != nil && res.StatusCode == 416 {
			tracerx.Printf("xfer: server rejected resume download request for %q from byte %d; re-downloading from start", t.Object.Oid, fromByte)
			dlFile.Close()
			os.Remove(dlFile.Name())
			return a.download(t, cb, authOkFunc, nil, 0, nil)
		}
		return errutil.NewRetriableError(err)
	}
	httputil.LogTransfer("lfs.data.download", res)
	defer res.Body.Close()

	// Range request must return 206 & content range to confirm
	if fromByte > 0 {
		rangeRequestOk := false
		var failReason string
		// check 206 and Content-Range, fall back if either not as expected
		if res.StatusCode == 206 {
			// Probably a successful range request, check Content-Range
			if rangeHdr := res.Header.Get("Content-Range"); rangeHdr != "" {
				regex := regexp.MustCompile(`bytes (\d+)\-.*`)
				match := regex.FindStringSubmatch(rangeHdr)
				if match != nil && len(match) > 1 {
					contentStart, _ := strconv.ParseInt(match[1], 10, 64)
					if contentStart == fromByte {
						rangeRequestOk = true
					} else {
						failReason = fmt.Sprintf("Content-Range start byte incorrect: %s expected %d", match[1], fromByte)
					}
				} else {
					failReason = fmt.Sprintf("badly formatted Content-Range header: %q", rangeHdr)
				}
			} else {
				failReason = "missing Content-Range header in response"
			}
		} else {
			failReason = fmt.Sprintf("expected status code 206, received %d", res.StatusCode)
		}
		if rangeRequestOk {
			tracerx.Printf("xfer: server accepted resume download request: %q from byte %d", t.Object.Oid, fromByte)
			// Advance the progress callback to account for the bytes already on
			// disk; report in chunks no larger than maxInt.
			if cb != nil {
				const maxInt = int(^uint(0) >> 1)
				for read := int64(0); read < fromByte; {
					remainder := fromByte - read
					if remainder > int64(maxInt) {
						read += int64(maxInt)
						cb(t.Name, t.Object.Size, read, maxInt)
					} else {
						read += remainder
						cb(t.Name, t.Object.Size, read, int(remainder))
					}

				}
			}
		} else {
			// Abort resume, perform regular download
			tracerx.Printf("xfer: failed to resume download for %q from byte %d: %s. Re-downloading from start", t.Object.Oid, fromByte, failReason)
			dlFile.Close()
			os.Remove(dlFile.Name())
			if res.StatusCode == 200 {
				// If status code was 200 then server just ignored Range header and
				// sent everything. Don't re-request, use this one from byte 0
				dlFile = nil
				fromByte = 0
				hash = nil
			} else {
				// re-request needed
				return a.download(t, cb, authOkFunc, nil, 0, nil)
			}
		}
	}

	// Signal auth OK on success response, before starting download to free up
	// other workers immediately
	if authOkFunc != nil {
		authOkFunc()
	}

	var hasher *tools.HashingReader
	if fromByte > 0 && hash != nil {
		// pre-load hashing reader with previous content
		hasher = tools.NewHashingReaderPreloadHash(res.Body, hash)
	} else {
		hasher = tools.NewHashingReader(res.Body)
	}

	if dlFile == nil {
		// New file start
		dlFile, err = os.OpenFile(a.downloadFilename(t), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
		if err != nil {
			return err
		}
		defer dlFile.Close()
	}
	dlfilename := dlFile.Name()
	// Wrap callback to give name context
	ccb := func(totalSize int64, readSoFar int64, readSinceLast int) error {
		if cb != nil {
			return cb(t.Name, totalSize, readSoFar+fromByte, readSinceLast)
		}
		return nil
	}
	written, err := tools.CopyWithCallback(dlFile, hasher, res.ContentLength, ccb)
	if err != nil {
		return fmt.Errorf("cannot write data to tempfile %q: %v", dlfilename, err)
	}
	if err := dlFile.Close(); err != nil {
		return fmt.Errorf("can't close tempfile %q: %v", dlfilename, err)
	}

	if actual := hasher.Hash(); actual != t.Object.Oid {
		return fmt.Errorf("Expected OID %s, got %s after %d bytes written", t.Object.Oid, actual, written)
	}

	return tools.RenameFileCopyPermissions(dlfilename, t.Path)

}
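
The integrity check at the end of download relies on tools.HashingReader: a pass-through reader that hashes every byte it forwards, so the SHA-256 object ID can be compared once the copy finishes. Below is a sketch of such a type, under the assumption that it wraps crypto/sha256 as the OID comparison suggests; the real git-lfs type may differ in detail.

// Hypothetical package name; illustrative only.
package hashutil

import (
	"crypto/sha256"
	"encoding/hex"
	"hash"
	"io"
)

type HashingReader struct {
	r io.Reader
	h hash.Hash
}

// NewHashingReader starts hashing from an empty SHA-256 state.
func NewHashingReader(r io.Reader) *HashingReader {
	return &HashingReader{r: r, h: sha256.New()}
}

// NewHashingReaderPreloadHash resumes with a hash that already contains the
// previously downloaded bytes (used for resumed downloads).
func NewHashingReaderPreloadHash(r io.Reader, h hash.Hash) *HashingReader {
	return &HashingReader{r: r, h: h}
}

func (hr *HashingReader) Read(p []byte) (int, error) {
	n, err := hr.r.Read(p)
	if n > 0 {
		hr.h.Write(p[:n]) // hash.Hash.Write never returns an error
	}
	return n, err
}

// Hash returns the hex-encoded digest of everything read so far.
func (hr *HashingReader) Hash() string {
	return hex.EncodeToString(hr.h.Sum(nil))
}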
Example #4
func readLocalFile(writer io.Writer, ptr *Pointer, mediafile string, workingfile string, cb progress.CopyCallback) error {
	reader, err := os.Open(mediafile)
	if err != nil {
		return errors.Wrapf(err, "Error opening media file.")
	}
	defer reader.Close()

	if ptr.Size == 0 {
		if stat, _ := os.Stat(mediafile); stat != nil {
			ptr.Size = stat.Size()
		}
	}

	if len(ptr.Extensions) > 0 {
		registeredExts := config.Config.Extensions()
		extensions := make(map[string]config.Extension)
		for _, ptrExt := range ptr.Extensions {
			ext, ok := registeredExts[ptrExt.Name]
			if !ok {
				err := fmt.Errorf("Extension '%s' is not configured.", ptrExt.Name)
				return errors.Wrap(err, "smudge")
			}
			ext.Priority = ptrExt.Priority
			extensions[ext.Name] = ext
		}
		exts, err := config.SortExtensions(extensions)
		if err != nil {
			return errors.Wrap(err, "smudge")
		}

		// pipe extensions in reverse order
		var extsR []config.Extension
		for i := range exts {
			ext := exts[len(exts)-1-i]
			extsR = append(extsR, ext)
		}

		request := &pipeRequest{"smudge", reader, workingfile, extsR}

		response, err := pipeExtensions(request)
		if err != nil {
			return errors.Wrap(err, "smudge")
		}

		actualExts := make(map[string]*pipeExtResult)
		for _, result := range response.results {
			actualExts[result.name] = result
		}

		// verify name, order, and oids
		oid := response.results[0].oidIn
		if ptr.Oid != oid {
			err = fmt.Errorf("Actual oid %s during smudge does not match expected %s", oid, ptr.Oid)
			return errors.Wrap(err, "smudge")
		}

		for _, expected := range ptr.Extensions {
			actual := actualExts[expected.Name]
			if actual.name != expected.Name {
				err = fmt.Errorf("Actual extension name '%s' does not match expected '%s'", actual.name, expected.Name)
				return errors.Wrap(err, "smudge")
			}
			if actual.oidOut != expected.Oid {
				err = fmt.Errorf("Actual oid %s for extension '%s' does not match expected %s", actual.oidOut, expected.Name, expected.Oid)
				return errors.Wrap(err, "smudge")
			}
		}

		// setup reader
		reader, err = os.Open(response.file.Name())
		if err != nil {
			return errors.Wrapf(err, "Error opening smudged file: %s", err)
		}
		defer reader.Close()
	}

	_, err = tools.CopyWithCallback(writer, reader, ptr.Size, cb)
	if err != nil {
		return errors.Wrapf(err, "Error reading from media file: %s", err)
	}

	return nil
}
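
For completeness, here is a caller-side sketch showing how a progress callback with the same shape as the ones above might be wired up in an ordinary program. The file name is made up, and copyWithProgress is a hypothetical local stand-in for tools.CopyWithCallback, not the library function itself.

package main

import (
	"fmt"
	"io"
	"os"
)

// copyCallback matches the callback shape used throughout the examples.
type copyCallback func(totalSize int64, readSoFar int64, readSinceLast int) error

// copyWithProgress is a hypothetical local stand-in for tools.CopyWithCallback.
func copyWithProgress(w io.Writer, r io.Reader, totalSize int64, cb copyCallback) (int64, error) {
	buf := make([]byte, 32*1024)
	var written int64
	for {
		n, err := r.Read(buf)
		if n > 0 {
			if _, werr := w.Write(buf[:n]); werr != nil {
				return written, werr
			}
			written += int64(n)
			if cb != nil {
				if cberr := cb(totalSize, written, n); cberr != nil {
					return written, cberr
				}
			}
		}
		if err == io.EOF {
			return written, nil
		}
		if err != nil {
			return written, err
		}
	}
}

func main() {
	// Hypothetical input file; any io.Reader with a known size works.
	src, err := os.Open("big.bin")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer src.Close()

	stat, err := src.Stat()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Report progress to stderr as bytes are copied; io.Discard stands in
	// for a real destination writer.
	cb := func(totalSize int64, readSoFar int64, readSinceLast int) error {
		fmt.Fprintf(os.Stderr, "\r%d / %d bytes", readSoFar, totalSize)
		return nil
	}
	if _, err := copyWithProgress(io.Discard, src, stat.Size(), cb); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Fprintln(os.Stderr)
}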