// Detect turns a source string into another source string if it is
// detected to be of a known pattern.
//
// The third parameter should be the list of detectors to use in the
// order to try them. If you don't want to configure this, just use
// the global Detectors variable.
//
// This is safe to be called with an already valid source string: Detect
// will just return it.
func Detect(src string, pwd string, ds []Detector) (string, error) {
	getForce, getSrc := getForcedGetter(src)

	// Separate out the subdir if there is one, we don't pass that to detect
	getSrc, subDir := SourceDirSubdir(getSrc)

	u, err := url.Parse(getSrc)
	if err == nil && u.Scheme != "" {
		// Valid URL
		return src, nil
	}

	for _, d := range ds {
		result, ok, err := d.Detect(getSrc, pwd)
		if err != nil {
			return "", err
		}
		if !ok {
			continue
		}

		var detectForce string
		detectForce, result = getForcedGetter(result)
		result, detectSubdir := SourceDirSubdir(result)

		// If we have a subdir from the detection, then prepend it to our
		// requested subdir.
		if detectSubdir != "" {
			if subDir != "" {
				subDir = filepath.Join(detectSubdir, subDir)
			} else {
				subDir = detectSubdir
			}
		}
		if subDir != "" {
			u, err := url.Parse(result)
			if err != nil {
				return "", fmt.Errorf("Error parsing URL: %s", err)
			}
			u.Path += "//" + subDir
			result = u.String()
		}

		// Preserve the forced getter if it exists. We try to use the
		// original set force first, followed by any force set by the
		// detector.
		if getForce != "" {
			result = fmt.Sprintf("%s::%s", getForce, result)
		} else if detectForce != "" {
			result = fmt.Sprintf("%s::%s", detectForce, result)
		}

		return result, nil
	}

	return "", fmt.Errorf("invalid source string: %s", src)
}
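// exampleDetect is an illustrative sketch (not part of the library) showing a
// typical call to Detect. The source string and expected result are
// assumptions: with the default Detectors, the GitHub shorthand is expected to
// expand to something like
//
//	git::https://github.com/hashicorp/terraform.git//modules/foo
//
// with the forced getter prefix and the //subdir suffix preserved.
func exampleDetect() (string, error) {
	// No scheme on the source string, so the detectors are consulted.
	return Detect("github.com/hashicorp/terraform//modules/foo", "/tmp", Detectors)
}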
// GetFile for Git doesn't support updating at this time. It will download
// the file every time.
func (g *GitGetter) GetFile(dst string, u *url.URL) error {
	td, err := ioutil.TempDir("", "getter-git")
	if err != nil {
		return err
	}
	if err := os.RemoveAll(td); err != nil {
		return err
	}

	// Get the filename, and strip the filename from the URL so we can
	// just get the repository directly.
	filename := filepath.Base(u.Path)
	u.Path = filepath.Dir(u.Path)

	// Get the full repository
	if err := g.Get(td, u); err != nil {
		return err
	}

	// Copy the single file
	u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename)))
	if err != nil {
		return err
	}

	fg := &FileGetter{Copy: true}
	return fg.GetFile(dst, u)
}
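// exampleGitGetFile is an illustrative sketch (not part of the library) of how
// GetFile is invoked: the last element of the URL path names the file inside
// the repository, and the remainder of the path identifies the repository
// itself, which is cloned to a temporary directory before the single file is
// copied out. The repository URL below is a hypothetical placeholder.
func exampleGitGetFile(g *GitGetter) error {
	u, err := urlhelper.Parse("https://github.com/example/repo.git/README.md")
	if err != nil {
		return err
	}
	return g.GetFile("/tmp/README.md", u)
}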
// GetFile for Hg doesn't support updating at this time. It will download
// the file every time.
func (g *HgGetter) GetFile(dst string, u *url.URL) error {
	td, err := ioutil.TempDir("", "getter-hg")
	if err != nil {
		return err
	}
	if err := os.RemoveAll(td); err != nil {
		return err
	}

	// Get the filename, and strip the filename from the URL so we can
	// just get the repository directly.
	filename := filepath.Base(u.Path)
	u.Path = filepath.ToSlash(filepath.Dir(u.Path))

	// If we're on Windows, we need to set the host to "localhost" for hg
	if runtime.GOOS == "windows" {
		u.Host = "localhost"
	}

	// Get the full repository
	if err := g.Get(td, u); err != nil {
		return err
	}

	// Copy the single file
	u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename)))
	if err != nil {
		return err
	}

	fg := &FileGetter{Copy: true}
	return fg.GetFile(dst, u)
}
func testURL(s string) *url.URL {
	u, err := urlhelper.Parse(s)
	if err != nil {
		panic(err)
	}

	return u
}
func testModuleURL(n string) *url.URL {
	u, err := urlhelper.Parse(testModule(n))
	if err != nil {
		panic(err)
	}

	return u
}
func (g *HgGetter) Get(dst string, u *url.URL) error {
	if _, err := exec.LookPath("hg"); err != nil {
		return fmt.Errorf("hg must be available and on the PATH")
	}

	newURL, err := urlhelper.Parse(u.String())
	if err != nil {
		return err
	}
	if fixWindowsDrivePath(newURL) {
		// See valid file path form on http://www.selenic.com/hg/help/urls
		newURL.Path = fmt.Sprintf("/%s", newURL.Path)
	}

	// Extract some query parameters we use
	var rev string
	q := newURL.Query()
	if len(q) > 0 {
		rev = q.Get("rev")
		q.Del("rev")

		newURL.RawQuery = q.Encode()
	}

	_, err = os.Stat(dst)
	if err != nil && !os.IsNotExist(err) {
		return err
	}
	if err != nil {
		if err := g.clone(dst, newURL); err != nil {
			return err
		}
	}

	if err := g.pull(dst, newURL); err != nil {
		return err
	}

	return g.update(dst, newURL, rev)
}
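// exampleHgGet is an illustrative sketch (not part of the library) of the URL
// shape Get understands: the optional "rev" query parameter selects the
// revision the working copy is updated to (via g.update) and is stripped from
// the URL before hg sees it. The repository URL below is a hypothetical
// placeholder.
func exampleHgGet(g *HgGetter) error {
	u, err := urlhelper.Parse("https://bitbucket.org/example/repo?rev=default")
	if err != nil {
		return err
	}
	return g.Get("/tmp/repo", u)
}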
// Get downloads the configured source to the destination.
func (c *Client) Get() error {
	// Store this locally since there are cases we swap this
	mode := c.Mode
	if mode == ClientModeInvalid {
		if c.Dir {
			mode = ClientModeDir
		} else {
			mode = ClientModeFile
		}
	}

	// Default decompressor value
	decompressors := c.Decompressors
	if decompressors == nil {
		decompressors = Decompressors
	}

	// Detect the URL. This is safe if it is already detected.
	detectors := c.Detectors
	if detectors == nil {
		detectors = Detectors
	}
	src, err := Detect(c.Src, c.Pwd, detectors)
	if err != nil {
		return err
	}

	// Determine if we have a forced protocol, i.e. "git::http://..."
	force, src := getForcedGetter(src)

	// If there is a subdir component, then we download the root separately
	// and then copy over the proper subdir.
	var realDst string
	dst := c.Dst
	src, subDir := SourceDirSubdir(src)
	if subDir != "" {
		tmpDir, err := ioutil.TempDir("", "tf")
		if err != nil {
			return err
		}
		if err := os.RemoveAll(tmpDir); err != nil {
			return err
		}
		defer os.RemoveAll(tmpDir)

		realDst = dst
		dst = tmpDir
	}

	u, err := urlhelper.Parse(src)
	if err != nil {
		return err
	}
	if force == "" {
		force = u.Scheme
	}

	getters := c.Getters
	if getters == nil {
		getters = Getters
	}

	g, ok := getters[force]
	if !ok {
		return fmt.Errorf(
			"download not supported for scheme '%s'", force)
	}

	// We have magic query parameters that we use to signal different features
	q := u.Query()

	// Determine if we have an archive type
	archiveV := q.Get("archive")
	if archiveV != "" {
		// Delete the parameter since it is a magic parameter we don't
		// want to pass on to the Getter
		q.Del("archive")
		u.RawQuery = q.Encode()

		// If we can parse the value as a bool and it is false, then
		// set the archive to "-" which should never map to a decompressor
		if b, err := strconv.ParseBool(archiveV); err == nil && !b {
			archiveV = "-"
		}
	}
	if archiveV == "" {
		// We don't appear to... but is it part of the filename?
		matchingLen := 0
		for k := range decompressors {
			if strings.HasSuffix(u.Path, k) && len(k) > matchingLen {
				archiveV = k
				matchingLen = len(k)
			}
		}
	}

	// If we have a decompressor, then we need to change the destination
	// to download to a temporary path. We unarchive this into the final,
	// real path.
	var decompressDst string
	var decompressDir bool
	decompressor := decompressors[archiveV]
	if decompressor != nil {
		// Create a temporary directory to store our archive. We delete
		// this at the end of everything.
		td, err := ioutil.TempDir("", "getter")
		if err != nil {
			return fmt.Errorf(
				"Error creating temporary directory for archive: %s", err)
		}
		defer os.RemoveAll(td)

		// Swap the download directory to be our temporary path and
		// store the old values.
		decompressDst = dst
		decompressDir = mode != ClientModeFile
		dst = filepath.Join(td, "archive")
		mode = ClientModeFile
	}

	// Determine if we have a checksum
	var checksumHash hash.Hash
	var checksumValue []byte
	if v := q.Get("checksum"); v != "" {
		// Delete the query parameter if we have it.
		q.Del("checksum")
		u.RawQuery = q.Encode()

		// Determine the checksum hash type
		checksumType := ""
		idx := strings.Index(v, ":")
		if idx > -1 {
			checksumType = v[:idx]
		}
		switch checksumType {
		case "md5":
			checksumHash = md5.New()
		case "sha1":
			checksumHash = sha1.New()
		case "sha256":
			checksumHash = sha256.New()
		case "sha512":
			checksumHash = sha512.New()
		default:
			return fmt.Errorf(
				"unsupported checksum type: %s", checksumType)
		}

		// Get the remainder of the value and parse it into bytes
		b, err := hex.DecodeString(v[idx+1:])
		if err != nil {
			return fmt.Errorf("invalid checksum: %s", err)
		}

		// Set our value
		checksumValue = b
	}

	// For now, any means file. In the future, we'll ask the getter
	// what it thinks it is.
	if mode == ClientModeAny {
		mode = ClientModeFile

		// Destination is the base name of the URL path
		dst = filepath.Join(dst, filepath.Base(u.Path))
	}

	// If we're not downloading a directory, then just download the file
	// and return.
	if mode == ClientModeFile {
		err := g.GetFile(dst, u)
		if err != nil {
			return err
		}

		if checksumHash != nil {
			if err := checksum(dst, checksumHash, checksumValue); err != nil {
				return err
			}
		}

		if decompressor != nil {
			// We have a decompressor, so decompress the current destination
			// into the final destination with the proper mode.
			err := decompressor.Decompress(decompressDst, dst, decompressDir)
			if err != nil {
				return err
			}

			// Swap the information back
			dst = decompressDst
			if decompressDir {
				mode = ClientModeAny
			} else {
				mode = ClientModeFile
			}
		}

		// We check the dir value again because it can be switched back
		// if we were unarchiving. If we're still only Get-ing a file, then
		// we're done.
		if mode == ClientModeFile {
			return nil
		}
	}

	// If we're at this point we're either downloading a directory or we've
	// downloaded and unarchived a directory and we're just checking subdir.
	// In the case we have a decompressor we don't Get because it was Get
	// above.
	if decompressor == nil {
		// If we're getting a directory, then this is an error. You cannot
		// checksum a directory. TODO: test
		if checksumHash != nil {
			return fmt.Errorf(
				"checksum cannot be specified for directory download")
		}

		// We're downloading a directory, which might require a bit more work
		// if we're specifying a subdir.
		err := g.Get(dst, u)
		if err != nil {
			err = fmt.Errorf("error downloading '%s': %s", src, err)
			return err
		}
	}

	// If we have a subdir, copy that over
	if subDir != "" {
		if err := os.RemoveAll(realDst); err != nil {
			return err
		}
		if err := os.MkdirAll(realDst, 0755); err != nil {
			return err
		}

		return copyDir(realDst, filepath.Join(dst, subDir), false)
	}

	return nil
}
// Get downloads the configured source to the destination.
func (c *Client) Get() error {
	// Detect the URL. This is safe if it is already detected.
	detectors := c.Detectors
	if detectors == nil {
		detectors = Detectors
	}
	src, err := Detect(c.Src, c.Pwd, detectors)
	if err != nil {
		return err
	}

	// Determine if we have a forced protocol, i.e. "git::http://..."
	force, src := getForcedGetter(src)

	// If there is a subdir component, then we download the root separately
	// and then copy over the proper subdir.
	var realDst string
	dst := c.Dst
	src, subDir := SourceDirSubdir(src)
	if subDir != "" {
		tmpDir, err := ioutil.TempDir("", "tf")
		if err != nil {
			return err
		}
		if err := os.RemoveAll(tmpDir); err != nil {
			return err
		}
		defer os.RemoveAll(tmpDir)

		realDst = dst
		dst = tmpDir
	}

	u, err := urlhelper.Parse(src)
	if err != nil {
		return err
	}
	if force == "" {
		force = u.Scheme
	}

	getters := c.Getters
	if getters == nil {
		getters = Getters
	}

	g, ok := getters[force]
	if !ok {
		return fmt.Errorf(
			"download not supported for scheme '%s'", force)
	}

	// Determine if we have a checksum
	var checksumHash hash.Hash
	var checksumValue []byte
	q := u.Query()
	if v := q.Get("checksum"); v != "" {
		// Delete the query parameter if we have it.
		q.Del("checksum")
		u.RawQuery = q.Encode()

		// If we're getting a directory, then this is an error. You cannot
		// checksum a directory. TODO: test
		if c.Dir {
			return fmt.Errorf(
				"checksum cannot be specified for directory download")
		}

		// Determine the checksum hash type
		checksumType := ""
		idx := strings.Index(v, ":")
		if idx > -1 {
			checksumType = v[:idx]
		}
		switch checksumType {
		case "md5":
			checksumHash = md5.New()
		case "sha1":
			checksumHash = sha1.New()
		case "sha256":
			checksumHash = sha256.New()
		case "sha512":
			checksumHash = sha512.New()
		default:
			return fmt.Errorf(
				"unsupported checksum type: %s", checksumType)
		}

		// Get the remainder of the value and parse it into bytes
		b, err := hex.DecodeString(v[idx+1:])
		if err != nil {
			return fmt.Errorf("invalid checksum: %s", err)
		}

		// Set our value
		checksumValue = b
	}

	// If we're not downloading a directory, then just download the file
	// and return.
	if !c.Dir {
		err := g.GetFile(dst, u)
		if err != nil {
			return err
		}

		if checksumHash != nil {
			return checksum(dst, checksumHash, checksumValue)
		}

		return nil
	}

	// We're downloading a directory, which might require a bit more work
	// if we're specifying a subdir.
	err = g.Get(dst, u)
	if err != nil {
		err = fmt.Errorf("error downloading '%s': %s", src, err)
		return err
	}

	// If we have a subdir, copy that over
	if subDir != "" {
		if err := os.RemoveAll(realDst); err != nil {
			return err
		}
		if err := os.MkdirAll(realDst, 0755); err != nil {
			return err
		}

		return copyDir(realDst, filepath.Join(dst, subDir), false)
	}

	return nil
}