// SINGLE SOURCE - Type B: copy(f, d) -> copy(f, d/f) -> A
// prepareCopyURLsTypeB - prepares target and source URLs for copying.
func prepareCopyURLsTypeB(sourceURL string, targetURL string) copyURLs {
	_, sourceContent, err := url2Stat(sourceURL)
	if err != nil {
		// Source does not exist or insufficient privileges.
		return copyURLs{Error: NewIodine(iodine.New(err, nil))}
	}
	if !sourceContent.Type.IsRegular() {
		// Source is not a regular file.
		return copyURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
	}
	// All OK.. We can proceed. Type B: source is a file, target is a folder and exists.
	sourceURLParse, err := client.Parse(sourceURL)
	if err != nil {
		return copyURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
	}
	targetURLParse, err := client.Parse(targetURL)
	if err != nil {
		return copyURLs{Error: NewIodine(iodine.New(errInvalidTarget{URL: targetURL}, nil))}
	}
	targetURLParse.Path = filepath.Join(targetURLParse.Path, filepath.Base(sourceURLParse.Path))
	return prepareCopyURLsTypeA(sourceURL, targetURLParse.String())
}
// aliasExpand expands an aliased URL (name:/path) to its full URL; used by url-parser.
func aliasExpand(aliasedURL string, aliases map[string]string) (newURL string, err error) {
	u, err := client.Parse(aliasedURL)
	if err != nil {
		return aliasedURL, iodine.New(errInvalidURL{URL: aliasedURL}, nil)
	}
	// Already a proper URL with a host; no expansion needed.
	if u.Host != "" {
		return aliasedURL, nil
	}
	for aliasName, expandedURL := range aliases {
		if strings.HasPrefix(aliasedURL, aliasName+":") {
			// Match found. Expand it.
			splits := strings.Split(aliasedURL, ":")
			// If expandedURL is missing, return aliasedURL and treat it like a filesystem path.
			if expandedURL == "" {
				return aliasedURL, nil
			}
			// Expand only the simple "name:path" form; anything with more
			// colons falls through and is returned unexpanded.
			if len(splits) == 2 {
				// Remove any prefixed slashes or backslashes.
				trimmedURL := expandedURL + "/" + strings.TrimPrefix(strings.TrimPrefix(splits[1], "/"), "\\")
				u, err := client.Parse(trimmedURL)
				if err != nil {
					return aliasedURL, iodine.New(errInvalidURL{URL: aliasedURL}, nil)
				}
				return u.String(), nil
			}
			return aliasedURL, nil
		}
	}
	return aliasedURL, nil
}
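// Usage sketch for aliasExpand. The alias map below is hypothetical (not from
// a real mc config), and the exact output depends on how client.Parse
// round-trips URLs:
//
//	aliases := map[string]string{"play": "https://play.minio.io:9000"}
//	url, err := aliasExpand("play:mybucket/object.txt", aliases)
//	if err == nil {
//		// url is expected to be "https://play.minio.io:9000/mybucket/object.txt".
//	}
//	// A URL that already carries a host, e.g. "https://s3.amazonaws.com/b",
//	// is returned unchanged.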
// urlJoinPath - joins a path to an existing URL.
func urlJoinPath(url1, url2 string) (string, *probe.Error) {
	u1, e := client.Parse(url1)
	if e != nil {
		return "", probe.NewError(e)
	}
	u2, e := client.Parse(url2)
	if e != nil {
		return "", probe.NewError(e)
	}
	u1.Path = filepath.Join(u1.Path, u2.Path)
	return u1.String(), nil
}
// urlJoinPath - joins a path to an existing URL.
func urlJoinPath(url1, url2 string) (newURLStr string, err error) {
	u1, err := client.Parse(url1)
	if err != nil {
		return "", iodine.New(err, nil)
	}
	u2, err := client.Parse(url2)
	if err != nil {
		return "", iodine.New(err, nil)
	}
	u1.Path = filepath.Join(u1.Path, u2.Path)
	newURLStr = u1.String()
	return newURLStr, nil
}
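// Usage sketch for urlJoinPath (either variant above). The values are
// hypothetical and assume client.Parse preserves scheme and host on String():
//
//	joined, err := urlJoinPath("https://s3.amazonaws.com/mybucket", "path/object.txt")
//	if err == nil {
//		// joined is expected to be "https://s3.amazonaws.com/mybucket/path/object.txt".
//	}
//	// Since filepath.Join cleans the joined path, redundant or trailing
//	// separators in either argument are collapsed.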
// SINGLE SOURCE - Type B: copy(f, d) -> copy(f, d/f) -> A
// prepareCopyURLsTypeB - prepares target and source URLs for copying.
func prepareCopyURLsTypeB(sourceURL string, targetURL string) <-chan cpURLs {
	cpURLsCh := make(chan cpURLs, 10000)
	go func(sourceURL, targetURL string, cpURLsCh chan cpURLs) {
		defer close(cpURLsCh)
		_, sourceContent, err := url2Stat(sourceURL)
		if err != nil {
			// Source does not exist or insufficient privileges.
			cpURLsCh <- cpURLs{Error: iodine.New(err, nil)}
			return
		}
		if !sourceContent.Type.IsRegular() {
			// Source is not a regular file.
			cpURLsCh <- cpURLs{Error: iodine.New(errInvalidSource{URL: sourceURL}, nil)}
			return
		}
		_, targetContent, err := url2Stat(targetURL)
		if err != nil {
			// Target does not exist or insufficient privileges.
			cpURLsCh <- cpURLs{Error: iodine.New(err, nil)}
			return
		}
		if !targetContent.Type.IsDir() {
			// Target exists, but is not a directory.
			cpURLsCh <- cpURLs{Error: iodine.New(errTargetIsNotDir{URL: targetURL}, nil)}
			return
		}
		// All OK.. We can proceed. Type B: source is a file, target is a directory and exists.
		sourceURLParse, err := client.Parse(sourceURL)
		if err != nil {
			cpURLsCh <- cpURLs{Error: iodine.New(errInvalidSource{URL: sourceURL}, nil)}
			return
		}
		targetURLParse, err := client.Parse(targetURL)
		if err != nil {
			cpURLsCh <- cpURLs{Error: iodine.New(errInvalidTarget{URL: targetURL}, nil)}
			return
		}
		targetURLParse.Path = filepath.Join(targetURLParse.Path, filepath.Base(sourceURLParse.Path))
		for cURLs := range prepareCopyURLsTypeA(sourceURL, targetURLParse.String()) {
			cpURLsCh <- cURLs
		}
	}(sourceURL, targetURL, cpURLsCh)
	return cpURLsCh
}
// getExpandedURL - extracts URL string from a single cmd-line argument.
func getExpandedURL(arg string, aliases map[string]string) (urlStr string, err error) {
	if _, err := client.Parse(arg); err != nil {
		// Not a valid URL. Return error.
		return "", NewIodine(iodine.New(errInvalidURL{arg}, nil))
	}
	// Check and expand alias.
	urlStr, err = aliasExpand(arg, aliases)
	if err != nil {
		return "", NewIodine(iodine.New(err, nil))
	}
	if _, err := client.Parse(urlStr); err != nil {
		// Not a valid URL. Return error.
		return "", NewIodine(iodine.New(errInvalidURL{urlStr}, nil))
	}
	return urlStr, nil
}
// getHostConfig retrieves host specific configuration such as access keys, certs.
func getHostConfig(URL string) (hostConfig, *probe.Error) {
	config, err := getMcConfig()
	if err != nil {
		return hostConfig{}, err.Trace()
	}
	{
		url, err := client.Parse(URL)
		if err != nil {
			return hostConfig{}, probe.NewError(err)
		}
		// No host matching or keys needed for filesystem requests.
		if url.Type == client.Filesystem {
			hostCfg := hostConfig{
				AccessKeyID:     "",
				SecretAccessKey: "",
			}
			return hostCfg, nil
		}
		for globURL, hostCfg := range config.Hosts {
			match, err := filepath.Match(globURL, url.Host)
			if err != nil {
				return hostConfig{}, errInvalidGlobURL(globURL, URL).Trace()
			}
			if match {
				return hostCfg, nil
			}
		}
	}
	return hostConfig{}, errNoMatchingHost(URL).Trace()
}
// getNewClient gives a new client interface.
func getNewClient(urlStr string, auth *hostConfig) (clnt client.Client, err error) {
	url, err := client.Parse(urlStr)
	if err != nil {
		return nil, iodine.New(errInvalidURL{URL: urlStr}, map[string]string{"URL": urlStr})
	}
	switch url.Type {
	case client.Object: // Minio and S3 compatible object storage.
		if auth == nil {
			return nil, iodine.New(errInvalidArgument{}, nil)
		}
		s3Config := new(s3.Config)
		s3Config.AccessKeyID = func() string {
			if auth.AccessKeyID == globalAccessKeyID {
				return ""
			}
			return auth.AccessKeyID
		}()
		s3Config.SecretAccessKey = func() string {
			if auth.SecretAccessKey == globalSecretAccessKey {
				return ""
			}
			return auth.SecretAccessKey
		}()
		s3Config.AppName = "Minio"
		s3Config.AppVersion = getVersion()
		s3Config.AppComments = []string{os.Args[0], runtime.GOOS, runtime.GOARCH}
		s3Config.HostURL = urlStr
		s3Config.Debug = globalDebugFlag
		return s3.New(s3Config)
	case client.Filesystem:
		return fs.New(urlStr)
	}
	return nil, iodine.New(errInvalidURL{URL: urlStr}, nil)
}
// getHostConfig retrieves host specific configuration such as access keys, certs.
func getHostConfig(URL string) (*hostConfig, error) {
	config, err := getMcConfig()
	if err != nil {
		return nil, NewIodine(iodine.New(err, nil))
	}
	url, err := client.Parse(URL)
	if err != nil {
		return nil, NewIodine(iodine.New(errInvalidURL{URL: URL}, nil))
	}
	// No host matching or keys needed for filesystem requests.
	if url.Type == client.Filesystem {
		hostCfg := &hostConfig{
			AccessKeyID:     "",
			SecretAccessKey: "",
		}
		return hostCfg, nil
	}
	for globURL, hostCfg := range config.Hosts {
		match, err := filepath.Match(globURL, url.Host)
		if err != nil {
			return nil, NewIodine(iodine.New(errInvalidGlobURL{glob: globURL, request: URL}, nil))
		}
		if match {
			if hostCfg == nil {
				return nil, NewIodine(iodine.New(errInvalidAuth{}, nil))
			}
			return hostCfg, nil
		}
	}
	return nil, NewIodine(iodine.New(errNoMatchingHost{}, nil))
}
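// Both getHostConfig variants above match configured host globs against the
// parsed URL's host via the standard library's filepath.Match. A minimal
// sketch of that matching step (the host patterns are hypothetical):
//
//	matched, err := filepath.Match("*.s3.amazonaws.com", "mybucket.s3.amazonaws.com")
//	// matched == true, err == nil
//	matched, err = filepath.Match("play.minio.io:*", "play.minio.io:9000")
//	// matched == true: a glob can also cover the port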
// New returns an initialized s3Client structure. If debug is enabled,
// requests are routed through an internal trace transport.
func New(config *Config) (client.Client, error) {
	u, err := client.Parse(config.HostURL)
	if err != nil {
		return nil, iodine.New(err, nil)
	}
	var transport http.RoundTripper
	switch {
	case config.Debug:
		transport = GetNewTraceTransport(NewTrace(), http.DefaultTransport)
	default:
		transport = http.DefaultTransport
	}
	s3Conf := minio.Config{
		AccessKeyID:     config.AccessKeyID,
		SecretAccessKey: config.SecretAccessKey,
		Transport:       transport,
		Endpoint:        u.Scheme + "://" + u.Host,
	}
	s3Conf.SetUserAgent(config.AppName, config.AppVersion, config.AppComments...)
	api, err := minio.New(s3Conf)
	if err != nil {
		return nil, err
	}
	return &s3Client{api: api, hostURL: u}, nil
}
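// A minimal construction sketch for the s3 client above, mirroring how
// getNewClient fills s3.Config elsewhere in this code. Endpoint and
// credentials are placeholders, not working values:
//
//	s3Config := new(s3.Config)
//	s3Config.AccessKeyID = "ACCESSKEY"
//	s3Config.SecretAccessKey = "SECRETKEY"
//	s3Config.AppName = "Minio"
//	s3Config.AppVersion = "0.1.0"
//	s3Config.HostURL = "https://s3.amazonaws.com/mybucket"
//	s3Config.Debug = false // true routes requests through the trace transport
//	clnt, err := s3.New(s3Config)
//	if err != nil {
//		// handle initialization failure
//	}
//	_ = clnt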
// checkCastSyntax(URLs []string)
func checkCastSyntax(ctx *cli.Context) {
	if len(ctx.Args()) < 2 || ctx.Args().First() == "help" {
		cli.ShowCommandHelpAndExit(ctx, "cast", 1) // last argument is exit code.
	}
	// extract URLs.
	URLs, err := args2URLs(ctx.Args())
	if err != nil {
		console.Fatalf("One or more unknown URL types found %s. %s\n", ctx.Args(), NewIodine(iodine.New(err, nil)))
	}
	srcURL := URLs[0]
	tgtURLs := URLs[1:]

	/****** Generic rules *******/
	// Source cannot be a folder (except when recursive).
	if !isURLRecursive(srcURL) {
		_, srcContent, err := url2Stat(srcURL)
		// Source exist?
		if err != nil {
			console.Fatalf("Unable to stat source ‘%s’. %s\n", srcURL, NewIodine(iodine.New(err, nil)))
		}
		if !srcContent.Type.IsRegular() {
			if srcContent.Type.IsDir() {
				console.Fatalf("Source ‘%s’ is a folder. Please use ‘%s...’ to recursively copy this folder and its contents.\n", srcURL, srcURL)
			}
			console.Fatalf("Source ‘%s’ is not a regular file.\n", srcURL)
		}
	}
	// Recursive URLs are not allowed in target.
	for _, tgtURL := range tgtURLs {
		if isURLRecursive(tgtURL) {
			console.Fatalf("Target ‘%s’ cannot be recursive. %s\n", tgtURL, NewIodine(iodine.New(errInvalidArgument{}, nil)))
		}
	}
	for _, tgtURL := range tgtURLs {
		url, err := client.Parse(tgtURL)
		if err != nil {
			console.Fatalf("Unable to parse target ‘%s’ argument. %s\n", tgtURL, NewIodine(iodine.New(err, nil)))
		}
		if url.Host != "" {
			if url.Path == string(url.Separator) {
				console.Fatalf("Bucket creation detected for %s, cloud storage URLs should use ‘mc mb’ to create buckets\n", tgtURL)
			}
		}
	}
	switch guessCastURLType(srcURL, tgtURLs) {
	case castURLsTypeA: // File -> File.
		checkCastSyntaxTypeA(srcURL, tgtURLs)
	case castURLsTypeB: // File -> Folder.
		checkCastSyntaxTypeB(srcURL, tgtURLs)
	case castURLsTypeC: // Folder -> Folder.
		checkCastSyntaxTypeC(srcURL, tgtURLs)
	default:
		console.Fatalln("Invalid arguments. Unable to determine how to cast. Please report this issue at https://github.com/minio/mc/issues")
	}
}
// isValidURL - checks whether the argument parses as a URL with a non-empty path.
func isValidURL(url string) bool {
	// Empty source arg?
	urlParse, err := client.Parse(url)
	if err != nil {
		return false
	}
	if urlParse.Path == "" {
		return false
	}
	return true
}
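// Expected behavior of isValidURL on a few inputs, assuming client.Parse
// accepts both filesystem paths and object-storage URLs:
//
//	isValidURL("https://s3.amazonaws.com/mybucket") // true: non-empty path
//	isValidURL("https://s3.amazonaws.com")          // false: empty path component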
// checkMirrorSyntax(URLs []string)
func checkMirrorSyntax(ctx *cli.Context) {
	if len(ctx.Args()) < 2 || ctx.Args().First() == "help" {
		cli.ShowCommandHelpAndExit(ctx, "mirror", 1) // last argument is exit code.
	}
	// extract URLs.
	URLs, err := args2URLs(ctx.Args())
	fatalIf(err.Trace(ctx.Args()...), "Unable to parse arguments.")

	srcURL := URLs[0]
	tgtURLs := URLs[1:]

	/****** Generic rules *******/
	// Source must be recursive; mirroring operates on folders.
	if !isURLRecursive(srcURL) {
		fatalIf(errInvalidArgument().Trace(), fmt.Sprintf("Source ‘%s’ is not recursive. Use ‘%s...’ as argument to mirror recursively.", srcURL, srcURL))
	}
	// Recursive source URL.
	newSrcURL := stripRecursiveURL(srcURL)
	_, srcContent, err := url2Stat(newSrcURL)
	fatalIf(err.Trace(srcURL), "Unable to stat source ‘"+newSrcURL+"’.")

	if srcContent.Type.IsRegular() { // Ellipses is supported only for folders.
		fatalIf(errInvalidArgument().Trace(), "Source ‘"+srcURL+"’ is not a folder.")
	}
	if len(tgtURLs) == 0 {
		fatalIf(errInvalidArgument().Trace(), "Invalid number of target arguments to mirror command.")
	}
	for _, tgtURL := range tgtURLs {
		// Recursive URLs are not allowed in target.
		if isURLRecursive(tgtURL) {
			fatalIf(errDummy().Trace(), fmt.Sprintf("Recursive option is not supported for target ‘%s’ argument.", tgtURL))
		}
		url, e := client.Parse(tgtURL)
		fatalIf(probe.NewError(e), "Unable to parse target ‘"+tgtURL+"’ argument.")
		if url.Host != "" {
			if url.Path == string(url.Separator) {
				fatalIf(errInvalidArgument().Trace(), fmt.Sprintf("Target ‘%s’ does not contain bucket name.", tgtURL))
			}
		}
		_, content, err := url2Stat(tgtURL)
		fatalIf(err.Trace(tgtURL), "Unable to stat target ‘"+tgtURL+"’.")
		if !content.Type.IsDir() {
			fatalIf(errInvalidArgument().Trace(), "Target ‘"+tgtURL+"’ is not a folder.")
		}
	}
}
// SINGLE SOURCE - Type B: copy(f, d) -> copy(f, d/f) -> A
// prepareCopyURLsTypeB - prepares target and source URLs for copying.
func prepareCopyURLsTypeB(sourceURL string, targetURL string) <-chan copyURLs {
	copyURLsCh := make(chan copyURLs)
	go func(sourceURL, targetURL string, copyURLsCh chan copyURLs) {
		defer close(copyURLsCh)
		_, sourceContent, err := url2Stat(sourceURL)
		if err != nil {
			// Source does not exist or insufficient privileges.
			copyURLsCh <- copyURLs{Error: NewIodine(iodine.New(err, nil))}
			return
		}
		if !sourceContent.Type.IsRegular() {
			// Source is not a regular file.
			copyURLsCh <- copyURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
			return
		}
		// All OK.. We can proceed. Type B: source is a file, target is a directory and exists.
		sourceURLParse, err := client.Parse(sourceURL)
		if err != nil {
			copyURLsCh <- copyURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
			return
		}
		targetURLParse, err := client.Parse(targetURL)
		if err != nil {
			copyURLsCh <- copyURLs{Error: NewIodine(iodine.New(errInvalidTarget{URL: targetURL}, nil))}
			return
		}
		targetURLParse.Path = filepath.Join(targetURLParse.Path, filepath.Base(sourceURLParse.Path))
		for cURLs := range prepareCopyURLsTypeA(sourceURL, targetURLParse.String()) {
			copyURLsCh <- cURLs
		}
	}(sourceURL, targetURL, copyURLsCh)
	return copyURLsCh
}
// prepareSingleCastURLsTypeB - prepares a single target and single source URLs for casting.
func prepareSingleCastURLsTypeB(sourceURL string, targetURL string) castURLs {
	_, sourceContent, err := url2Stat(sourceURL)
	if err != nil {
		// Source does not exist or insufficient privileges.
		return castURLs{Error: NewIodine(iodine.New(err, nil))}
	}
	if !sourceContent.Type.IsRegular() {
		// Source is not a regular file.
		return castURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
	}
	_, targetContent, err := url2Stat(targetURL)
	if err != nil {
		// Target does not exist: treat it as a file name. Already reduced to Type A.
		return prepareSingleCastURLsTypeA(sourceURL, targetURL)
	}
	if targetContent.Type.IsRegular() { // File to File.
		// Source and target are files. Already reduced to Type A.
		return prepareSingleCastURLsTypeA(sourceURL, targetURL)
	}
	// Source is a file, target is a directory and exists.
	sourceURLParse, err := client.Parse(sourceURL)
	if err != nil {
		return castURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
	}
	targetURLParse, err := client.Parse(targetURL)
	if err != nil {
		return castURLs{Error: NewIodine(iodine.New(errInvalidTarget{URL: targetURL}, nil))}
	}
	// Reduce Type B to Type A.
	targetURLParse.Path = filepath.Join(targetURLParse.Path, filepath.Base(sourceURLParse.Path))
	return prepareSingleCastURLsTypeA(sourceURL, targetURLParse.String())
}
// Check if the target URL represents a directory. It may or may not exist yet.
func isTargetURLDir(targetURL string) bool {
	targetURLParse, err := client.Parse(targetURL)
	if err != nil {
		return false
	}
	// A trailing separator marks a directory, whether or not it exists yet.
	if strings.HasSuffix(targetURLParse.String(), string(targetURLParse.Separator)) {
		return true
	}
	_, targetContent, err := url2Stat(targetURL)
	if err != nil {
		return false
	}
	if !targetContent.Type.IsDir() {
		// Target exists but is not a directory.
		return false
	}
	return true
}
// Check if the target URL represents a folder. It may or may not exist yet.
func isTargetURLDir(targetURL string) bool {
	targetURLParse, err := client.Parse(targetURL)
	if err != nil {
		return false
	}
	_, targetContent, perr := url2Stat(targetURL)
	if perr != nil {
		// Target does not exist: a bare cloud-storage root is not a folder,
		// but a trailing separator marks a folder to be created.
		if targetURLParse.Path == string(targetURLParse.Separator) && targetURLParse.Scheme != "" {
			return false
		}
		if strings.HasSuffix(targetURLParse.Path, string(targetURLParse.Separator)) {
			return true
		}
		return false
	}
	if !targetContent.Type.IsDir() {
		// Target exists but is not a directory.
		return false
	}
	return true
}
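// Expected behavior of the isTargetURLDir variants above (inputs are
// hypothetical). A trailing separator marks a directory that may not exist
// yet; an existing target is classified by stat:
//
//	isTargetURLDir("https://s3.amazonaws.com/mybucket/dir/") // true: trailing separator
//	isTargetURLDir("https://s3.amazonaws.com/mybucket/file") // false, unless url2Stat
//	                                                         // reports an existing directory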
// NOTE: All the parse rules should be reduced to A: Copy(Source, Target).
//
// * VALID RULES
// =======================
// A: copy(f, f) -> copy(f, f)
// B: copy(f, d) -> copy(f, d/f) -> A
// C: copy(d1..., d2) -> []copy(d1/f, d2/d1/f) -> []A
// D: copy([]{d1... | f}, d2) -> []{copy(d1/f, d2/d1/f) | copy(f, d2/f)} -> []A
//
// * INVALID RULES
// =========================
// A: copy(d, *)
// B: copy(d..., f)
// C: copy(*, d...)
//
func checkCopySyntax(ctx *cli.Context) {
	if len(ctx.Args()) < 2 || ctx.Args().First() == "help" {
		cli.ShowCommandHelpAndExit(ctx, "cp", 1) // last argument is exit code.
	}
	// extract URLs.
	URLs, err := args2URLs(ctx.Args())
	fatalIf(err.Trace(ctx.Args()...), "One or more unknown URL types passed.")

	srcURLs := URLs[:len(URLs)-1]
	tgtURL := URLs[len(URLs)-1]

	/****** Generic rules *******/
	// Recursive URLs are not allowed in target.
	if isURLRecursive(tgtURL) {
		fatalIf(errDummy().Trace(), fmt.Sprintf("Recursive option is not supported for target ‘%s’ argument.", tgtURL))
	}
	// scope locally
	{
		url, err := client.Parse(tgtURL)
		if err != nil {
			fatalIf(probe.NewError(err), fmt.Sprintf("Unable to parse target ‘%s’ argument.", tgtURL))
		}
		if url.Host != "" {
			if url.Path == string(url.Separator) {
				fatalIf(errInvalidArgument().Trace(), fmt.Sprintf("Target ‘%s’ does not contain bucket name.", tgtURL))
			}
		}
	}
	switch guessCopyURLType(srcURLs, tgtURL) {
	case copyURLsTypeA: // File -> File.
		checkCopySyntaxTypeA(srcURLs, tgtURL)
	case copyURLsTypeB: // File -> Folder.
		checkCopySyntaxTypeB(srcURLs, tgtURL)
	case copyURLsTypeC: // Folder... -> Folder.
		checkCopySyntaxTypeC(srcURLs, tgtURL)
	case copyURLsTypeD: // File | Folder... -> Folder.
		checkCopySyntaxTypeD(srcURLs, tgtURL)
	default:
		fatalIf(errInvalidArgument().Trace(), "Invalid arguments to copy command.")
	}
}
// NOTE: All the parse rules should be reduced to A: Copy(Source, Target).
//
// * VALID RULES
// =======================
// A: copy(f, f) -> copy(f, f)
// B: copy(f, d) -> copy(f, d/f) -> A
// C: copy(d1..., d2) -> []copy(d1/f, d2/d1/f) -> []A
// D: copy([]{d1... | f}, d2) -> []{copy(d1/f, d2/d1/f) | copy(f, d2/f)} -> []A
//
// * INVALID RULES
// =========================
// A: copy(d, *)
// B: copy(d..., f)
// C: copy(*, d...)
//
func checkCopySyntax(ctx *cli.Context) {
	if len(ctx.Args()) < 2 || ctx.Args().First() == "help" {
		cli.ShowCommandHelpAndExit(ctx, "cp", 1) // last argument is exit code.
	}
	// extract URLs.
	URLs, err := args2URLs(ctx.Args())
	if err != nil {
		console.Fatalf("One or more unknown URL types found %s. %s\n", ctx.Args(), NewIodine(iodine.New(err, nil)))
	}
	srcURLs := URLs[:len(URLs)-1]
	tgtURL := URLs[len(URLs)-1]

	/****** Generic rules *******/
	// Recursive URLs are not allowed in target.
	if isURLRecursive(tgtURL) {
		console.Fatalf("Recursive option is not supported for target ‘%s’ argument. %s\n", tgtURL, NewIodine(iodine.New(errInvalidArgument{}, nil)))
	}
	url, err := client.Parse(tgtURL)
	if err != nil {
		console.Fatalf("Unable to parse target ‘%s’ argument. %s\n", tgtURL, NewIodine(iodine.New(err, nil)))
	}
	if url.Host != "" {
		if url.Path == string(url.Separator) {
			console.Fatalf("Bucket creation detected for %s, cloud storage URLs should use ‘mc mb’ to create buckets\n", tgtURL)
		}
	}
	switch guessCopyURLType(srcURLs, tgtURL) {
	case copyURLsTypeA: // File -> File.
		checkCopySyntaxTypeA(srcURLs, tgtURL)
	case copyURLsTypeB: // File -> Folder.
		checkCopySyntaxTypeB(srcURLs, tgtURL)
	case copyURLsTypeC: // Folder... -> Folder.
		checkCopySyntaxTypeC(srcURLs, tgtURL)
	case copyURLsTypeD: // File | Folder... -> Folder.
		checkCopySyntaxTypeD(srcURLs, tgtURL)
	default:
		console.Fatalln("Invalid arguments. Unable to determine how to copy. Please report this issue at https://github.com/minio/mc/issues")
	}
}
// getNewClient gives a new client interface.
func getNewClient(urlStr string, auth hostConfig) (client.Client, *probe.Error) {
	url, err := client.Parse(urlStr)
	if err != nil {
		return nil, probe.NewError(err)
	}
	switch url.Type {
	case client.Object: // Minio and S3 compatible cloud storage.
		s3Config := new(s3.Config)
		s3Config.AccessKeyID = func() string {
			if auth.AccessKeyID == globalAccessKeyID {
				return ""
			}
			return auth.AccessKeyID
		}()
		s3Config.SecretAccessKey = func() string {
			if auth.SecretAccessKey == globalSecretAccessKey {
				return ""
			}
			return auth.SecretAccessKey
		}()
		s3Config.AppName = "Minio"
		s3Config.AppVersion = globalMCVersion
		s3Config.AppComments = []string{os.Args[0], runtime.GOOS, runtime.GOARCH}
		s3Config.HostURL = urlStr
		s3Config.Debug = globalDebugFlag
		s3Client, err := s3.New(s3Config)
		if err != nil {
			return nil, err.Trace()
		}
		return s3Client, nil
	case client.Filesystem:
		fsClient, err := fs.New(urlStr)
		if err != nil {
			return nil, err.Trace()
		}
		return fsClient, nil
	}
	return nil, errInitClient(urlStr).Trace()
}
// url2Client - creates a client for the given URL using its host configuration.
func url2Client(url string) (client.Client, error) {
	// Empty source arg?
	urlParse, err := client.Parse(url)
	if err != nil {
		return nil, iodine.New(err, map[string]string{"URL": url})
	}
	if urlParse.Path == "" {
		return nil, iodine.New(errInvalidURL{URL: url}, map[string]string{"URL": url})
	}
	urlConfig, err := getHostConfig(url)
	if err != nil {
		return nil, iodine.New(err, map[string]string{"URL": url})
	}
	clnt, err := getNewClient(url, urlConfig)
	if err != nil {
		return nil, iodine.New(err, map[string]string{"URL": url})
	}
	return clnt, nil
}
// URL get url.
func (f *fsClient) URL() *client.URL {
	// Parse error is intentionally ignored; f.path is used as-is.
	url, _ := client.Parse(f.path)
	return url
}
// prepareCastURLsTypeC - C:
func prepareCastURLsTypeC(sourceURL string, targetURLs []string) <-chan castURLs {
	castURLsCh := make(chan castURLs)
	go func() {
		defer close(castURLsCh)
		if !isURLRecursive(sourceURL) {
			// Source is not of recursive type.
			castURLsCh <- castURLs{Error: NewIodine(iodine.New(errSourceNotRecursive{URL: sourceURL}, nil))}
			return
		}
		// add `/` after trimming off `...` to emulate directories.
		sourceURL = stripRecursiveURL(sourceURL)
		sourceClient, sourceContent, err := url2Stat(sourceURL)
		// Source exist?
		if err != nil {
			// Source does not exist or insufficient privileges.
			castURLsCh <- castURLs{Error: NewIodine(iodine.New(err, nil))}
			return
		}
		if !sourceContent.Type.IsDir() {
			// Source is not a dir.
			castURLsCh <- castURLs{Error: NewIodine(iodine.New(errSourceIsNotDir{URL: sourceURL}, nil))}
			return
		}
		for sourceContent := range sourceClient.List(true) {
			if sourceContent.Err != nil {
				// Listing failed.
				castURLsCh <- castURLs{Error: NewIodine(iodine.New(sourceContent.Err, nil))}
				continue
			}
			if !sourceContent.Content.Type.IsRegular() {
				// Source is not a regular file. Skip it for cast.
				continue
			}
			// All OK.. We can proceed: reduce each listed file to Type A against every target.
			sourceURLParse, err := client.Parse(sourceURL)
			if err != nil {
				castURLsCh <- castURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceURL}, nil))}
				continue
			}
			var newTargetURLs []string
			var sourceContentParse *client.URL
			for _, targetURL := range targetURLs {
				targetURLParse, err := client.Parse(targetURL)
				if err != nil {
					castURLsCh <- castURLs{Error: NewIodine(iodine.New(errInvalidTarget{URL: targetURL}, nil))}
					continue
				}
				sourceURLDelimited := sourceURLParse.String()[:strings.LastIndex(sourceURLParse.String(), string(sourceURLParse.Separator))+1]
				sourceContentName := sourceContent.Content.Name
				sourceContentURL := sourceURLDelimited + sourceContentName
				sourceContentParse, err = client.Parse(sourceContentURL)
				if err != nil {
					castURLsCh <- castURLs{Error: NewIodine(iodine.New(errInvalidSource{URL: sourceContentName}, nil))}
					continue
				}
				// Construct target path from recursive path of source without its prefix dir.
				newTargetURLParse := *targetURLParse
				newTargetURLParse.Path = filepath.Join(newTargetURLParse.Path, sourceContentName)
				newTargetURLs = append(newTargetURLs, newTargetURLParse.String())
			}
			if sourceContentParse == nil {
				// No target URL parsed successfully; nothing to cast for this entry.
				continue
			}
			castURLsCh <- prepareCastURLsTypeA(sourceContentParse.String(), newTargetURLs)
		}
	}()
	return castURLsCh
}
// SINGLE SOURCE - Type C: copy(d1..., d2) -> []copy(d1/f, d1/d2/f) -> []A
// prepareCopyURLsTypeC - prepares target and source URLs for copying.
func prepareCopyURLsTypeC(sourceURL, targetURL string) <-chan copyURLs {
	copyURLsCh := make(chan copyURLs)
	go func(sourceURL, targetURL string, copyURLsCh chan copyURLs) {
		defer close(copyURLsCh)
		if !isURLRecursive(sourceURL) {
			// Source is not of recursive type.
			copyURLsCh <- copyURLs{Error: errSourceNotRecursive(sourceURL).Trace()}
			return
		}
		// add `/` after trimming off `...` to emulate folders.
		sourceURL = stripRecursiveURL(sourceURL)
		sourceClient, sourceContent, err := url2Stat(sourceURL)
		if err != nil {
			// Source does not exist or insufficient privileges.
			copyURLsCh <- copyURLs{Error: err.Trace(sourceURL)}
			return
		}
		if !sourceContent.Type.IsDir() {
			// Source is not a dir.
			copyURLsCh <- copyURLs{Error: errSourceIsNotDir(sourceURL).Trace()}
			return
		}
		for sourceContent := range sourceClient.List(true) {
			if sourceContent.Err != nil {
				// Listing failed.
				copyURLsCh <- copyURLs{Error: sourceContent.Err.Trace()}
				continue
			}
			if !sourceContent.Content.Type.IsRegular() {
				// Source is not a regular file. Skip it for copy.
				continue
			}
			// All OK.. We can proceed: reduce each listed file to a Type A copy.
			sourceURLParse, err := client.Parse(sourceURL)
			if err != nil {
				copyURLsCh <- copyURLs{Error: errInvalidSource(sourceURL).Trace()}
				continue
			}
			targetURLParse, err := client.Parse(targetURL)
			if err != nil {
				copyURLsCh <- copyURLs{Error: errInvalidTarget(targetURL).Trace()}
				continue
			}
			sourceURLDelimited := sourceURLParse.String()[:strings.LastIndex(sourceURLParse.String(), string(sourceURLParse.Separator))+1]
			sourceContentName := sourceContent.Content.Name
			sourceContentURL := sourceURLDelimited + sourceContentName
			sourceContentParse, err := client.Parse(sourceContentURL)
			if err != nil {
				copyURLsCh <- copyURLs{Error: errInvalidSource(sourceContentName).Trace()}
				continue
			}
			// Construct target path from recursive path of source without its prefix dir.
			newTargetURLParse := *targetURLParse
			newTargetURLParse.Path = filepath.Join(newTargetURLParse.Path, sourceContentName)
			copyURLsCh <- prepareCopyURLsTypeA(sourceContentParse.String(), newTargetURLParse.String())
		}
	}(sourceURL, targetURL, copyURLsCh)
	return copyURLsCh
}
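// The Type C reduction above is ultimately path arithmetic: each listed
// source entry is re-rooted under the target before being handed to the
// Type A preparer. A minimal sketch of that step with hypothetical paths:
//
//	targetPath := "/backup"
//	sourceContentName := "d1/f.txt" // entry name relative to the recursive source root
//	newTargetPath := filepath.Join(targetPath, sourceContentName)
//	// newTargetPath == "/backup/d1/f.txt": copy(d1..., /backup) reduces to a
//	// Type A copy per file, here copy(d1/f.txt, /backup/d1/f.txt).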