Example #1
File: s3.go Project: yut148/rclone
// SetModTime sets the modification time of the local fs object
func (o *Object) SetModTime(modTime time.Time) error {
	err := o.readMetaData()
	if err != nil {
		return err
	}
	o.meta[metaMtime] = aws.String(swift.TimeToFloatString(modTime))

	if o.bytes >= maxSizeForCopy {
		fs.Debug(o, "SetModTime is unsupported for objects bigger than %v bytes", fs.SizeSuffix(maxSizeForCopy))
		return nil
	}

	// Guess the content type
	contentType := fs.MimeType(o)

	// Copy the object to itself to update the metadata
	key := o.fs.root + o.remote
	sourceKey := o.fs.bucket + "/" + key
	directive := s3.MetadataDirectiveReplace // replace metadata with that passed in
	req := s3.CopyObjectInput{
		Bucket:            &o.fs.bucket,
		ACL:               &o.fs.perm,
		Key:               &key,
		ContentType:       &contentType,
		CopySource:        aws.String(url.QueryEscape(sourceKey)),
		Metadata:          o.meta,
		MetadataDirective: &directive,
	}
	_, err = o.fs.c.CopyObject(&req)
	return err
}
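S3 offers no call to change an object's modification time in place, so the method above rewrites the metadata by copying the object onto itself with MetadataDirectiveReplace. A minimal standalone sketch of the same trick against aws-sdk-go (the bucket, key and mtime value are placeholders, not rclone's own names):

package main

import (
	"log"
	"net/url"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Assumes credentials and region come from the environment.
	svc := s3.New(session.Must(session.NewSession()))

	bucket, key := "example-bucket", "path/to/object" // placeholders
	_, err := svc.CopyObject(&s3.CopyObjectInput{
		Bucket:     aws.String(bucket),
		Key:        aws.String(key),
		CopySource: aws.String(url.QueryEscape(bucket + "/" + key)),
		// Replace the stored metadata wholesale with what is passed in.
		Metadata:          map[string]*string{"mtime": aws.String("1466383221.5")},
		MetadataDirective: aws.String(s3.MetadataDirectiveReplace),
	})
	if err != nil {
		log.Fatal(err)
	}
}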
Example #2
	// Description of how to auth for this app
	oauthConfig = &oauth2.Config{
		Scopes: []string{
			"wl.signin",          // Allow single sign-on capabilities
			"wl.offline_access",  // Allow receiving a refresh token
			"onedrive.readwrite", // r/w perms to all of a user's OneDrive files
		},
		Endpoint: oauth2.Endpoint{
			AuthURL:  "https://login.live.com/oauth20_authorize.srf",
			TokenURL: "https://login.live.com/oauth20_token.srf",
		},
		ClientID:     rcloneClientID,
		ClientSecret: fs.Reveal(rcloneClientSecret),
		RedirectURL:  oauthutil.RedirectPublicURL,
	}
	chunkSize    = fs.SizeSuffix(10 * 1024 * 1024)
	uploadCutoff = fs.SizeSuffix(10 * 1024 * 1024)
)

// Register with Fs
func init() {
	fs.Register(&fs.Info{
		Name:  "onedrive",
		NewFs: NewFs,
		Config: func(name string) {
			err := oauthutil.Config(name, oauthConfig)
			if err != nil {
				log.Fatalf("Failed to configure token: %v", err)
			}
		},
		Options: []fs.Option{{
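The oauthConfig above is an ordinary golang.org/x/oauth2 config; oauthutil.Config drives rclone's interactive token setup, but as a rough sketch (an illustrative helper, not rclone's code) the same config can produce the authorization URL and exchange the pasted code for a token:

package oauthexample

import (
	"context"
	"fmt"

	"golang.org/x/oauth2"
)

// authorize prints the URL the user must visit, then exchanges the code they
// paste back for a token. cfg would be a config like oauthConfig above.
func authorize(cfg *oauth2.Config, code string) (*oauth2.Token, error) {
	fmt.Println("Visit this URL to authorize:", cfg.AuthCodeURL("state"))
	return cfg.Exchange(context.Background(), code)
}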
Example #3
const (
	rcloneClientID              = "amzn1.application-oa2-client.6bf18d2d1f5b485c94c8988bb03ad0e7"
	rcloneEncryptedClientSecret = "ZP12wYlGw198FtmqfOxyNAGXU3fwVcQdmt--ba1d00wJnUs0LOzvVyXVDbqhbcUqnr5Vd1QejwWmiv1Ep7UJG1kUQeuBP5n9goXWd5MrAf0"
	folderKind                  = "FOLDER"
	fileKind                    = "FILE"
	assetKind                   = "ASSET"
	statusAvailable             = "AVAILABLE"
	timeFormat                  = time.RFC3339 // 2014-03-07T22:31:12.173Z
	minSleep                    = 20 * time.Millisecond
	warnFileSize                = 50000 << 20 // Display warning for files larger than this size
)

// Globals
var (
	// Flags
	tempLinkThreshold = fs.SizeSuffix(9 << 30) // Download files bigger than this via the tempLink
	uploadWaitPerGB   = fs.DurationP("acd-upload-wait-per-gb", "", 180*time.Second, "Additional time per GB to wait after a failed complete upload to see if it appears.")
	// Description of how to auth for this app
	acdConfig = &oauth2.Config{
		Scopes: []string{"clouddrive:read_all", "clouddrive:write"},
		Endpoint: oauth2.Endpoint{
			AuthURL:  "https://www.amazon.com/ap/oa",
			TokenURL: "https://api.amazon.com/auth/o2/token",
		},
		ClientID:     rcloneClientID,
		ClientSecret: fs.MustReveal(rcloneEncryptedClientSecret),
		RedirectURL:  oauthutil.RedirectURL,
	}
)

// Register with Fs
Example #4
)

// Constants
const (
	rcloneAppKey             = "5jcck7diasz0rqy"
	rcloneEncryptedAppSecret = "fRS5vVLr2v6FbyXYnIgjwBuUAt0osq_QZTXAEcmZ7g"
	metadataLimit            = dropbox.MetadataLimitDefault // max items to fetch at once
)

var (
	// A regexp matching path names for files Dropbox ignores
	// See https://www.dropbox.com/en/help/145 - Ignored files
	ignoredFiles = regexp.MustCompile(`(?i)(^|/)(desktop\.ini|thumbs\.db|\.ds_store|icon\r|\.dropbox|\.dropbox.attr)$`)
	// Upload chunk size - setting too small makes uploads slow.
	// Chunks aren't buffered into memory though so can set large.
	uploadChunkSize    = fs.SizeSuffix(128 * 1024 * 1024)
	maxUploadChunkSize = fs.SizeSuffix(150 * 1024 * 1024)
)

// Register with Fs
func init() {
	fs.Register(&fs.RegInfo{
		Name:        "dropbox",
		Description: "Dropbox",
		NewFs:       NewFs,
		Config:      configHelper,
		Options: []fs.Option{{
			Name: "app_key",
			Help: "Dropbox App Key - leave blank normally.",
		}, {
			Name: "app_secret",
Example #5
	driveFolderType    = "application/vnd.google-apps.folder"
	timeFormatIn       = time.RFC3339
	timeFormatOut      = "2006-01-02T15:04:05.000000000Z07:00"
	minSleep           = 10 * time.Millisecond
	maxSleep           = 2 * time.Second
	decayConstant      = 2 // bigger for slower decay, exponential
)

// Globals
var (
	// Flags
	driveFullList = pflag.BoolP("drive-full-list", "", true, "Use a full listing for directory list. More data but usually quicker.")
	driveUseTrash = pflag.BoolP("drive-use-trash", "", false, "Send files to the trash instead of deleting permanently.")
	// chunkSize is the size of the chunks created during a resumable upload and should be a power of two.
	// 1<<18 is the minimum size supported by the Google uploader, and there is no maximum.
	chunkSize         = fs.SizeSuffix(256 * 1024)
	driveUploadCutoff = chunkSize
	// Description of how to auth for this app
	driveConfig = &oauth2.Config{
		Scopes:       []string{"https://www.googleapis.com/auth/drive"},
		Endpoint:     google.Endpoint,
		ClientID:     rcloneClientID,
		ClientSecret: fs.Reveal(rcloneClientSecret),
		RedirectURL:  oauthutil.TitleBarRedirectURL,
	}
)

// Register with Fs
func init() {
	fs.Register(&fs.FsInfo{
		Name:  "drive",
Example #6
	"strings"
	"time"

	"github.com/ncw/rclone/fs"
	"github.com/ncw/swift"
	"github.com/spf13/pflag"
)

// Constants
const (
	directoryMarkerContentType = "application/directory" // content type of directory marker objects
)

// Globals
var (
	chunkSize = fs.SizeSuffix(5 * 1024 * 1024 * 1024) // 5 GiB - uploads above this size are chunked into segments
)

// Register with Fs
func init() {
	fs.Register(&fs.Info{
		Name:  "swift",
		NewFs: NewFs,
		Options: []fs.Option{{
			Name: "user",
			Help: "User name to log in.",
		}, {
			Name: "key",
			Help: "API key or password.",
		}, {
			Name: "auth",
Example #7
		MinArgs: 1,
		MaxArgs: 1,
	},
	{
		Name:     "size",
		ArgsHelp: "remote:path",
		Help: `
        Returns the total size of objects in remote:path and the number
        of objects.`,
		Run: func(fdst, fsrc fs.Fs) error {
			objects, size, err := fs.Count(fdst)
			if err != nil {
				return err
			}
			fmt.Printf("Total objects: %d\n", objects)
			fmt.Printf("Total size: %v (%d bytes)\n", fs.SizeSuffix(size), size)
			return nil
		},
		MinArgs: 1,
		MaxArgs: 1,
	},
	{
		Name:     "mkdir",
		ArgsHelp: "remote:path",
		Help: `
        Make the path if it doesn't already exist`,
		Run: func(fdst, fsrc fs.Fs) error {
			return fs.Mkdir(fdst)
		},
		MinArgs: 1,
		MaxArgs: 1,
Example #8
File: b2.go Project: ncw/rclone
	timeHeader       = headerPrefix + timeKey
	sha1Key          = "large_file_sha1"
	sha1Header       = "X-Bz-Content-Sha1"
	sha1InfoHeader   = headerPrefix + sha1Key
	testModeHeader   = "X-Bz-Test-Mode"
	retryAfterHeader = "Retry-After"
	minSleep         = 10 * time.Millisecond
	maxSleep         = 5 * time.Minute
	decayConstant    = 1 // bigger for slower decay, exponential
	maxParts         = 10000
	maxVersions      = 100 // maximum number of versions we search in --b2-versions mode
)

// Globals
var (
	minChunkSize       = fs.SizeSuffix(100E6)
	chunkSize          = fs.SizeSuffix(96 * 1024 * 1024)
	uploadCutoff       = fs.SizeSuffix(200E6)
	b2TestMode         = fs.StringP("b2-test-mode", "", "", "A flag string for X-Bz-Test-Mode header.")
	b2Versions         = fs.BoolP("b2-versions", "", false, "Include old versions in directory listings.")
	errNotWithVersions = errors.New("can't modify or delete files in --b2-versions mode")
)

// Register with Fs
func init() {
	fs.Register(&fs.RegInfo{
		Name:        "b2",
		Description: "Backblaze B2",
		NewFs:       NewFs,
		Options: []fs.Option{{
			Name: "account",
Example #9
File: size.go Project: ncw/rclone
import (
	"fmt"

	"github.com/ncw/rclone/cmd"
	"github.com/ncw/rclone/fs"
	"github.com/spf13/cobra"
)

func init() {
	cmd.Root.AddCommand(commandDefintion)
}

var commandDefintion = &cobra.Command{
	Use:   "size remote:path",
	Short: `Prints the total size and number of objects in remote:path.`,
	Run: func(command *cobra.Command, args []string) {
		cmd.CheckArgs(1, 1, command, args)
		fsrc := cmd.NewFsSrc(args)
		cmd.Run(false, false, command, func() error {
			objects, size, err := fs.Count(fsrc)
			if err != nil {
				return err
			}
			fmt.Printf("Total objects: %d\n", objects)
			fmt.Printf("Total size: %s (%d Bytes)\n", fs.SizeSuffix(size).Unit("Bytes"), size)
			return nil
		})
	},
}
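Both size commands above format byte counts through fs.SizeSuffix. A standalone sketch of the same formatting, assuming only the github.com/ncw/rclone/fs package shown in these examples (the exact rendered strings come from SizeSuffix's String and Unit methods):

package main

import (
	"fmt"

	"github.com/ncw/rclone/fs"
)

func main() {
	size := int64(200e6)
	// %v uses SizeSuffix's String method; Unit appends the unit name,
	// matching the "Total size" lines printed above.
	fmt.Printf("Total size: %v (%d bytes)\n", fs.SizeSuffix(size), size)
	fmt.Printf("Total size: %s (%d Bytes)\n", fs.SizeSuffix(size).Unit("Bytes"), size)
}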