Example #1
// Synchronize an entire folder hierarchy from Drive to a local directory.
func syncHierarchyDown(driveBasePath string, localBasePath string, trustTimes bool,
	downloadGoogleAppsFiles bool) int {
	// First, make sure the user isn't asking us to download a directory on
	// top of a file.
	if stat, err := os.Stat(localBasePath); err == nil && !stat.IsDir() {
		printErrorAndExit(fmt.Errorf("%s: unable to download folder %s "+
			"on top of file", localBasePath, driveBasePath))
	}

	// Get the files from Drive under driveBasePath.
	includeBase := true
	message("Getting list of files to download... ")
	filesOnDrive, err := gd.GetFilesUnderFolder(driveBasePath, includeBase)
	checkFatalError(err, "error getting files from Drive")
	message("Done. Starting download.\n")

	// We won't download files where there are multiple versions of the
	// file with the same name on Drive.  Issue warnings about any dupes
	// here.
	uniqueDriveFiles, dupes := gdrive.PartitionUniquesAndMultiples(filesOnDrive)
	nDownloadErrors := int32(len(dupes))
	for _, f := range dupes {
		fmt.Fprintf(os.Stderr, "skicka: %s: skipping download of duplicate "+
			"file on Drive\n", f[0].Path)
	}

	// If we're not trying to download Google Apps files (Docs, etc.),
	// then filter them out here.
	if !downloadGoogleAppsFiles {
		var files []*gdrive.File
		for _, f := range uniqueDriveFiles {
			if f.IsGoogleAppsFile() {
				message("%s: skipping Google Apps file.", f.Path)
			} else {
				files = append(files, f)
			}
		}
		uniqueDriveFiles = files
	}

	// Create a map that stores the local filename to use for each file in
	// Google Drive. This map is indexed by the path of the Google Drive
	// file.
	localPathMap := createPathMap(uniqueDriveFiles, localBasePath, driveBasePath)

	// First create all of the local directories, so that the downloaded
	// files have somewhere to land.  For any already-existing directories,
	// update their permissions to match the permissions of the
	// corresponding folder on Drive.  Stop executing if there's an error;
	// we almost certainly can't successfully go on if we failed creating
	// some local directories.
	err = createLocalDirectories(localPathMap, uniqueDriveFiles)
	checkFatalError(err, "")

	// Now figure out which files actually need to be downloaded and
	// initialize filesToDownload with their corresponding gdrive.Files.
	nBytesToDownload := int64(0)
	var filesToDownload []*gdrive.File
	for _, f := range uniqueDriveFiles {
		if f.IsFolder() {
			// Folders were already taken care of by createLocalDirectories().
			continue
		}

		localPath := localPathMap[f.Path]
		needsDownload, err := fileNeedsDownload(localPath, f, trustTimes)
		if err != nil {
			addErrorAndPrintMessage(&nDownloadErrors,
				fmt.Sprintf("%s: error determining if file needs download\n",
					f.Path), err)
			continue
		}

		if needsDownload {
			nBytesToDownload += f.FileSize
			filesToDownload = append(filesToDownload, f)
		} else {
			// No download needed, but make sure the local permissions and
			// modified time match those values on Drive.
			syncLocalFileMetadata(localPath, f, &nDownloadErrors)
		}
	}

	// Bail out early if everything is up to date.
	if len(filesToDownload) == 0 {
		message("Nothing to download.")
		return 0
	}

	// Actually download the files. We'll use multiple workers to improve
	// performance; we're more likely to have some workers actively
	// downloading file contents while others are still making Drive API
	// calls this way.
	toDownloadChan := make(chan *gdrive.File, 128)
	doneChan := make(chan int, nWorkers)
	progressBar := getProgressBar(nBytesToDownload)

	// Launch the workers.
	for i := 0; i < nWorkers; i++ {
		go func() {
			for {
				// Get the gdrive.File for the file the worker should download
				// next.
				if f, ok := <-toDownloadChan; ok {
					localPath := localPathMap[f.Path]
					err := downloadFile(f, localPath, progressBar)
					if err != nil {
						addErrorAndPrintMessage(&nDownloadErrors, localPath, err)
					}
				} else {
					debug.Printf("Worker exiting")
					doneChan <- 1
					break
				}
			}
		}()
	}

	// Send the workers the files to be downloaded.
	for _, f := range filesToDownload {
		toDownloadChan <- f
	}
	close(toDownloadChan)

	// And now wait for the workers to all return.
	for i := 0; i < nWorkers; i++ {
		<-doneChan
	}
	if progressBar != nil {
		progressBar.Finish()
	}

	if nDownloadErrors > 0 {
		fmt.Fprintf(os.Stderr, "skicka: %d files not downloaded due to errors\n",
			nDownloadErrors)
	}
	return int(nDownloadErrors)
}
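
The helpers used above (createPathMap, fileNeedsDownload, syncLocalFileMetadata, and friends) are defined elsewhere in skicka and aren't shown here. As a rough illustration of the kind of check fileNeedsDownload presumably performs, comparing sizes and, when trustTimes is set, modification times, the sketch below uses a hypothetical driveFile struct as a stand-in for the real gdrive.File type; it is not skicka's actual implementation.

// Sketch only: a size/mtime comparison of the sort fileNeedsDownload
// presumably performs. driveFile is a stand-in for gdrive.File.
package main

import (
	"fmt"
	"os"
	"time"
)

type driveFile struct {
	Path     string
	FileSize int64
	ModTime  time.Time
}

// needsDownload reports whether the file at localPath is missing or
// appears to differ from the Drive copy. When trustTimes is false, a
// real implementation would fall back to comparing content checksums.
func needsDownload(localPath string, f driveFile, trustTimes bool) (bool, error) {
	stat, err := os.Stat(localPath)
	if os.IsNotExist(err) {
		return true, nil // no local copy yet
	}
	if err != nil {
		return false, err
	}
	if stat.Size() != f.FileSize {
		return true, nil
	}
	if trustTimes {
		// Consider the file current if the timestamps match to the second.
		return !stat.ModTime().Truncate(time.Second).Equal(f.ModTime.Truncate(time.Second)), nil
	}
	// Without trusting timestamps, assume a download is needed; a real
	// implementation would compare a local checksum against the one
	// Drive reports instead of re-downloading unconditionally.
	return true, nil
}

func main() {
	need, err := needsDownload("/tmp/example.txt",
		driveFile{Path: "/example.txt", FileSize: 42, ModTime: time.Now()}, true)
	fmt.Println(need, err)
}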
Example #2
func fsck(args []string, metadataCacheFilename string) int {
	path := ""
	actuallyTrash := false
	for i := 0; i < len(args); i++ {
		if args[i] == "--trash-duplicates" {
			actuallyTrash = true
		} else if path == "" {
			path = args[i]
		} else {
			fmt.Fprintf(os.Stderr, "Usage: skicka fsck [--trash-duplicates] [drive_path]\n")
			fmt.Printf("Run \"skicka help\" for more detailed help text.\n")
			return 1
		}
	}
	if path == "" {
		path = "/"
	}

	if actuallyTrash {
		fmt.Fprintf(os.Stderr, `
**** WARNING WARNING DANGER ****

The "fsck" command is new and hasn't been thoroughly tested. In that
it will optionally delete files from Google Drive, you should be
very careful with it. At minimum, please first do a run without the
"--trash-duplicates" option tnd make sure that any files that it says
it's planning on deleting are ok to delete.

If disaster strikes and it deletes something it shouldn't (or if it
wants to delete something it shouldn't), please file a bug at
https://github.com/google/skicka/issues. If it has made a deletion
mistake, it should be possible to salvage the file from the trash.

**** WARNING WARNING DANGER ****

`)
		time.Sleep(10 * time.Second)
	}

	includeBase := true
	files, err := gd.GetFilesUnderFolder(path, includeBase)
	if err != nil {
		fmt.Fprintf(os.Stderr, "skicka: %s: %v\n", path, err)
		return 1
	}

	errs := 0
	uniques, dupes := gdrive.PartitionUniquesAndMultiples(files)
	for _, f := range uniques {
		errs += checkFile(f)
	}
	for _, files := range dupes {
		errs += cleanupDupes(files, actuallyTrash)
	}

	// See if the metadata cache is in sync.
	gd.CheckMetadata(metadataCacheFilename, func(msg string) {
		fmt.Fprintf(os.Stderr, "skicka: %s\n", msg)
		errs++
	})

	return errs
}
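
cleanupDupes isn't shown above either. Its contract, as used here, is to report files that share a single Drive path and, only when --trash-duplicates was passed, trash all but one copy, returning the number of errors encountered. The sketch below illustrates that behavior with a hypothetical dupFile type and trashFile helper standing in for skicka's real gdrive.File and Drive API calls.

// Illustration only: how a duplicate-cleanup step could behave. The
// dupFile type and trashFile function are hypothetical stand-ins.
package main

import (
	"fmt"
	"os"
)

type dupFile struct {
	Path string
	Id   string
}

// trashFile is a placeholder for the call that moves a Drive file to the trash.
func trashFile(f dupFile) error {
	fmt.Printf("trashing %s (id %s)\n", f.Path, f.Id)
	return nil
}

// cleanupDupes reports files that share one Drive path and, if
// actuallyTrash is set, trashes all but the first copy. It returns the
// number of errors encountered, matching how fsck accumulates errs.
func cleanupDupes(files []dupFile, actuallyTrash bool) int {
	errs := 0
	if len(files) < 2 {
		return errs
	}
	fmt.Fprintf(os.Stderr, "skicka: %s: %d files share this path\n",
		files[0].Path, len(files))
	if !actuallyTrash {
		return errs
	}
	for _, f := range files[1:] {
		if err := trashFile(f); err != nil {
			fmt.Fprintf(os.Stderr, "skicka: %s: %v\n", f.Path, err)
			errs++
		}
	}
	return errs
}

func main() {
	dupes := []dupFile{{"/photos/a.jpg", "id1"}, {"/photos/a.jpg", "id2"}}
	fmt.Println("errors:", cleanupDupes(dupes, true))
}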