func (session *BackupSession) PrintStoreProgress(interval time.Duration) { if session.ShowProgress && (interval == 0 || time.Now().After(session.Progress)) { var compression float64 if session.Client.WriteData > 0 { compression = 100.0 * (float64(session.Client.WriteData) - float64(session.Client.WriteDataCompressed)) / float64(session.Client.WriteData) } sent, skipped, _, queuedsize := session.Client.GetStats() session.Log(">>> %.1f min, read: %s, written: %s (%.0f%% compr), %d folders, %d/%d files changed, blocks sent %d/%d, queued:%s", time.Since(session.Start).Minutes(), core.HumanSize(session.ReadData), core.HumanSize(session.Client.WriteDataCompressed), compression, session.Directories, session.Files-session.UnchangedFiles, session.Files, sent, skipped+sent, core.HumanSize(int64(queuedsize))) //fmt.Println(core.MemoryStats()) session.Progress = time.Now().Add(interval) } }
func (session *BackupSession) PrintDiffProgress() { var compression float64 if session.WriteData > 0 { compression = 100.0 * (float64(session.WriteData) - float64(session.ReadData)) / float64(session.WriteData) } session.Log(">>> %.1f min, read: %s (%.0f%% compr), compared: %s, %d folders, %d files", time.Since(session.Start).Minutes(), core.HumanSize(session.ReadData), compression, core.HumanSize(session.WriteData), session.Directories, session.Files) //fmt.Println(core.MemoryStats()) }
// main wires up the hashback CLI: global options, per-command options, and
// the command handlers (info/list/store/restore/diff/remove), then hands
// control to cmd.Parse. Shared state captured by the command closures:
// session (the backup client session), lockFile (optional PID lock).
func main() {
	bytearray.EnableAutoGC(60, 74)

	var lockFile *lockfile.LockFile

	// Profiling/debug endpoint on :6060.
	// NOTE(review): handlers are only registered if some file in this
	// package imports net/http/pprof — not visible in this chunk; confirm.
	runtime.SetBlockProfileRate(1000)
	go func() { log.Println(http.ListenAndServe(":6060", nil)) }()

	defer func() { // Panic error handling
		// In non-debug builds, convert panics into a printed message and
		// exit code 1, releasing the PID lock if one was taken.
		if !DEBUG {
			if r := recover(); r != nil {
				fmt.Println(r)
				if lockFile != nil {
					lockFile.Unlock()
				}
				os.Exit(1)
			}
		}
	}()

	// Figure out where to load/save options
	{
		// Default to ./.hashback, but prefer <home>/.hashback when the
		// user's home folder exists and is a directory.
		LocalStoragePath = filepath.Join(".", ".hashback")
		home, err := filepath.Abs(userHomeFolder())
		if err == nil {
			s, err := os.Stat(home)
			if err == nil && s.IsDir() {
				LocalStoragePath = filepath.Join(home, ".hashback")
			}
		}
		err = os.MkdirAll(filepath.Dir(LocalStoragePath), 0700)
		PanicOn(err)
	}

	session := NewBackupSession()

	cmd.Title = fmt.Sprintf("Hashback %s (Hashbox Backup Client)", Version)
	cmd.OptionsFile = filepath.Join(LocalStoragePath, "options.json")

	// --- Global options ---
	cmd.BoolOption("debug", "", "Debug output", &DEBUG, cmd.Hidden)

	var queueSizeMB int64
	cmd.IntOption("queuesize", "", "<MiB>", "Change sending queue size", &queueSizeMB, cmd.Hidden|cmd.Preference).OnChange(func() { SendingQueueSize = queueSizeMB * 1024 * 1024 })

	cmd.StringOption("user", "", "<username>", "Username", &session.User, cmd.Preference|cmd.Required)

	// Access/backup keys may be supplied directly (hidden options) or
	// derived from the password option below; the OnChange handlers copy
	// the first 16 bytes into a core.Byte128.
	var accesskey []byte
	cmd.ByteOption("accesskey", "", "", "Hashbox server accesskey", &accesskey, cmd.Preference|cmd.Hidden).OnChange(func() {
		var key core.Byte128
		copy(key[:16], accesskey[:])
		session.AccessKey = &key
	})
	var backupkey []byte
	cmd.ByteOption("backupkey", "", "", "Hashbox server backupkey", &backupkey, cmd.Preference|cmd.Hidden).OnChange(func() {
		var key core.Byte128
		copy(key[:16], backupkey[:])
		session.BackupKey = &key
	})
	var password string
	cmd.StringOption("password", "", "<password>", "Password", &password, cmd.Standard).OnChange(func() {
		// Deriving both keys from user+password also refreshes the raw
		// byte slices so they can be persisted as preferences.
		{
			key := GenerateAccessKey(session.User, password)
			session.AccessKey = &key
			accesskey = session.AccessKey[:]
		}
		{
			key := GenerateBackupKey(session.User, password)
			session.BackupKey = &key
			backupkey = session.BackupKey[:]
		}
	}).OnSave(func() {
		if session.User == "" {
			panic(errors.New("Unable to save login unless both user and password options are specified"))
		}
	})
	cmd.StringOption("server", "", "<ip>:<port>", "Hashbox server address", &session.ServerString, cmd.Preference|cmd.Required)
	cmd.BoolOption("full", "", "Force a non-incremental store", &session.FullBackup, cmd.Preference)
	cmd.BoolOption("verbose", "", "Show verbose output", &session.Verbose, cmd.Preference)
	cmd.BoolOption("progress", "", "Show progress during store", &session.ShowProgress, cmd.Preference)
	cmd.BoolOption("paint", "", "Paint!", &session.Paint, cmd.Preference).OnChange(func() {
		// Paint mode supersedes verbose/progress output.
		if session.Paint {
			session.Verbose = false
			session.ShowProgress = false
		}
	})

	// --- info: show account status and visible datasets ---
	cmd.Command("info", "", func() {
		session.Connect()
		defer session.Close(true)
		info := session.Client.GetAccountInfo()
		var hashbackEnabled bool = false
		var dlist []core.Dataset
		for _, d := range info.DatasetList {
			// The control dataset "\x07HASHBACK_DEK" marks an account as
			// set up for Hashback; names starting with a control character
			// (<= 32) are hidden from the listing.
			// NOTE(review): d.Name[0] would panic on an empty name —
			// presumably the server never returns one; confirm.
			if d.Name == "\x07HASHBACK_DEK" {
				hashbackEnabled = true
			}
			if d.Name[0] > 32 {
				dlist = append(dlist, d)
			}
		}
		// fmt.Println("* TODO: Add quota and total size info")
		if hashbackEnabled {
			fmt.Println("Account is setup for Hashback")
		} else {
			fmt.Println("Account is NOT setup for Hashback")
		}
		fmt.Println("")
		if len(dlist) == 0 {
			fmt.Println("No datasets")
		} else {
			fmt.Println("Size Dataset")
			fmt.Println("-------- -------")
			for _, d := range dlist {
				fmt.Printf("%8s %s\n", core.HumanSize(d.Size), d.Name)
			}
		}
	})

	// --- list: list backup states of a dataset, or files within one state ---
	cmd.Command("list", "<dataset> [(<backup id>|.) [\"<path>\"]]", func() {
		if len(cmd.Args) < 3 {
			panic(errors.New("Missing dataset argument"))
		}
		session.Connect()
		defer session.Close(true)
		list := session.Client.ListDataset(cmd.Args[2])
		if len(list.States) > 0 {
			if len(cmd.Args) < 4 {
				// No backup id given: print one row per state. StateID's
				// first 8 bytes are a big-endian nanosecond timestamp.
				fmt.Println("Backup id Backup start Total size Diff prev")
				fmt.Println("-------------------------------- ------------------------- ---------- ----------")
				for _, e := range list.States {
					timestamp := binary.BigEndian.Uint64(e.State.StateID[:])
					date := time.Unix(0, int64(timestamp))
					if e.StateFlags&core.StateFlagInvalid == core.StateFlagInvalid {
						fmt.Printf("%-32x %-25s !!! INVALID DATASET\n", e.State.StateID, date.Format(time.RFC3339))
					} else {
						fmt.Printf("%-32x %-25s %10s %10s\n", e.State.StateID, date.Format(time.RFC3339), core.HumanSize(e.State.Size), core.HumanSize(e.State.UniqueSize))
					}
				}
			} else {
				// Resolve "." to the last VALID state, or match an explicit
				// hex state id. The loop keeps the last match.
				// NOTE(review): Go precedence makes this
				// (valid && arg==".") || id-match — an explicit id matches
				// even an invalid state; confirm that is intended.
				var state *core.DatasetState
				for i, e := range list.States {
					if e.StateFlags&core.StateFlagInvalid != core.StateFlagInvalid && cmd.Args[3] == "." || fmt.Sprintf("%x", e.State.StateID[:]) == cmd.Args[3] {
						state = &list.States[i].State
					}
				}
				if state == nil {
					panic(errors.New("Backup id not found"))
				}
				var filelist []*FileEntry
				var listpath string = "*"
				if len(cmd.Args) > 4 {
					listpath = cmd.Args[4]
				}
				var err error
				filelist, err = session.findPathMatch(state.BlockID, listpath)
				if err != nil {
					panic(err)
				}
				fmt.Printf("Listing %s\n", listpath)
				if len(filelist) > 0 {
					for _, f := range filelist {
						// ls-style date formatting: year for old files,
						// time-of-day for recent ones.
						var niceDate, niceSize string
						date := time.Unix(0, int64(f.ModTime))
						if time.Since(date).Hours() > 24*300 { // 300 days
							niceDate = date.Format("Jan _2 2006")
						} else {
							niceDate = date.Format("Jan _2 15:04")
						}
						if f.ContentType != ContentTypeDirectory {
							niceSize = core.ShortHumanSize(f.FileSize)
						}
						fmt.Printf("%-10s %6s %-12s %s\n", os.FileMode(f.FileMode), niceSize, niceDate /*date.Format(time.RFC3339)*/, f.FileName)
					}
				} else {
					fmt.Println("No files matching")
				}
			}
		} else {
			fmt.Println("Dataset is empty or does not exist")
		}
	})

	// --- store: options and command ---
	var pidName string = ""
	var retainWeeks int64 = 0
	var retainDays int64 = 0
	var intervalBackup int64 = 0
	cmd.StringOption("pid", "store", "<filename>", "Create a PID file (lock-file)", &pidName, cmd.Standard)
	cmd.StringListOption("ignore", "store", "<pattern>", "Ignore files matching pattern", &DefaultIgnoreList, cmd.Standard|cmd.Preference)
	cmd.IntOption("interval", "store", "<minutes>", "Keep running backups every <minutes> until interrupted", &intervalBackup, cmd.Standard)
	cmd.IntOption("retaindays", "store", "<days>", "Remove backups older than 24h but keep one per day for <days>, 0 = keep all daily", &retainDays, cmd.Standard|cmd.Preference)
	cmd.IntOption("retainweeks", "store", "<weeks>", "Remove backups older than 24h but keep one per week for <weeks>, 0 = keep all weekly", &retainWeeks, cmd.Standard|cmd.Preference)
	cmd.Command("store", "<dataset> (<folder> | <file>)...", func() {
		// Compile the ignore list: expand env vars, validate pattern
		// syntax, and classify each entry as directory-only (trailing
		// path separator) and/or path-matching (contains a separator).
		for _, d := range DefaultIgnoreList {
			ignore := ignoreEntry{pattern: d, match: core.ExpandEnv(d)} // Expand ignore patterns
			if ignore.match == "" {
				continue
			}
			if _, err := filepath.Match(ignore.match, "ignore"); err != nil {
				panic(errors.New(fmt.Sprintf("Invalid ignore pattern %s", ignore.pattern)))
			}
			if os.IsPathSeparator(ignore.match[len(ignore.match)-1]) {
				ignore.match = ignore.match[:len(ignore.match)-1]
				ignore.dirmatch = true
			}
			if strings.IndexRune(ignore.match, os.PathSeparator) >= 0 { // path in pattern
				ignore.pathmatch = true
			}
			session.ignoreList = append(session.ignoreList, ignore)
		}
		if len(cmd.Args) < 3 {
			panic(errors.New("Missing dataset argument"))
		}
		if len(cmd.Args) < 4 {
			panic(errors.New("Missing source file or folder argument"))
		}
		if pidName != "" {
			// Exit quietly (code 0) if another instance holds the lock.
			var err error
			if lockFile, err = lockfile.Lock(pidName); err != nil {
				fmt.Println(err)
				os.Exit(0)
			} else {
				defer lockFile.Unlock()
			}
		}
		// latestBackup doubles as "has a backup happened" flag and as the
		// nanosecond timestamp used for interval scheduling.
		var latestBackup uint64
		for latestBackup == 0 || intervalBackup > 0 { // Keep looping if interval backupping
			func() {
				session.Connect()
				defer session.Close(true)
				if latestBackup > 0 || intervalBackup == 0 {
					// Run a backup now (one-shot mode, or interval mode
					// after the schedule has been established).
					session.State = &core.DatasetState{StateID: session.Client.SessionNonce}
					session.Store(cmd.Args[2], cmd.Args[3:]...)
					if retainWeeks > 0 || retainDays > 0 {
						session.Retention(cmd.Args[2], int(retainDays), int(retainWeeks))
					}
					latestBackup = binary.BigEndian.Uint64(session.State.StateID[:])
					date := time.Unix(0, int64(latestBackup))
					fmt.Printf("Backup %s %x (%s) completed\n", cmd.Args[2], session.State.StateID[:], date.Format(time.RFC3339))
				} else {
					// First pass of an interval run: look up the newest
					// valid backup so scheduling resumes from it.
					list := session.Client.ListDataset(cmd.Args[2])
					for i := len(list.States) - 1; i >= 0; i-- {
						if list.States[i].StateFlags&core.StateFlagInvalid != core.StateFlagInvalid {
							latestBackup = binary.BigEndian.Uint64(list.States[i].State.StateID[:])
							break
						}
					}
				}
			}()
			if intervalBackup > 0 {
				// Sleep until latest backup time + interval, or run
				// immediately if that moment has already passed.
				date := time.Unix(0, int64(latestBackup)).Add(time.Duration(intervalBackup) * time.Minute)
				if date.After(time.Now()) {
					fmt.Printf("Next backup scheduled for %s\n", date.Format(time.RFC3339))
					// fmt.Println(time.Since(date))
					time.Sleep(-time.Since(date))
				} else {
					latestBackup = 1 // trigger a new backup already
				}
			}
		}
	})

	// --- restore: restore files from a backup state into a local folder ---
	cmd.Command("restore", "<dataset> (<backup id>|.) [\"<path>\"...] <dest-folder>", func() {
		if len(cmd.Args) < 3 {
			panic(errors.New("Missing dataset argument"))
		}
		if len(cmd.Args) < 4 {
			panic(errors.New("Missing backup id (or \".\")"))
		}
		if len(cmd.Args) < 5 {
			panic(errors.New("Missing destination folder argument"))
		}
		session.Connect()
		defer session.Close(true)
		list := session.Client.ListDataset(cmd.Args[2])
		// Last arg is the destination; everything between the backup id
		// and the destination is the optional path filter list.
		var stateid string = cmd.Args[3]
		var restorepath string = cmd.Args[len(cmd.Args)-1]
		var restorelist []string = cmd.Args[4 : len(cmd.Args)-1]
		var found int = -1
		if stateid == "." {
			found = len(list.States) - 1 // newest state
		} else {
			for i, e := range list.States {
				if stateid == fmt.Sprintf("%x", e.State.StateID) {
					found = i
					break
				}
			}
			if found < 0 {
				panic(errors.New("Backup id " + cmd.Args[3] + " not found in dataset " + cmd.Args[2]))
			}
		}
		if found < 0 {
			// Only reachable via "." on an empty dataset.
			panic(errors.New("No backup found under dataset " + cmd.Args[2]))
		}
		timestamp := binary.BigEndian.Uint64(list.States[found].State.StateID[:])
		date := time.Unix(0, int64(timestamp))
		fmt.Printf("Restoring from %x (%s) to path %s\n", list.States[found].State.StateID, date.Format(time.RFC3339), restorepath)
		session.Restore(list.States[found].State.BlockID, restorepath, restorelist...)
	})

	// --- diff: compare a backup state to a local folder ---
	cmd.Command("diff", "<dataset> (<backup id>|.) [\"<path>\"...] <local-folder>", func() {
		if len(cmd.Args) < 3 {
			panic(errors.New("Missing dataset argument"))
		}
		if len(cmd.Args) < 4 {
			panic(errors.New("Missing backup id (or \".\")"))
		}
		if len(cmd.Args) < 5 {
			panic(errors.New("Missing destination folder argument"))
		}
		session.Connect()
		defer session.Close(true)
		list := session.Client.ListDataset(cmd.Args[2])
		// Same argument layout and state lookup as the restore command.
		var stateid string = cmd.Args[3]
		var restorepath string = cmd.Args[len(cmd.Args)-1]
		var restorelist []string = cmd.Args[4 : len(cmd.Args)-1]
		var found int = -1
		if stateid == "." {
			found = len(list.States) - 1
		} else {
			for i, e := range list.States {
				if stateid == fmt.Sprintf("%x", e.State.StateID) {
					found = i
					break
				}
			}
			if found < 0 {
				panic(errors.New("Backup id " + cmd.Args[3] + " not found in dataset " + cmd.Args[2]))
			}
		}
		if found < 0 {
			panic(errors.New("No backup found under dataset " + cmd.Args[2]))
		}
		timestamp := binary.BigEndian.Uint64(list.States[found].State.StateID[:])
		date := time.Unix(0, int64(timestamp))
		fmt.Printf("Comparing %x (%s) to path %s\n", list.States[found].State.StateID, date.Format(time.RFC3339), restorepath)
		session.DiffRestore(list.States[found].State.BlockID, restorepath, restorelist...)
		// Exit code 2 signals differences were found (useful in scripts).
		if session.DifferentFiles > 0 {
			os.Exit(2)
		}
	})

	// --- remove: delete one backup state from a dataset ---
	cmd.Command("remove", "<dataset> <backup id>", func() {
		if len(cmd.Args) < 3 {
			panic(errors.New("Missing dataset argument"))
		}
		if len(cmd.Args) < 4 {
			panic(errors.New("Missing backup id"))
		}
		session.Connect()
		defer session.Close(true)
		list := session.Client.ListDataset(cmd.Args[2])
		var stateid string = cmd.Args[3]
		var found int = -1
		for i, e := range list.States {
			if stateid == fmt.Sprintf("%x", e.State.StateID) {
				found = i
				break
			}
		}
		if found < 0 {
			panic(errors.New("Backup id " + cmd.Args[3] + " not found in dataset " + cmd.Args[2]))
		}
		fmt.Printf("Removing backup %x from %s\n", list.States[found].State.StateID, cmd.Args[2])
		session.Client.RemoveDatasetState(cmd.Args[2], list.States[found].State.StateID)
	})

	// Shut down cleanly on interrupt: close the session without saving,
	// release the PID lock, exit with code 2.
	// NOTE(review): os.Kill (SIGKILL) cannot be caught on most platforms,
	// so that Notify registration is likely a no-op — confirm.
	signalchan := make(chan os.Signal, 1)
	defer close(signalchan)
	signal.Notify(signalchan, os.Interrupt)
	signal.Notify(signalchan, os.Kill)
	go func() {
		for range signalchan {
			if session != nil {
				session.Close(false)
			}
			if lockFile != nil {
				lockFile.Unlock()
			}
			os.Exit(2)
		}
	}()

	// Parse options/commands and dispatch to the handlers registered above.
	if err := cmd.Parse(); err != nil {
		panic(err)
	}
}