func getDatasetNameFromFile(filename string) core.String {
	// Open the file, read and check the file headers
	fil, err := os.Open(filepath.Join(datDirectory, "account", filename))
	PanicOn(err)
	defer fil.Close()

	var header dbFileHeader
	header.Unserialize(fil)
	if header.filetype != dbFileTypeTransaction {
		panic(errors.New("File " + filename + " is not a valid transaction file"))
	}
	datasetName := header.datasetName

	// Decode the dataset name hash from the filename and verify it against the header
	var datasetNameH core.Byte128
	{
		d, err := base64.RawURLEncoding.DecodeString(filename[23:45])
		PanicOn(err)
		datasetNameH.Set(d)
	}
	datasetHashB := core.Hash([]byte(datasetName))
	if datasetHashB.Compare(datasetNameH) != 0 {
		panic(errors.New("Header for " + filename + " does not contain the correct dataset name"))
	}
	return datasetName
}
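// The transaction filenames handled here are assumed to follow the layout
// "<accountNameH>.<datasetNameH>.trn", where each hash is a 16-byte core.Byte128
// rendered as 22 unpadded base64url characters; that is why the dataset name
// hash above is decoded from filename[23:45]. The helper below is an
// illustrative sketch of that decode step only and is not referenced by the
// handlers in this file:
func decodeDatasetNameHashFromFilename(filename string) (hash core.Byte128) {
	// 22 base64url characters decode to the 16 bytes of the dataset name hash
	decoded, err := base64.RawURLEncoding.DecodeString(filename[23:45])
	PanicOn(err)
	hash.Set(decoded)
	return hash
}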
func (handler *AccountHandler) CollectAllRootBlocks(skipInvalid bool) (rootBlocks []BlockSource) {
	// Open each dataset and check the chains
	dir, err := os.Open(filepath.Join(datDirectory, "account"))
	PanicOn(err)
	defer dir.Close()
	dirlist, err := dir.Readdir(-1)
	PanicOn(err)

	for _, info := range dirlist {
		name := info.Name()
		if m, _ := filepath.Match("??????????????????????.??????????????????????.trn", name); m {
			// Read the accountNameH from the filename
			var accountName string
			var accountNameH core.Byte128
			{
				decoded, err := base64.RawURLEncoding.DecodeString(name[:22])
				PanicOn(err)
				accountNameH.Set(decoded)
				info := readInfoFile(accountNameH)
				if info != nil {
					accountName = string(info.AccountName)
				}
			}

			datasetName := getDatasetNameFromFile(name)
			collection := readDBFile(accountNameH, datasetName)
			if collection == nil {
				panic("CollectAllRootBlocks was called on a DB file which cannot be opened")
			}
			for _, e := range collection.States {
				if e.StateFlags&core.StateFlagInvalid == core.StateFlagInvalid {
					if skipInvalid {
						core.Log(core.LogWarning, "All data referenced by %s.%s.%x will be marked for removal unless referenced elsewhere", accountName, datasetName, e.State.StateID[:])
					} else {
						panic(fmt.Errorf("Dataset %s.%s.%x is referencing data with a broken block chain", accountName, datasetName, e.State.StateID[:]))
					}
				} else {
					rootBlocks = append(rootBlocks, BlockSource{BlockID: e.State.BlockID, StateID: e.State.StateID, DatasetName: datasetName, AccountNameH: accountNameH, AccountName: accountName})
				}
			}
		}
	}
	return rootBlocks
}
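// The invalid-state test above relies on the bitmask idiom "flags&mask == mask",
// which is true only when every bit of the mask is set in flags. A minimal,
// generic sketch of the same check (the concrete value and width of
// core.StateFlagInvalid are defined elsewhere; uint32 here is an assumption):
func hasAllFlags(flags, mask uint32) bool {
	return flags&mask == mask
}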
func (handler *AccountHandler) RebuildAccountFiles() (rootBlocks []BlockSource) {
	// Open each dataset and check the chains
	dir, err := os.Open(filepath.Join(datDirectory, "account"))
	PanicOn(err)
	defer dir.Close()
	dirlist, err := dir.Readdir(-1)
	PanicOn(err)

	// First pass: clear all cached dataset information from the info files
	for _, info := range dirlist {
		name := info.Name()
		if m, _ := filepath.Match("??????????????????????.info", name); m {
			// Read the accountNameH from the filename
			var accountNameH core.Byte128
			{
				decoded, err := base64.RawURLEncoding.DecodeString(name[:22])
				PanicOn(err)
				accountNameH.Set(decoded)
			}
			info := readInfoFile(accountNameH)
			if info != nil {
				info.Datasets = nil
				writeInfoFile(accountNameH, *info)
			}
		}
	}

	// Second pass: regenerate each dataset DB file from its transaction file
	for _, info := range dirlist {
		name := info.Name()
		if m, _ := filepath.Match("??????????????????????.??????????????????????.trn", name); m {
			// Read the accountNameH from the filename
			var accountName string
			var accountNameH core.Byte128
			{
				decoded, err := base64.RawURLEncoding.DecodeString(name[:22])
				PanicOn(err)
				accountNameH.Set(decoded)
				info := readInfoFile(accountNameH)
				if info != nil {
					accountName = string(info.AccountName)
				}
			}

			datasetName := getDatasetNameFromFile(name)
			core.Log(core.LogDebug, "Regenerating file %s.db (%s.%s)", name[:45], accountName, datasetName)

			// Generate the DB file from transactions
			states := stateArrayFromTransactions(accountNameH, datasetName)
			sort.Sort(states)
			writeDBFile(accountNameH, datasetName, &dbStateCollection{States: states})

			for _, e := range states {
				rootBlocks = append(rootBlocks, BlockSource{BlockID: e.State.BlockID, StateID: e.State.StateID, DatasetName: datasetName, AccountNameH: accountNameH, AccountName: accountName})
			}
		}
	}
	return rootBlocks
}
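// sort.Sort above requires the slice returned by stateArrayFromTransactions to
// implement sort.Interface. The real type is defined elsewhere in this package;
// the sketch below only illustrates the pattern, with assumed names and an
// assumed ordering key (the StateID hash):
type exampleState struct {
	StateID core.Byte128
}

type exampleStateArray []exampleState

func (a exampleStateArray) Len() int           { return len(a) }
func (a exampleStateArray) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a exampleStateArray) Less(i, j int) bool { return a[i].StateID.Compare(a[j].StateID) < 0 }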