func (s *bucketstore) compactSwapFile(bsf *bucketstorefile, compactPath string) error {
	idx, ver, err := parseStoreFileName(filepath.Base(bsf.path), STORE_FILE_SUFFIX)
	if err != nil {
		return err
	}
	nextName := makeStoreFileName(idx, ver+1, STORE_FILE_SUFFIX)
	nextPath := path.Join(filepath.Dir(bsf.path), nextName)
	if err = os.Rename(compactPath, nextPath); err != nil {
		return err
	}
	nextFile, err := fileService.OpenFile(nextPath, os.O_RDWR|os.O_CREATE)
	if err != nil {
		return err
	}
	nextBSF := NewBucketStoreFile(nextPath, nextFile, bsf.stats)
	nextStore, err := gkvlite.NewStore(nextBSF)
	if err != nil {
		// TODO: Rollback the previous *.orig rename.
		return err
	}
	nextBSF.store = nextStore
	atomic.StorePointer(&s.bsf, unsafe.Pointer(nextBSF))
	bsf.apply(func() {
		bsf.purge = true // Mark the old file as purgable.
	})
	return nil
}
func (m *mainMenuUI) loadGame(name string) {
	var w World
	f, err := os.OpenFile(filepath.Join(SaveDirName, name+".sav"), os.O_RDWR, 0666)
	if err != nil {
		m.err = err.Error()
		m.state = menuStateError
		return
	}
	w.store, err = gkvlite.NewStore(f)
	if err != nil {
		m.err = err.Error()
		m.state = menuStateError
		f.Close()
		return
	}
	w.storeFile = f
	err = w.init()
	if err != nil {
		m.err = err.Error()
		m.state = menuStateError
		f.Close()
		return
	}
	worldLock.Lock()
	world = &w
	worldLock.Unlock()
}
func initkv() {
	// Open (or create) the database file next to the executable and set up
	// the users and channels collections. Errors are ignored here.
	dir, _ := filepath.Abs(filepath.Dir(os.Args[0]))
	kvFile, _ = os.OpenFile(fmt.Sprintf("%s/database.gkvlite", dir), os.O_RDWR|os.O_CREATE, 0666)
	KV, _ = gkvlite.NewStore(kvFile)
	Users = KV.SetCollection("users", nil)
	Channels = KV.SetCollection("channels", nil)
}
func TestDumpColls(t *testing.T) {
	x := 0
	printf := func(format string, a ...interface{}) (n int, err error) {
		x++
		return 0, nil
	}
	store, _ := gkvlite.NewStore(nil)
	c := store.SetCollection("test", nil)
	n, err := dumpColl(printf, c, "")
	if err != nil || n != 0 || x != 0 {
		t.Errorf("expected dumpColl on empty coll to work, got: %v, %v", n, err)
	}
	n, err = dumpCollAsItems(printf, c, "")
	if err != nil || n != 0 || x != 0 {
		t.Errorf("expected dumpCollAsItems on empty coll to work, got: %v, %v", n, err)
	}
	c.Set([]byte("test-key"), (&item{}).toValueBytes())
	x = 0
	n, err = dumpColl(printf, c, "")
	if err != nil || n != 1 || x != 1 {
		t.Errorf("expected dumpColl on 1-item coll to work, got: %v, %v", n, err)
	}
	x = 0
	n, err = dumpCollAsItems(printf, c, "")
	if err != nil || n != 1 || x != 1 {
		t.Errorf("expected dumpCollAsItems on 1-item coll to work, got: %v, %v", n, err)
	}
}
// Returns a gkvlite store backed by the cache file.
func getCacheStore() *gkvlite.Store {
	f, err := os.OpenFile(cachePath, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0600)
	if err != nil {
		log.Fatal(err)
	}
	s, err := gkvlite.NewStore(f)
	if err != nil {
		log.Fatal(err)
	}
	return s
}
func newBucketStore(path string, settings BucketSettings) (res *bucketstore, err error) {
	var file FileLike
	if settings.MemoryOnly < MemoryOnly_LEVEL_PERSIST_NOTHING {
		file, err = fileService.OpenFile(path, os.O_RDWR|os.O_CREATE)
		if err != nil {
			fmt.Printf("!!!! %v\n", err)
			return nil, err
		}
	}
	bsf := NewBucketStoreFile(path, file, &BucketStoreStats{})
	bsfForGKVLite := bsf
	if settings.MemoryOnly >= MemoryOnly_LEVEL_PERSIST_NOTHING {
		bsfForGKVLite = nil
	}
	store, err := gkvlite.NewStore(bsfForGKVLite)
	if err != nil {
		return nil, err
	}
	bsf.store = store
	var bsfMemoryOnly *bucketstorefile
	if settings.MemoryOnly > MemoryOnly_LEVEL_PERSIST_EVERYTHING {
		bsfMemoryOnly = NewBucketStoreFile(path, file, bsf.stats)
		bsfMemoryOnly.store, err = gkvlite.NewStore(nil)
		if err != nil {
			return nil, err
		}
	}
	return &bucketstore{
		bsf:           unsafe.Pointer(bsf),
		bsfMemoryOnly: bsfMemoryOnly,
		endch:         make(chan bool),
		partitions:    make(map[uint16]*partitionstore),
		stats:         bsf.stats,
	}, nil
}
func NewPster(dbpath string, errlog *log.Logger) (*SimplePster, error) { // {{{1
	var store *gkvlite.Store
	file, err := os.OpenFile(dbpath, os.O_RDWR|os.O_CREATE|os.O_SYNC, 0660)
	if err != nil {
		return nil, err
	}
	store, err = gkvlite.NewStore(file)
	if err != nil {
		return nil, err
	}
	return &SimplePster{
		file:    file,
		store:   store,
		rlog:    store.SetCollection("rlog", nil),
		rfields: store.SetCollection("rfields", nil),
		err:     errlog,
	}, nil
}
func mainDo(args []string) error {
	if len(args) < 1 {
		return fmt.Errorf("missing gkvlite file arg")
	}
	fname, args := args[0], args[1:]
	f, err := os.Open(fname)
	if err != nil || f == nil {
		return fmt.Errorf("could not open file: %v", fname)
	}
	defer f.Close()
	s, err := gkvlite.NewStore(f)
	if err != nil || s == nil {
		return fmt.Errorf("could not create store from file: %v", fname)
	}
	cmd := "names"
	if len(args) > 0 {
		cmd, args = args[0], args[1:]
	}
	switch cmd {
	case "names":
		collNames := s.GetCollectionNames()
		for _, collName := range collNames {
			fmt.Printf("%s\n", collName)
		}
	case "items":
		if len(args) < 1 {
			return fmt.Errorf("missing 'items <collection-name>' param")
		}
		collName := args[0]
		coll := s.GetCollection(collName)
		if coll == nil {
			return fmt.Errorf("could not find collection: %v", collName)
		}
		return coll.VisitItemsAscendEx(nil, true, emitItem)
	default:
		return fmt.Errorf("unknown command: %v", cmd)
	}
	return nil
}
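// Note: the emitItem visitor passed to VisitItemsAscendEx above is defined
// elsewhere and not shown here. As a rough, hypothetical sketch (assuming the
// github.com/steveyen/gkvlite visitor shape of func(*gkvlite.Item, uint64) bool),
// a simple item printer might look like:
func printItem(i *gkvlite.Item, depth uint64) bool {
	// Key and Val are the item's exported []byte fields.
	fmt.Printf("%s = %s\n", i.Key, i.Val)
	return true // Returning true continues the visit; false stops it.
}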
// compress rewrites the gkvlite file at fn to reclaim space from stale data:
// it copies the live store contents to a temp file, truncates the original
// file, and copies the compacted contents back into it.
func compress(fn string) (err error) {
	f, err := os.OpenFile(fn, os.O_RDWR, 0666)
	if err != nil {
		return err
	}
	defer func() {
		err_ := f.Close()
		if err == nil {
			err = err_
		}
	}()
	tmpf, err := ioutil.TempFile("", "gkvlite")
	if err != nil {
		return err
	}
	defer os.Remove(tmpf.Name())
	defer tmpf.Close()
	store, err := gkvlite.NewStore(f)
	if err != nil {
		return err
	}
	store2, err := store.CopyTo(tmpf, 1000000)
	if err != nil {
		return err
	}
	err = f.Truncate(0)
	if err != nil {
		return err
	}
	_, err = store2.CopyTo(f, 1000000)
	if err != nil {
		return err
	}
	return nil
}
// NewBasicStore initializes a BasicStore and creates the jobs collection.
//
// It initializes data storage at the file path passed in as an argument.
// It returns an error if the store could not be initialized or if creating
// the jobs collection failed.
func NewBasicStore(fileName string) (*BasicStore, error) {
	file, err := os.OpenFile(fileName, os.O_RDWR|os.O_CREATE, 0600)
	if err != nil {
		return nil, err
	}
	s, err := gkvlite.NewStore(file)
	if err != nil {
		return nil, err
	}
	if s.GetCollection("jobs") == nil {
		s.SetCollection("jobs", nil)
		if err := s.Flush(); err != nil {
			return nil, err
		}
	}
	return &BasicStore{
		store: s,
	}, nil
}
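// All of the constructors above follow the same pattern: open a file, wrap it
// with gkvlite.NewStore, and create or fetch named collections. A minimal,
// self-contained sketch of that pattern, assuming the standard
// github.com/steveyen/gkvlite API (the file and collection names below are
// illustrative only):
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/steveyen/gkvlite"
)

func main() {
	f, err := os.OpenFile("example.gkvlite", os.O_RDWR|os.O_CREATE, 0600)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	s, err := gkvlite.NewStore(f)
	if err != nil {
		log.Fatal(err)
	}
	defer s.Close()

	// Create a named collection; nil uses the default byte-wise key comparison.
	c := s.SetCollection("example", nil)
	if err := c.Set([]byte("key"), []byte("value")); err != nil {
		log.Fatal(err)
	}
	// Persist pending changes to the underlying file.
	if err := s.Flush(); err != nil {
		log.Fatal(err)
	}

	v, err := c.Get([]byte("key"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("key = %s\n", v)
}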
func (s *bucketstore) compactGo(bsf *bucketstorefile, compactPath string) error {
	// TODO: Should cleanup all old, previous attempts to rescue disk space.
	os.Remove(compactPath) // Clean up any previous attempts.
	compactFile, err := fileService.OpenFile(compactPath, os.O_RDWR|os.O_CREATE|os.O_EXCL)
	if err != nil {
		return err
	}
	defer func() {
		if compactFile != nil {
			compactFile.Close()
			os.Remove(compactPath)
		}
	}()
	compactStore, err := gkvlite.NewStore(compactFile)
	if err != nil {
		return err
	}
	// TODO: Parametrize writeEvery.
	writeEvery := 1000
	lastChanges := make(map[uint16]*gkvlite.Item) // Last items in changes colls.
	collNames := bsf.store.GetCollectionNames()   // Names of collections to process.
	collRest := make([]string, 0, len(collNames)) // Names of unprocessed collections.
	vbids := make([]uint16, 0, len(collNames))    // VBucket id's that we processed.
	// Process compaction in a few steps: first, unlocked,
	// snapshot-based collection copying meant to handle most of each
	// vbucket's data; and then, locked copying of any vbucket
	// mutations (deltas) that happened in the meantime. Then, while
	// still holding all vbucket collection locks, we copy any
	// remaining non-vbucket collections and atomically swap the files.
	for _, collName := range collNames {
		if !strings.HasSuffix(collName, COLL_SUFFIX_CHANGES) {
			if !strings.HasSuffix(collName, COLL_SUFFIX_KEYS) {
				// It's neither a changes nor a keys collection.
				collRest = append(collRest, collName)
			}
			continue
		}
		vbid, lastChange, err := s.copyVBucketColls(bsf, collName, compactStore, writeEvery)
		if err != nil {
			return err
		}
		lastChanges[uint16(vbid)] = lastChange
		vbids = append(vbids, uint16(vbid))
	}
	return s.copyBucketStoreDeltas(bsf, compactStore, vbids, 0, lastChanges, writeEvery,
		func() (err error) {
			// Copy any remaining (simple) collections (like COLL_VBMETA).
			err = s.copyRemainingColls(bsf, collRest, compactStore, writeEvery)
			if err != nil {
				return err
			}
			err = compactStore.Flush()
			if err != nil {
				return err
			}
			compactFile.Close()
			return s.compactSwapFile(bsf, compactPath) // The last step.
		})
}
func TestCollRangeCopy(t *testing.T) {
	s, _ := gkvlite.NewStore(nil)
	x := s.SetCollection("x", nil)
	testFillColl(x, []string{"b", "c", "d"})

	d, _ := gkvlite.NewStore(nil)
	dx := d.SetCollection("dx", nil)
	err := collRangeCopy(x, dx, []byte("a"), []byte("a"), []byte("z"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{"b", "c", "d"}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("a"), []byte("a"), []byte("a"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("z"), []byte("z"), []byte("a"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("a"), []byte("a"), []byte("b"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("d"), []byte("z"), []byte("z"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("d"), []byte("d"), []byte("z"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{"d"}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("a"), []byte("c"), []byte("d"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{"c"}, nil)

	d, _ = gkvlite.NewStore(nil)
	dx = d.SetCollection("dx", nil)
	err = collRangeCopy(x, dx, []byte("a"), []byte("b"), []byte("c"))
	if err != nil {
		t.Errorf("expected collRangeCopy to not have error")
	}
	testCheckColl(t, dx, "a", []string{"b"}, nil)
}
func (m *mainMenuUI) inputKey(key termbox.Key, ch rune, mod termbox.Modifier) bool {
	switch m.state {
	case menuStateMain:
		switch {
		case key == termbox.KeyArrowDown:
			m.choiceIndex = (m.choiceIndex + 1) % (len(m.saveNames) + 1)
		case key == termbox.KeyArrowUp:
			m.choiceIndex = (m.choiceIndex + (len(m.saveNames) + 1 - 1)) % (len(m.saveNames) + 1)
		case key == termbox.KeyArrowLeft || key == termbox.KeyArrowRight:
			// silently ignore
		case key == termbox.KeyEnter:
			if m.choiceIndex < len(m.saveNames) {
				m.loadGame(m.saveNames[m.choiceIndex])
			} else {
				switch m.choiceIndex - len(m.saveNames) {
				case 0:
					m.newGame()
				}
			}
		case key == termbox.KeyEsc:
			return false
		default:
			panic(fmt.Sprintf("%v, %v, %v", key, ch, mod))
		}
	case menuStateError:
		switch {
		case key == termbox.KeyEsc:
			m.err = ""
			m.state = menuStateMain
		default:
			panic(fmt.Sprintf("%v, %v, %v", key, ch, mod))
		}
	case menuStateNew:
		const fieldCount = 2
		switch {
		case key == termbox.KeyEsc:
			m.state = menuStateMain
			m.choiceIndex = len(m.saveNames)
		case key == termbox.KeyEnter:
			m.choiceIndex++
			if m.choiceIndex >= fieldCount {
				m.choiceIndex = fieldCount - 1
				if len(m.saveName) == 0 {
					m.choiceIndex = 0
					fmt.Print("\a")
					return true
				}
				_ = os.MkdirAll(SaveDirName, 0777)
				f, err := os.OpenFile(filepath.Join(SaveDirName, string(m.saveName)+".sav"), os.O_CREATE|os.O_EXCL|os.O_RDWR, 0666)
				var w *World
				if err == nil {
					var store *gkvlite.Store
					store, err = gkvlite.NewStore(f)
					w = &World{store: store, storeFile: f}
				}
				if err == nil {
					err = w.setSeed(NewSeed(string(m.seed)))
				}
				if err == nil {
					err = w.init()
				}
				if err == nil {
					m.saveNames = append(m.saveNames, string(m.saveName))
					worldLock.Lock()
					world = w
					worldLock.Unlock()
				} else {
					if f != nil {
						f.Close()
					}
					m.err = err.Error()
					m.state = menuStateError
				}
			}
		case key == termbox.KeyArrowDown:
			m.choiceIndex = (m.choiceIndex + 1) % fieldCount
		case key == termbox.KeyArrowUp:
			m.choiceIndex = (m.choiceIndex + (fieldCount - 1)) % fieldCount
		case key == termbox.KeyArrowLeft || key == termbox.KeyArrowRight:
			fmt.Print("\a")
		case m.choiceIndex == 0 && (key == termbox.KeyBackspace || key == termbox.KeyBackspace2):
			if len(m.saveName) == 0 {
				fmt.Print("\a")
			} else {
				m.saveName = m.saveName[:len(m.saveName)-1]
			}
		case m.choiceIndex == 0 && key == termbox.KeySpace:
			if len(m.saveName) == 0 {
				fmt.Print("\a")
			} else {
				m.saveName = append(m.saveName, ' ')
			}
		case m.choiceIndex == 0 && ch != 0:
			// unicode.Punctuation is not included due to characters like /
			if ch != '_' && ch != '-' && !unicode.In(ch, unicode.Letter, unicode.Number, unicode.Symbol) {
				fmt.Print("\a")
			} else {
				m.saveName = append(m.saveName, ch)
			}
		case m.choiceIndex == 1 && (key == termbox.KeyBackspace || key == termbox.KeyBackspace2):
			if len(m.seed) == 0 {
				fmt.Print("\a")
			} else {
				m.seed = m.seed[:len(m.seed)-1]
			}
		case m.choiceIndex == 1 && key == termbox.KeySpace:
			m.seed = append(m.seed, ' ')
		case m.choiceIndex == 1 && ch != 0:
			m.seed = append(m.seed, ch)
		default:
			panic(fmt.Sprintf("%v, %v, %v", key, ch, mod))
		}
	default:
		panic(fmt.Sprintf("%v, %v, %v", key, ch, mod))
	}
	return true
}
func main() {
	//var start string
	//flag.StringVar(&start, "start", "", "starting url")
	//flag.Parse()
	flag.Parse()
	if len(flag.Args()) < 2 {
		log.Fatal("Usage: mirror start-url credentials")
	}
	start := flag.Args()[0]
	username, password := Split2(flag.Args()[1], ":")

	// http://stackoverflow.com/questions/18414212/golang-how-to-follow-location-with-cookie
	options := cookiejar.Options{}
	cookie_jar, err := cookiejar.New(&options)
	if err != nil {
		log.Fatal(err)
	}
	client := &http.Client{
		Jar: cookie_jar,
	}
	v := url.Values{"user": {username}, "passwrd": {password}}
	req, err := http.NewRequest("POST", start+"index.php?action=login2", bytes.NewReader([]byte(v.Encode()))) // ugh
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Add("Accept-Encoding", "identity")
	pres, err := client.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer pres.Body.Close()
	contents, err := ioutil.ReadAll(pres.Body)
	if err != nil {
		log.Fatal(err)
	}
	if !strings.Contains(string(contents), "action=unread") {
		log.Fatal("Login did not work")
	}

	f, err := os.OpenFile("smfmirror.gkvlite", os.O_RDWR, 0666)
	if err != nil {
		f, err = os.Create("smfmirror.gkvlite")
	}
	if err != nil {
		panic(err)
	}
	s, err := gkvlite.NewStore(f)
	if err != nil {
		panic(err)
	}
	c := s.GetCollection("smfmirror")
	if c == nil {
		c = s.SetCollection("smfmirror", nil)
	}

	pboard_links, err := c.Get([]byte("board_links"))
	if err != nil {
		panic(err)
	}
	var board_links []string
	if pboard_links != nil {
		err := json.Unmarshal(pboard_links, &board_links)
		if err != nil {
			panic(err)
		}
	} else {
		board_links = loadBoardLinks(client, start)
		b, err := json.Marshal(board_links)
		if err != nil {
			panic(err)
		}
		//c.Set([]byte("hello"), []byte("world"))
		//a, x := c.Get([]byte("hello"))
		//fmt.Printf("%v %v\n", a, x)
		c.Set([]byte("board_links"), b)
		s.Flush()
		s.Close()
		f.Sync()
		f.Close()
	}
	for _, url := range board_links {
		fmt.Printf("%v\n", url)
	}
	//fmt.Printf("%v\n", board_links)
}