Example #1
// run starts one command-processing goroutine per database and then routes
// incoming commands from dbm.ComChan to the right database by hashing the key.
func (dbm *DBManager) run() {

	// One worker goroutine is started per DB bucket; the loop variables are
	// passed as arguments so each goroutine gets its own db and channel.
	for i, db := range dbm.dbs {

		go func(db *DB, comChan chan com.Command) {

			var l int
			var args []string
			var command com.Command

			for {

				command = <-comChan
				l = len(command.Args)

				if l == 0 {
					// Nothing to dispatch on: reject instead of panicking on Args[0] below.
					command.ReplyChan <- "-ERR Empty command\r\n"
					continue
				}

				// Args[0] is the command name; everything after it is handed to the handler.
				args = command.Args[1:l]

				// Look up the handler registered under the lowercased command name.
				db_func, ok := dbm.funcs[strings.ToLower(command.Args[0])]
				if ok {
					if str, err := db_func.function(db, args); err != nil {
						command.ReplyChan <- fmt.Sprintf("-ERR %s\r\n", err)
					} else {
						command.ReplyChan <- str
					}
					// Commands with side effects are forwarded to the persistence layer.
					if dbm.PersistChan != nil && db_func.sideEffects {
						dbm.PersistChan <- command.CommandRaw
					}
				} else {
					command.ReplyChan <- "-ERR Unknown command\r\n"
				}

			}

		}(db, dbm.dbChans[i])

	}

	// The manager itself routes incoming commands: the key in Args[1] is
	// hashed to pick the database whose worker will execute the command.
	var l int
	var command com.Command

	for {
		command = <-dbm.ComChan
		l = len(command.Args)

		// Commands without a key go to bucket 0.
		bucket := 0
		if l > 1 {
			bucket = murmur.HashBucket(command.Args[1], dbm.NumDBs)
		}

		dbm.dbChans[bucket] <- command
	}
}
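
A hypothetical caller makes the command flow above easier to follow. This is a minimal sketch, not part of the original source: it assumes com.Command can be built as a struct literal with exported Args and ReplyChan fields, that ReplyChan is a plain chan string, that dbm.run() is already running in its own goroutine, and that a "GET" handler is registered in dbm.funcs.

func getExample(dbm *DBManager, key string) string {
	// A buffered reply channel keeps the worker from blocking on the answer.
	reply := make(chan string, 1)

	// Args[0] selects the handler; Args[1] is the key that run() hashes to
	// pick a bucket.
	dbm.ComChan <- com.Command{
		Args:      []string{"GET", key},
		ReplyChan: reply,
	}

	// The worker answers either with the handler's reply or an -ERR line.
	return <-reply
}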
Example #2
// LoadFromGobDecoder restores the databases from a gob-encoded dump, routing
// each decoded key to its bucket with the same hash used when it was written.
func (dbm *DBManager) LoadFromGobDecoder(db_dec *gob.Decoder) error {

	// The dump begins with the number of keys it contains.
	var storeLen int
	err := db_dec.Decode(&storeLen)
	if err != nil {
		return err
	}

	for i := 0; i < storeLen; i++ {

		var key string
		var elem *data.Entry
		err = db_dec.Decode(&key)

		if err != nil {
			if err != io.EOF {
				return err
			}

			// The stream ended before storeLen keys were read: copy the
			// lastDump timestamp from one loaded entry in db 0 to every
			// database, then stop loading.
			for _, value := range dbm.dbs[0].Store {
				for _, db := range dbm.dbs {
					db.lastDump = value.LastDump()
				}
				break
			}
			break
		}

		err = db_dec.Decode(&elem)
		if err != nil {
			return err
		}

		// The key's hash decides which of the NumDBs databases receives it.
		bucket := murmur.HashBucket(key, dbm.NumDBs)
		dbm.dbs[bucket].StoreSet(key, elem)

	}
	return nil
}
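
Driving the loader is mostly standard library plumbing. The helper below is a sketch rather than code from the original repository; it only assumes the dump file was written by the matching gob encoder, and it needs "encoding/gob" and "os" among the imports.

func loadDump(dbm *DBManager, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	// LoadFromGobDecoder reads the key count, then key/entry pairs, until
	// storeLen entries are decoded or the stream ends.
	return dbm.LoadFromGobDecoder(gob.NewDecoder(f))
}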
Example #3
// SaveToDiskAsync asynchronously saves the current database to disk. It
// uses gob encoding to sequentially dump each key. It listens on an internal
// dump channel that allows other DB methods either to force the dump of a key
// so that it can be modified during a save, or to block execution of the loop
// so that the underlying map can be modified via StoreSet.
func (db *DB) SaveToDiskAsync(db_enc *gob.Encoder, bucketsToDump map[uint16]uint8) error {

	// Nothing to do unless a save is in progress.
	if !db.InSaveMode {
		return nil
	}

	var err error

	// Walk the whole store, but between keys service any pending dump
	// requests arriving from writers on db.dumpKeyChan.
	for key, elem := range db.Store {
		// Entries already written during this dump pass are skipped.
		if elem.LastDump() == db.lastDump {
			continue
		}
		value := db.Store[key]
		for {
			escape := false
			select {
			case dump_key := <-db.dumpKeyChan:
				// A writer wants to modify dump_key: encode it now if it still
				// needs dumping, then answer on the channel to unblock the writer.
				if bucketsToDump != nil {
					// Partial dump: keys whose hash slot (out of 16384) is not in
					// bucketsToDump are skipped, but the writer is still released.
					bucket := murmur.HashBucket(dump_key, 16384)
					if _, ok := bucketsToDump[uint16(bucket)]; !ok {
						db.dumpKeyChan <- ""
						continue
					}
				}

				dump_value, ok := db.Store[dump_key]
				if ok && dump_value.LastDump() != db.lastDump {
					dump_value.SetLastDump(db.lastDump)
					if err = db_enc.Encode(dump_key); err != nil {
						log.Println("encode error:", err)
						return err
					}
					if err = db_enc.Encode(dump_value); err != nil {
						log.Println("encode error:", err)
						return err
					}
				}
				// Acknowledge the writer so it can proceed with its modification.
				db.dumpKeyChan <- ""
			default:
				// No dump request pending: encode the key from the range iteration itself.
				if bucketsToDump != nil {
					bucket := murmur.HashBucket(key, 16384)
					if _, ok := bucketsToDump[uint16(bucket)]; !ok {
						escape = true
						break
					}
				}

				var ok bool
				value, ok = db.Store[key]
				if ok && value.LastDump() != db.lastDump {
					value.SetLastDump(db.lastDump)
					if err = db_enc.Encode(key); err != nil {
						log.Println("encode error:", err)
						return err
					}
					if err = db_enc.Encode(value); err != nil {
						log.Println("encode error:", err)
						return err
					}
				}
				escape = true
			}
			if escape {
				break
			}
		}
	}

	return nil

}
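
The writer's side of the dump-channel handshake is only implied by the select loop, so the helper below is a hypothetical reconstruction, not code from the original source. It assumes the protocol suggested above: a writer sends the key it is about to change on db.dumpKeyChan, the saver encodes that key if it still needs dumping and replies with an empty string, and only then does the writer touch the store. It also assumes a single writer performs this handshake at a time, since the same channel carries both the request and the acknowledgement.

func (db *DB) setDuringSave(key string, elem *data.Entry) {
	if db.InSaveMode {
		// Ask the saver to flush this key before we overwrite it...
		db.dumpKeyChan <- key
		// ...and wait for the empty-string acknowledgement.
		<-db.dumpKeyChan
	}
	db.StoreSet(key, elem)
}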