Code example #1
File: context.go Project: DaviWei/utils-1
/*
MarshalJSON will recursively run any `BeforeMarshal` functions it finds on the content, passing them arg and a stack of container instances, and then JSON marshal it.

It will not recurse further down a value once a BeforeMarshal function has been found on it, but it will run all top-level BeforeMarshal functions that it finds.
*/
func (self *DefaultJSONContext) MarshalJSON(c interface{}, body interface{}, arg interface{}) (result []byte, err error) {
	// declare a function that will recursively call itself
	var runRecursive func(reflect.Value, reflect.Value) error

	cVal := reflect.ValueOf(c)
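	// the interface type that BeforeMarshal's first argument must satisfy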
	contextType := reflect.TypeOf((*JSONContextLogger)(nil)).Elem()
	// the type of the container stack, used when validating BeforeMarshal signatures
	stackType := reflect.TypeOf([]interface{}{})

	// implement the function
	runRecursive = func(val reflect.Value, stack reflect.Value) error {
		// push this instance to the stack
		stack = reflect.Append(stack, val)

		// Try to run BeforeMarshal
		fun := val.MethodByName("BeforeMarshal")
		if fun.IsValid() && !utils.IsNil(val.Interface()) {
			// make sure we don't run BeforeMarshal on anything else at the same time, at least within this context.
			return self.marshalSyncLock.Sync(val.Interface(), func() (err error) {
				// Validate that BeforeMarshal takes a JSONContextLogger and the container stack, optionally followed by arg
				if err = utils.ValidateFuncInput(fun.Interface(), []reflect.Type{contextType, stackType}); err != nil {
					if err = utils.ValidateFuncInput(fun.Interface(), []reflect.Type{contextType, stackType, reflect.TypeOf(arg)}); err != nil {
						return fmt.Errorf("BeforeMarshal needs to take an JSONContextLogger")
					}
				}

				// Validate BeforeMarshal returns an error
				if err = utils.ValidateFuncOutput(fun.Interface(), []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}); err != nil {
					return fmt.Errorf("BeforeMarshal needs to return an error")
				}

				args := []reflect.Value{cVal, stack}
				if fun.Type().NumIn() == 3 {
					args = append(args, reflect.ValueOf(arg))
				}
				timer := time.Now()

				// run the actual BeforeMarshal
				res := fun.Call(args)

				if elapsed := time.Since(timer); elapsed > 500*time.Millisecond {
					self.Infof("BeforeMarshal for %s is slow, took: %v", val.Type(), elapsed)
				}

				if !res[0].IsNil() {
					err = res[0].Interface().(error)
				}
				return
			})
		}

		// Try to recurse into these types, if we didn't find a BeforeMarshal func on the val itself
		switch val.Kind() {
		case reflect.Ptr, reflect.Interface:
			if val.IsNil() {
				return nil
			}
			return runRecursive(val.Elem(), stack)

		case reflect.Slice:
			for i := 0; i < val.Len(); i++ {
				if err := runRecursive(val.Index(i).Addr(), stack); err != nil {
					return err
				}
			}

		case reflect.Struct:
			for i := 0; i < val.NumField(); i++ {
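				// PkgPath is empty only for exported fields, so skip unexported ones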
				if val.Type().Field(i).PkgPath == "" {
					if err := runRecursive(val.Field(i), stack); err != nil {
						return err
					}
				}
			}
		}
		return nil
	}

	// Run recursive reflection on body, executing BeforeMarshal on every object possible.
	stack := []interface{}{}
	if err = runRecursive(reflect.ValueOf(body), reflect.ValueOf(stack)); err != nil {
		return
	}

	// This makes sure that replies whose body is a nil slice marshal as '[]' instead of 'null'
	bodyVal := reflect.ValueOf(body)
	if bodyVal.Kind() == reflect.Slice && bodyVal.IsNil() {
		reflect.ValueOf(&body).Elem().Set(reflect.MakeSlice(bodyVal.Type(), 0, 0))
	}

	if result, err = json.MarshalIndent(body, "", "  "); err != nil {
		return
	}

	return
}
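
For illustration, here is a minimal sketch of a type that this MarshalJSON would pick up via reflection. The Article type, its fields and the hook body are hypothetical; only the BeforeMarshal signature (a JSONContextLogger plus the container stack, optionally a third arg, returning an error) is taken from the validation code above.

// Hypothetical example type; it is not part of this project.
type Article struct {
	Title     string `json:"title"`
	Truncated bool   `json:"truncated"`
}

// BeforeMarshal is found by name via reflection. It must accept a
// JSONContextLogger and the container stack (and may accept a third arg),
// and must return an error, or MarshalJSON will reject it.
func (a *Article) BeforeMarshal(c JSONContextLogger, stack []interface{}) error {
	// as an example of a pre-marshal hook, trim overly long titles
	if len(a.Title) > 80 {
		a.Title = a.Title[:80]
		a.Truncated = true
	}
	return nil
}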
Code example #2
File: memcache.go Project: DaviWei/utils-1
/*
memoizeMulti will look up all the provided keys and load the cached values into the destinationPointers.

Any missing values will be generated using the generatorFunctions and put in memcache with a duration timeout.

If cacheNil is true, nil results or memcache.ErrCacheMiss errors from the generator functions will be cached as well.

It returns an appengine.MultiError with one entry per key; an entry of memcache.ErrCacheMiss means no value was found for that key (neither in memcache nor from the generator function).
*/
func memoizeMulti(
	c TransactionContext,
	keys []string,
	cacheNil bool,
	destinationPointers []interface{},
	generatorFunctions []func() (interface{}, time.Duration, error)) (errors appengine.MultiError) {

	// First generate memcache friendly key hashes from all the provided keys.
	keyHashes := make([]string, len(keys))
	for index, key := range keys {
		k, err := Keyify(key)
		if err != nil {
			errors = appengine.MultiError{err}
			return
		}
		keyHashes[index] = k
	}

	// Then, run a memGetMulti using these keys, and warn if it is slow.
	t := time.Now()
	var items []*memcache.Item
	items, errors = memGetMulti(c, keyHashes, destinationPointers)
	if d := time.Now().Sub(t); d > time.Millisecond*10 {
		c.Debugf("SLOW memGetMulti(%v): %v", keys, d)
	}

	// Create a channel to handle any panics produced by the concurrent code.
	panicChan := make(chan interface{}, len(items))

	// For all the items we tried to fetch...
	for i, item := range items {

		// capture per-iteration copies for the goroutine closure below
		index := i
		err := errors[index]
		keyHash := keyHashes[index]
		destinationPointer := destinationPointers[index]
		if err == memcache.ErrCacheMiss {
			// on a cache miss, generate the value in a background goroutine
			go func() (err error) {
				// defer recovering any panic and sending it to the panic channel
				defer func() {
					errors[index] = err
					if e := recover(); e != nil {
						c.Infof("Panic: %v", e)
						panicChan <- fmt.Errorf("%v\n%v", e, utils.Stack())
					} else {
						// no panic: send a nil, since we wait for every goroutine to send something on the channel
						panicChan <- nil
					}
				}()
				var result interface{}
				var duration time.Duration
				found := true
				// try to run the generator function
				if result, duration, err = generatorFunctions[index](); err != nil {
					if err != memcache.ErrCacheMiss {
						return
					} else {
						// ErrCacheMiss from the generator function means that we want the caller to think there is no data to return
						found = false
					}
				} else {
					// if there is no error, check if we got a nil
					found = !utils.IsNil(result)
					if !found {
						// if we did, we fake an ErrCacheMiss
						err = memcache.ErrCacheMiss
					}
				}
				// If we are not inside a transaction, we have to store the result in memcache
				if !c.InTransaction() && (found || cacheNil) {
					obj := result
					var flags uint32
					if !found {
						// if the generator responded with nil or a cache miss, flag this cache entry as a cache miss for future reference
						obj = reflect.Indirect(reflect.ValueOf(destinationPointer)).Interface()
						flags = nilCache
					}
					if err2 := codecSetWithRetry(c, Codec, &memcache.Item{
						Key:        keyHash,
						Flags:      flags,
						Object:     obj,
						Expiration: duration,
					}); err2 != nil {
						// We successfully generated the data but failed to store it in memcache; log it and continue
						c.Errorf("Failed storing to memcache, %v", err2)
						return
					}
				}
				if found {
					// if we actually found something, copy the result to the destination
					utils.ReflectCopy(result, destinationPointer)
				}
				return
			}()
		} else if err != nil {
			// the error is already recorded in the errors slice; just send nil so the channel count adds up
			panicChan <- nil
		} else {
			// if we FOUND something, but it was flagged as a cache miss, fake a cache miss
			if item.Flags&nilCache == nilCache {
				errors[index] = memcache.ErrCacheMiss
			}
			panicChan <- nil
		}
	}

	// collect any panics, and raise them if we found any
	panics := []interface{}{}
	for range items {
		if e := <-panicChan; e != nil {
			panics = append(panics, e)
		}
	}
	if len(panics) > 0 {
		panic(panics)
	}
	return
}
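
As a rough sketch of the calling convention (the getTwoUsers wrapper, loadUser helper and User type below are made up for illustration and do not exist in this project): keys, destination pointers and generator functions are paired index for index, and each slot of the returned MultiError can be checked for memcache.ErrCacheMiss to see whether that value was found.

// Hypothetical caller; User and loadUser are invented names.
type User struct {
	Name string
}

func loadUser(c TransactionContext, id string) (User, error) {
	// ... load from the datastore; returning memcache.ErrCacheMiss here makes
	// the value look absent to the caller ...
	return User{Name: id}, nil
}

func getTwoUsers(c TransactionContext, id1, id2 string) (u1, u2 User, errs appengine.MultiError) {
	errs = memoizeMulti(c,
		[]string{"User{" + id1 + "}", "User{" + id2 + "}"},
		false, // do not cache nil results from the generators
		[]interface{}{&u1, &u2},
		[]func() (interface{}, time.Duration, error){
			func() (interface{}, time.Duration, error) {
				u, err := loadUser(c, id1)
				return u, time.Hour, err
			},
			func() (interface{}, time.Duration, error) {
				u, err := loadUser(c, id2)
				return u, time.Hour, err
			},
		})
	// errs[0] == memcache.ErrCacheMiss means no value could be found for id1
	return
}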