Example #1
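// writeChunks erasure-codes data into conf.Need data chunks plus
// conf.Total-conf.Need parity chunks, writes each chunk to a separate store,
// and returns the resulting file metadata. If any chunk cannot be stored, it
// tries to delete the chunks it already wrote and returns the first error.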
func (m *Multi) writeChunks(key string, data []byte, sha [32]byte, prefixid [16]byte) (*meta.File, error) {
	m.mu.Lock()
	conf := m.config
	m.mu.Unlock()

	stores, err := m.orderTargets()
	if err != nil {
		return nil, err
	}

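	// Map the raw bytes into the Galois field, split them into conf.Need data
	// parts, and append the conf.Total-conf.Need parity parts computed from them.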
	mapping, all := gf.MapToGF(data)
	parts := splitVector(all, conf.Need)
	parityParts := make([][]uint32, conf.Total-conf.Need)
	for i := range parityParts {
		parityParts[i] = rs.CreateParity(parts, i+len(parts), nil)
	}
	parts = append(parts, parityParts...)

	file := &meta.File{
		Path:         key,
		Size:         uint64(len(data)),
		WriteTime:    time.Now().Unix(),
		PrefixID:     prefixid,
		DataChunks:   uint16(conf.Need),
		MappingValue: mapping,
		SHA256:       sha,
	}

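	// Queue the candidate stores on a closed channel; the chunk writers below
	// consume from it, so each store is handed to at most one writer and a
	// store that fails a write is not retried.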
	storeCh := make(chan store.Store, len(stores))
	for _, st := range stores {
		storeCh <- st
	}
	close(storeCh)

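	// Write the chunks concurrently. Each goroutine claims stores from storeCh
	// until one CAS write succeeds; if it exhausts the stores it reports
	// ErrInsufficientStores.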
	file.Locations = make([][16]byte, len(parts))
	errs := make(chan error)
	for i, part := range parts {
		go func(i int, part []uint32) {
			data := gf.MapFromGF(mapping, part)
			localKey := localKeyFor(file, i)
			dataV := store.DataV(data)
			for st := range storeCh {
				err := st.CAS(localKey, store.AnyV, dataV, nil)
				if err != nil {
					// TODO: log
					continue
				}

				file.Locations[i] = st.UUID()
				errs <- nil
				return
			}
			errs <- ErrInsufficientStores
		}(i, part)
	}

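	// Collect one result per chunk, keeping the first error seen.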
	var theError error
	for range parts {
		err := <-errs
		if err != nil && theError == nil {
			theError = err
		}
	}

	if theError != nil {
		// attempt to clean up any parts we wrote
		for i := range parts {
			st := m.finder.StoreFor(file.Locations[i])
			if st != nil {
				localKey := localKeyFor(file, i)
				st.CAS(localKey, store.AnyV, store.MissingV, nil)
			}
		}

		return nil, theError
	}

	return file, nil
}
Example #2
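// reconstruct rebuilds the file contents from whichever chunks are still
// retrievable: it concatenates the data chunks directly when they are all
// present and unmapped, and otherwise recovers the data from any
// f.DataChunks available chunks. Unless opts.NoVerify is set, the result is
// checked against the stored SHA-256 before being returned.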
func (m *Multi) reconstruct(f *meta.File, opts store.GetOptions) ([]byte, error) {
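	// Fetch whatever chunk data is retrievable; missing chunks come back nil.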
	chunkData := m.getChunkData(f, opts)

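	// Stop early if the caller cancelled while the chunks were being fetched.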
	select {
	case <-opts.Cancel:
		return nil, store.ErrCancelled
	default:
	}

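	// The fast path applies only when the chunks were stored unmapped
	// (MappingValue == 0) and every data chunk is present.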
	rawDataAvailable := f.MappingValue == 0
	if rawDataAvailable {
		for i := 0; i < int(f.DataChunks); i++ {
			if chunkData[i] == nil {
				rawDataAvailable = false
				break
			}
		}
	}

	data := make([]byte, 0, int(f.Size)+16)
	if rawDataAvailable {
		// fast path:
		// - chunkData[0..f.DataChunks-1] are non-nil
		// - f.MappingValue == 0

		// TODO: fast path when f.MappingValue != 0
		for i := 0; i < int(f.DataChunks); i++ {
			data = append(data, chunkData[i]...)
		}
		data = data[:int(f.Size)]
	} else {
		// slow path: full reconstruction

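		// Map each available chunk back into the Galois field, recording its
		// index so the decoder knows which positions it has.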
		indicies := make([]int, 0, len(chunkData))
		chunks := make([][]uint32, 0, len(chunkData))
		for i, data := range chunkData {
			if data == nil {
				continue
			}
			chunk := gf.MapToGFWith(data, f.MappingValue)

			indicies = append(indicies, i)
			chunks = append(chunks, chunk)
		}

		if len(chunks) < int(f.DataChunks) {
			return nil, ErrInsufficientChunks
		}

		chunks = chunks[:int(f.DataChunks)]
		indicies = indicies[:int(f.DataChunks)]

		dataVecs := rs.RecoverData(chunks, indicies)
		for _, vec := range dataVecs {
			data = append(data, gf.MapFromGF(f.MappingValue, vec)...)
		}
		data = data[:int(f.Size)]
	}

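	// Verify the reconstructed bytes against the stored hash unless the
	// caller opted out of verification.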
	if !opts.NoVerify {
		have := sha256.Sum256(data)
		if have != f.SHA256 {
			return nil, ErrBadHash
		}
	}

	return data, nil
}