Example #1
// Goes through a revision's '_attachments' map, loads attachments (by their 'digest' properties)
// and adds 'data' properties containing the data. The data is added as raw []byte; the JSON
// marshaler will convert that to base64.
// If minRevpos is > 0, then only attachments that have been changed in a revision of that
// generation or later are loaded.
func (db *Database) loadBodyAttachments(body Body, minRevpos int) error {
	for _, value := range BodyAttachments(body) {
		meta := value.(map[string]interface{})
		revpos, ok := base.ToInt64(meta["revpos"])
		if ok && revpos >= int64(minRevpos) {
			key := AttachmentKey(meta["digest"].(string))
			data, err := db.GetAttachment(key)
			if err != nil {
				return err
			}
			meta["data"] = data
			delete(meta, "stub")
		}
	}
	return nil
}
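
The comment above notes that the raw []byte stored in meta["data"] is emitted as base64 by Go's JSON marshaler. Below is a minimal standalone sketch of that behavior, using only the standard library and independent of the Database and Body types in the listing:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// encoding/json encodes []byte values as base64 strings, which is why
	// loadBodyAttachments can store the raw attachment bytes under "data"
	// and still produce valid JSON output.
	meta := map[string]interface{}{
		"content_type": "text/plain",
		"data":         []byte("hello attachment"),
	}
	out, err := json.Marshal(meta)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// {"content_type":"text/plain","data":"aGVsbG8gYXR0YWNobWVudQ=="}
}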
Example #2
func ReadMultipartDocument(reader *multipart.Reader) (Body, error) {
	// First read the main JSON document body:
	mainPart, err := reader.NextPart()
	if err != nil {
		return nil, err
	}
	var body Body
	err = ReadJSONFromMIME(http.Header(mainPart.Header), mainPart, &body)
	mainPart.Close()
	if err != nil {
		return nil, err
	}

	// Collect the attachments with a "follows" property, which will appear as MIME parts:
	followingAttachments := map[string]map[string]interface{}{}
	for name, value := range BodyAttachments(body) {
		if meta := value.(map[string]interface{}); meta["follows"] == true {
			followingAttachments[name] = meta
		}
	}

	// Subroutine to look up a following attachment given its digest. (I used to precompute a
	// map from digest->name, which was faster, but that broke down if there were multiple
	// attachments with the same contents! See #96)
	findFollowingAttachment := func(withDigest string) (string, map[string]interface{}) {
		for name, meta := range followingAttachments {
			if meta["follows"] == true {
				if digest, ok := meta["digest"].(string); ok && digest == withDigest {
					return name, meta
				}
			}
		}
		return "", nil
	}

	// Read the parts one by one:
	for i := 0; i < len(followingAttachments); i++ {
		part, err := reader.NextPart()
		if err != nil {
			if err == io.EOF {
				err = base.HTTPErrorf(http.StatusBadRequest,
					"Too few MIME parts: expected %d attachments, got %d",
					len(followingAttachments), i)
			}
			return nil, err
		}
		data, err := ioutil.ReadAll(part)
		part.Close()
		if err != nil {
			return nil, err
		}

		// Look up the attachment by its digest:
		digest := sha1DigestKey(data)
		name, meta := findFollowingAttachment(digest)
		if meta == nil {
			name, meta = findFollowingAttachment(md5DigestKey(data))
			if meta == nil {
				return nil, base.HTTPErrorf(http.StatusBadRequest,
					"MIME part #%d doesn't match any attachment", i+2)
			}
		}

		length, ok := base.ToInt64(meta["encoded_length"])
		if !ok {
			length, ok = base.ToInt64(meta["length"])
		}
		if ok {
			if length != int64(len(data)) {
				return nil, base.HTTPErrorf(http.StatusBadRequest, "Attachment length mismatch for %q: read %d bytes, should be %g", name, len(data), length)
			}
		}

		// Stuff the data into the attachment metadata and remove the "follows" property:
		delete(meta, "follows")
		meta["data"] = data
		meta["digest"] = digest
	}

	// Make sure there are no unused MIME parts:
	if _, err = reader.NextPart(); err != io.EOF {
		if err == nil {
			err = base.HTTPErrorf(http.StatusBadRequest,
				"Too many MIME parts (expected %d)", len(followingAttachments)+1)
		}
		return nil, err
	}

	return body, nil
}
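
A minimal usage sketch for Example #2 (not part of the original listing): wiring an incoming multipart/related request body into ReadMultipartDocument. The handler name putDocHandler is hypothetical; the sketch assumes the "mime", "mime/multipart", and "net/http" imports and that Body and ReadMultipartDocument from the listing are in scope.

func putDocHandler(w http.ResponseWriter, r *http.Request) {
	// Extract the MIME boundary from the Content-Type header, e.g.
	// "multipart/related; boundary=abc123".
	mediaType, params, err := mime.ParseMediaType(r.Header.Get("Content-Type"))
	if err != nil || mediaType != "multipart/related" {
		http.Error(w, "expected a multipart/related body", http.StatusUnsupportedMediaType)
		return
	}

	// Parse the JSON document part plus the following attachment parts.
	body, err := ReadMultipartDocument(multipart.NewReader(r.Body, params["boundary"]))
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	// At this point every attachment that had "follows" carries inline
	// "data" and a verified "digest"; hand the body off to storage, etc.
	_ = body
	w.WriteHeader(http.StatusOK)
}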