// Attempts to retrieve ancestor attachments for a document. First attempts to find and use a non-pruned ancestor. // If no non-pruned ancestor is available, checks whether the currently active doc has a common ancestor with the new revision. // If it does, can use the attachments on the active revision with revpos earlier than that common ancestor. func (db *Database) retrieveAncestorAttachments(doc *document, parentRev string, docHistory []string) map[string]interface{} { var parentAttachments map[string]interface{} // Attempt to find a non-pruned parent or ancestor parent, _ := db.getAvailableRev(doc, parentRev) if parent != nil { parentAttachments, _ = parent["_attachments"].(map[string]interface{}) } else { // No non-pruned ancestor is available commonAncestor := doc.History.findAncestorFromSet(doc.CurrentRev, docHistory) if commonAncestor != "" { parentAttachments = make(map[string]interface{}) commonAncestorGen, _ := base.ToInt64(genOfRevID(commonAncestor)) for name, activeAttachment := range BodyAttachments(doc.body) { attachmentMeta, ok := activeAttachment.(map[string]interface{}) if ok { activeRevpos, ok := base.ToInt64(attachmentMeta["revpos"]) if ok && activeRevpos <= commonAncestorGen { parentAttachments[name] = activeAttachment } } } } } return parentAttachments }
// Splits out the "start" and "ids" properties from encoded revision list func splitRevisionList(revisions Body) (int, []string) { start, ok := base.ToInt64(revisions["start"]) digests, _ := GetStringArrayProperty(revisions, "ids") if ok && len(digests) > 0 && int(start) >= len(digests) { return int(start), digests } else { return 0, nil } }
// Goes through a revisions '_attachments' map, loads attachments (by their 'digest' properties) // and adds 'data' properties containing the data. The data is added as raw []byte; the JSON // marshaler will convert that to base64. // If minRevpos is > 0, then only attachments that have been changed in a revision of that // generation or later are loaded. func (db *Database) loadBodyAttachments(body Body, minRevpos int) (Body, error) { body = body.ImmutableAttachmentsCopy() for _, value := range BodyAttachments(body) { meta := value.(map[string]interface{}) revpos, ok := base.ToInt64(meta["revpos"]) if ok && revpos >= int64(minRevpos) { key := AttachmentKey(meta["digest"].(string)) data, err := db.GetAttachment(key) if err != nil { return nil, err } meta["data"] = data delete(meta, "stub") } } return body, nil }
// Given a CouchDB document body about to be stored in the database, goes through the _attachments
// dict, finds attachments with inline bodies, copies the bodies into the Couchbase db, and replaces
// the bodies with the 'digest' attributes which are the keys to retrieving them.
// Attachments without inline data must be stubs repeating a parent revision's attachment; those are
// resolved against the parent revision's metadata (looked up lazily, at most once per call).
// Returns an HTTP 400 error for malformed attachment metadata.
func (db *Database) storeAttachments(doc *document, body Body, generation int, parentRev string) error {
	// Lazily-populated cache of the parent revision's attachment metadata; only fetched
	// the first time a stub needs to be resolved.
	var parentAttachments map[string]interface{}
	atts := BodyAttachments(body)
	// A non-nil "_attachments" that isn't a map is malformed input.
	if atts == nil && body["_attachments"] != nil {
		return base.HTTPErrorf(400, "Invalid _attachments")
	}
	for name, value := range atts {
		meta, ok := value.(map[string]interface{})
		if !ok {
			return base.HTTPErrorf(400, "Invalid _attachments")
		}
		data, exists := meta["data"]
		if exists {
			// Attachment contains data, so store it in the db:
			attachment, err := decodeAttachment(data)
			if err != nil {
				return err
			}
			key, err := db.setAttachment(attachment)
			if err != nil {
				return err
			}
			// Replace the inline body with stub metadata keyed by the attachment digest.
			newMeta := map[string]interface{}{
				"stub":   true,
				"digest": string(key),
				"revpos": generation,
			}
			if contentType, ok := meta["content_type"].(string); ok {
				newMeta["content_type"] = contentType
			}
			if encoding := meta["encoding"]; encoding != nil {
				// Encoded attachment: record both the stored (encoded) length and,
				// if supplied, the original decoded length.
				newMeta["encoding"] = encoding
				newMeta["encoded_length"] = len(attachment)
				if length, ok := meta["length"].(float64); ok {
					newMeta["length"] = length
				}
			} else {
				newMeta["length"] = len(attachment)
			}
			atts[name] = newMeta
		} else {
			// Attachment must be a stub that repeats a parent attachment
			if meta["stub"] != true {
				return base.HTTPErrorf(400, "Missing data of attachment %q", name)
			}
			// A stub must carry a valid (>= 1) revpos identifying when it last changed.
			if revpos, ok := base.ToInt64(meta["revpos"]); !ok || revpos < 1 {
				return base.HTTPErrorf(400, "Missing/invalid revpos in stub attachment %q", name)
			}
			// Try to look up the attachment in the parent revision:
			if parentAttachments == nil {
				if parent, _ := db.getAvailableRev(doc, parentRev); parent != nil {
					parentAttachments, _ = parent["_attachments"].(map[string]interface{})
				}
			}
			if parentAttachments != nil {
				// Inherit the parent's full metadata for this attachment, if present.
				if parentAttachment := parentAttachments[name]; parentAttachment != nil {
					atts[name] = parentAttachment
				}
			} else if meta["digest"] == nil {
				// No parent metadata available: the stub is only usable if it at least
				// identifies its content by digest.
				return base.HTTPErrorf(400, "Missing digest in stub attachment %q", name)
			}
		}
	}
	return nil
}
func ReadMultipartDocument(reader *multipart.Reader) (Body, error) { // First read the main JSON document body: mainPart, err := reader.NextPart() if err != nil { return nil, err } var body Body err = ReadJSONFromMIME(http.Header(mainPart.Header), mainPart, &body) mainPart.Close() if err != nil { return nil, err } // Collect the attachments with a "follows" property, which will appear as MIME parts: followingAttachments := map[string]map[string]interface{}{} for name, value := range BodyAttachments(body) { if meta := value.(map[string]interface{}); meta["follows"] == true { followingAttachments[name] = meta } } // Subroutine to look up a following attachment given its digest. (I used to precompute a // map from digest->name, which was faster, but that broke down if there were multiple // attachments with the same contents! See #96) findFollowingAttachment := func(withDigest string) (string, map[string]interface{}) { for name, meta := range followingAttachments { if meta["follows"] == true { if digest, ok := meta["digest"].(string); ok && digest == withDigest { return name, meta } } } return "", nil } // Read the parts one by one: for i := 0; i < len(followingAttachments); i++ { part, err := reader.NextPart() if err != nil { if err == io.EOF { err = base.HTTPErrorf(http.StatusBadRequest, "Too few MIME parts: expected %d attachments, got %d", len(followingAttachments), i) } return nil, err } data, err := ioutil.ReadAll(part) part.Close() if err != nil { return nil, err } // Look up the attachment by its digest: digest := sha1DigestKey(data) name, meta := findFollowingAttachment(digest) if meta == nil { name, meta = findFollowingAttachment(md5DigestKey(data)) if meta == nil { return nil, base.HTTPErrorf(http.StatusBadRequest, "MIME part #%d doesn't match any attachment", i+2) } } length, ok := base.ToInt64(meta["encoded_length"]) if !ok { length, ok = base.ToInt64(meta["length"]) } if ok { if length != int64(len(data)) { return nil, 
base.HTTPErrorf(http.StatusBadRequest, "Attachment length mismatch for %q: read %d bytes, should be %g", name, len(data), length) } } // Stuff the data into the attachment metadata and remove the "follows" property: delete(meta, "follows") meta["data"] = data meta["digest"] = digest } // Make sure there are no unused MIME parts: if _, err = reader.NextPart(); err != io.EOF { if err == nil { err = base.HTTPErrorf(http.StatusBadRequest, "Too many MIME parts (expected %d)", len(followingAttachments)+1) } return nil, err } return body, nil }