Example #1
0
// Delete removes this node from the database and its directory from disk.
//
// Two safety checks run first:
//  1. The node must not be referenced as a virtual part by any other node;
//     deleting it would leave dangling virtual references.
//  2. If other nodes share this node's data file (their file.path points at
//     it), the data file is relocated to the first such node and every
//     remaining copy is re-pointed at the new location, so the shared data
//     survives the deletion.
func (node *Node) Delete() (err error) {
	// check to make sure this node isn't referenced by a vnode
	virtualNodes := Nodes{}
	if _, err = dbFind(bson.M{"virtual_parts": node.Id}, &virtualNodes, nil); err != nil {
		return err
	}
	if len(virtualNodes) != 0 {
		return errors.New(e.NodeReferenced)
	}

	// Check to see if this node has a data file and if it's referenced by another node.
	// If it is, we will move the data file to the first node we find, and point all other nodes to that node's path
	dataFilePath := fmt.Sprintf("%s/%s.data", getPath(node.Id), node.Id)
	dataFileExists := true
	if _, ferr := os.Stat(dataFilePath); os.IsNotExist(ferr) {
		dataFileExists = false
	}
	newDataFilePath := ""
	copiedNodes := Nodes{}
	if _, err = dbFind(bson.M{"file.path": dataFilePath}, &copiedNodes, nil); err != nil {
		return err
	}
	if len(copiedNodes) != 0 && dataFileExists {
		for index, copiedNode := range copiedNodes {
			if index == 0 {
				// The first referencing node becomes the new owner of the data file.
				newDataFilePath = fmt.Sprintf("%s/%s.data", getPath(copiedNode.Id), copiedNode.Id)
				if rerr := os.Rename(dataFilePath, newDataFilePath); rerr != nil {
					// Rename can fail (e.g. across filesystems); fall back to copying.
					if _, cerr := util.CopyFile(dataFilePath, newDataFilePath); cerr != nil {
						return errors.New("This node has a data file linked to another node and the data file could not be copied elsewhere to allow for node deletion.")
					}
				}
				// An empty File.Path signals the node's own default data location.
				copiedNode.File.Path = ""
			} else {
				// Remaining copies point at the new owner's file.
				copiedNode.File.Path = newDataFilePath
			}
			// BUGFIX: Save errors were previously discarded, which could leave
			// copy nodes pointing at a data file that is about to be deleted.
			if err = copiedNode.Save(); err != nil {
				return err
			}
		}
	}

	if err = dbDelete(bson.M{"id": node.Id}); err != nil {
		return err
	}
	return node.Rmdir()
}
Example #2
0
File: update.go Project: MG-RAST/Shock
// Update applies one modification request (form params + uploaded files) to
// the node. The upload modes listed below are mutually exclusive; attribute,
// linkage, tag, format, expiration and revision updates may accompany any of
// them.
func (node *Node) Update(params map[string]string, files FormFiles) (err error) {
	// Exclusive conditions
	// 1.1. has files[upload] (regular upload)
	// 1.2. has files[gzip] (compressed upload)
	// 1.3. has files[bzip2] (compressed upload)
	// 2. has params[parts] (partial upload support)
	// 3. has params[type] & params[source] (v_node)
	// 4. has params[path] (set from local path)
	// 5. has params[copy_data] (create node by copying data from another node)
	// 6. has params[parent_node] (create node by specifying subset of records in a parent node)
	//
	// All condition allow setting of attributes
	//
	// Note that all paths for node operations in this function must end with "err = node.Save()" to save node state.

	// Upload fields must arrive file-encoded, not as plain form values.
	for _, u := range util.ValidUpload {
		if _, uploadMisplaced := params[u]; uploadMisplaced {
			return errors.New(fmt.Sprintf("%s form field must be file encoded", u))
		}
	}

	// Determine which (single) regular upload form file was provided.
	isRegularUpload := false
	uploadFile := ""
	uploadCount := 0
	for _, u := range util.ValidUpload {
		if _, hasRegularUpload := files[u]; hasRegularUpload {
			isRegularUpload = true
			uploadFile = u
			uploadCount += 1
		}
	}
	if uploadCount > 1 {
		return errors.New("only one upload file allowed")
	}

	isUrlUpload := false
	if _, hasUrlUpload := files["upload_url"]; hasUrlUpload {
		isUrlUpload = true
	}

	_, isPartialUpload := params["parts"]
	// A purely numeric file key (e.g. "1") marks an uploaded part file.
	hasPartsFile := false
	for key, _ := range files {
		if _, errf := strconv.Atoi(key); errf == nil {
			hasPartsFile = true
		}
	}

	isVirtualNode := false
	if t, hasType := params["type"]; hasType && t == "virtual" {
		isVirtualNode = true
	}
	_, isPathUpload := params["path"]
	_, isCopyUpload := params["copy_data"]
	_, isSubsetUpload := params["parent_node"]

	// Check exclusive conditions
	if isRegularUpload && (isUrlUpload || isPartialUpload || isPathUpload || isVirtualNode || isCopyUpload || isSubsetUpload) {
		return errors.New("upload parameter incompatible with upload_url, parts, path, type, copy_data and/or parent_node parameter(s)")
	} else if isUrlUpload && (isRegularUpload || isPartialUpload || isPathUpload || isVirtualNode || isCopyUpload || isSubsetUpload) {
		return errors.New("upload_url parameter incompatible with upload, parts, path, type, copy_data and/or parent_node parameter(s)")
	} else if isPartialUpload && (isVirtualNode || isPathUpload || isCopyUpload || isSubsetUpload) {
		return errors.New("parts parameter incompatible with type, path, copy_data and/or parent_node parameter(s)")
	} else if isVirtualNode && (isPathUpload || isCopyUpload || isSubsetUpload) {
		return errors.New("type parameter incompatible with path, copy_data and/or parent_node parameter")
	} else if isPathUpload && (isCopyUpload || isSubsetUpload) {
		return errors.New("path parameter incompatible with copy_data and/or parent_node parameter")
	} else if isCopyUpload && isSubsetUpload {
		return errors.New("copy_data parameter incompatible with parent_node parameter")
	} else if hasPartsFile && (isRegularUpload || isUrlUpload) {
		return errors.New("parts file and upload or upload_url parameters are incompatible")
	} else if (node.Type == "parts") && (isRegularUpload || isUrlUpload) {
		return errors.New("parts node and upload or upload_url parameters are incompatible")
	} else if isPartialUpload && hasPartsFile {
		return errors.New("can not upload parts file when creating parts node")
	}

	// Check if immutable: a node that already has file content rejects any further upload.
	if node.HasFile() && (isRegularUpload || isUrlUpload || isPartialUpload || hasPartsFile || isVirtualNode || isPathUpload || isCopyUpload || isSubsetUpload) {
		return errors.New(e.FileImut)
	}

	if isRegularUpload {
		if err = node.SetFile(files[uploadFile]); err != nil {
			return err
		}
		delete(files, uploadFile)
	} else if isUrlUpload {
		if err = node.SetFile(files["upload_url"]); err != nil {
			return err
		}
		delete(files, "upload_url")
	} else if isPartialUpload {
		// close variable length parts
		if params["parts"] == "close" {
			if (node.Type != "parts") || (node.Parts == nil) || !node.Parts.VarLen {
				return errors.New("can only call 'close' on unknown parts node")
			}
			// we do a node level lock here incase its processing a part
			// Refresh parts information after locking, before saving.
			LockMgr.LockNode(node.Id)
			n, err := Load(node.Id)
			if err != nil {
				LockMgr.UnlockNode(node.Id)
				return err
			}
			node.Parts = n.Parts
			// closeParts removes node id from LockMgr, no need unlock
			if err = node.closeParts(true); err != nil {
				return err
			}
		} else if (node.Parts != nil) && (node.Parts.VarLen || node.Parts.Count > 0) {
			return errors.New("parts already set")
		} else {
			// set parts struct
			var compressionFormat string = ""
			if compress, ok := params["compression"]; ok {
				// Invalid compression values are silently ignored (no error).
				if archive.IsValidUncompress(compress) {
					compressionFormat = compress
				}
			}
			if params["parts"] == "unknown" {
				// initParts adds node id to LockMgr
				if err = node.initParts("unknown", compressionFormat); err != nil {
					return err
				}
			} else {
				n, err := strconv.Atoi(params["parts"])
				if err != nil {
					return errors.New("parts must be an integer or 'unknown'")
				}
				if n < 1 {
					return errors.New("parts cannot be less than 1")
				}
				// initParts adds node id to LockMgr
				if err = node.initParts(params["parts"], compressionFormat); err != nil {
					return err
				}
			}
		}
	} else if isVirtualNode {
		node.Type = "virtual"
		if source, hasSource := params["source"]; hasSource {
			ids := strings.Split(source, ",")
			// NOTE(review): addVirtualParts return value (if any) is not checked here.
			node.addVirtualParts(ids)
		} else {
			return errors.New("type virtual requires source parameter")
		}
	} else if isPathUpload {
		if action, hasAction := params["action"]; !hasAction || (action != "copy_file" && action != "move_file" && action != "keep_file") {
			return errors.New("path upload requires action field equal to copy_file, move_file or keep_file")
		}
		localpaths := strings.Split(conf.PATH_LOCAL, ",")
		if len(localpaths) <= 0 {
			return errors.New("local files path uploads must be configured. Please contact your Shock administrator.")
		}
		// The requested path must live under one of the configured local roots.
		var success = false
		for _, p := range localpaths {
			if strings.HasPrefix(params["path"], p) {
				if err = node.SetFileFromPath(params["path"], params["action"]); err != nil {
					return err
				} else {
					success = true
				}
			}
		}
		if !success {
			return errors.New("file not in local files path. Please contact your Shock administrator.")
		}
	} else if isCopyUpload {
		var n *Node
		n, err = Load(params["copy_data"])
		if err != nil {
			return err
		}

		if n.File.Virtual {
			return errors.New("copy_data parameter points to a virtual node, invalid operation.")
		}

		// Copy node file information
		node.File.Name = n.File.Name
		node.File.Size = n.File.Size
		node.File.Checksum = n.File.Checksum
		node.File.Format = n.File.Format
		node.File.CreatedOn = time.Now()

		if n.Type == "subset" {
			node.Subset = n.Subset
			subsetIndexFile := n.Path() + "/" + n.Id + ".subset.idx"
			// The subset index file is required for creating a copy of a subset node.
			if _, err := os.Stat(subsetIndexFile); err == nil {
				if _, cerr := util.CopyFile(subsetIndexFile, node.Path()+"/"+node.Id+".subset.idx"); cerr != nil {
					return cerr
				}
			} else {
				return err
			}
			node.Type = "subset"
		} else {
			node.Type = "copy"
		}

		// Copy node attributes
		if _, copyAttributes := params["copy_attributes"]; copyAttributes {
			node.Attributes = n.Attributes
		}

		// Copy node indexes
		if _, copyIndex := params["copy_indexes"]; copyIndex && (len(n.Indexes) > 0) {
			// loop through parent indexes
			for idxType, idxInfo := range n.Indexes {
				parentFile := n.IndexPath() + "/" + idxType + ".idx"
				if _, err := os.Stat(parentFile); err == nil {
					// copy file if exists
					if _, cerr := util.CopyFile(parentFile, node.IndexPath()+"/"+idxType+".idx"); cerr != nil {
						return cerr
					}
				}
				// copy index struct
				if err := node.SetIndexInfo(idxType, idxInfo); err != nil {
					return err
				}
			}
		} else if sizeIndex, exists := n.Indexes["size"]; exists {
			// just copy size index
			if err := node.SetIndexInfo("size", sizeIndex); err != nil {
				return err
			}
		}

		// An empty parent File.Path means the parent stores its data at the
		// default location; point at that, otherwise reuse the stored path.
		if n.File.Path == "" {
			node.File.Path = fmt.Sprintf("%s/%s.data", getPath(params["copy_data"]), params["copy_data"])
		} else {
			node.File.Path = n.File.Path
		}

		if err = node.Save(); err != nil {
			return err
		}
	} else if isSubsetUpload {
		// NOTE(review): this assumes files["subset_indices"] was uploaded; a
		// missing file yields the stat error below rather than a dedicated
		// "missing subset_indices" message.
		fInfo, statErr := os.Stat(files["subset_indices"].Path)
		if statErr != nil {
			return errors.New("Could not stat uploaded subset_indices file.")
		}
		node.Type = "subset"

		if fInfo.Size() == 0 {
			// if upload file is empty, make a basic node with empty file
			if err = node.SetFile(files["subset_indices"]); err != nil {
				return err
			}
			delete(files, "subset_indices")
		} else {
			// process subset upload
			_, hasParentIndex := params["parent_index"]
			if !hasParentIndex {
				return errors.New("parent_index is a required parameter for creating a subset node.")
			}

			var n *Node
			n, err = Load(params["parent_node"])
			if err != nil {
				return err
			}

			if n.File.Virtual {
				return errors.New("parent_node parameter points to a virtual node, invalid operation.")
			}

			if _, indexExists := n.Indexes[params["parent_index"]]; !indexExists {
				return errors.New("Index '" + params["parent_index"] + "' does not exist for parent node.")
			}

			parentIndexFile := n.IndexPath() + "/" + params["parent_index"] + ".idx"
			if _, statErr := os.Stat(parentIndexFile); statErr != nil {
				return errors.New("Could not stat index file for parent node where parent node = '" + params["parent_node"] + "' and index = '" + params["parent_index"] + "'.")
			}

			// Copy node file information
			node.File.Name = n.File.Name
			node.File.Format = n.File.Format
			node.Subset.Parent.Id = params["parent_node"]
			node.Subset.Parent.IndexName = params["parent_index"]

			if n.File.Path == "" {
				node.File.Path = fmt.Sprintf("%s/%s.data", getPath(params["parent_node"]), params["parent_node"])
			} else {
				node.File.Path = n.File.Path
			}

			if _, hasSubsetList := files["subset_indices"]; hasSubsetList {
				if err = node.SetFileFromSubset(files["subset_indices"]); err != nil {
					return err
				}
				delete(files, "subset_indices")
			} else {
				// NOTE(review): the stat at the top of this branch succeeded,
				// so subset_indices should always still be present here; this
				// Save fallback looks unreachable — confirm before removing.
				if err = node.Save(); err != nil {
					return err
				}
			}
		}
	}

	// set attributes from file
	if _, hasAttr := files["attributes"]; hasAttr {
		if _, hasAttrStr := params["attributes_str"]; hasAttrStr {
			return errors.New("Cannot define an attributes file and an attributes_str parameter in the same request.")
		}
		if err = node.SetAttributes(files["attributes"]); err != nil {
			return err
		}
		delete(files, "attributes")
	}

	// set attributes from json string
	if _, hasAttrStr := params["attributes_str"]; hasAttrStr {
		if _, hasAttr := files["attributes"]; hasAttr {
			return errors.New("Cannot define an attributes file and an attributes_str parameter in the same request.")
		}
		if err = node.SetAttributesFromString(params["attributes_str"]); err != nil {
			return err
		}
		delete(params, "attributes_str")
	}

	// set filename string
	if _, hasFileNameStr := params["file_name"]; hasFileNameStr {
		node.File.Name = params["file_name"]
		if err = node.Save(); err != nil {
			return err
		}
		delete(params, "file_name")
	}

	// update relatives
	if _, hasRelation := params["linkage"]; hasRelation {
		ltype := params["linkage"]

		// A parent linkage is immutable once set.
		if ltype == "parent" {
			if node.HasParent() {
				return errors.New(e.ProvenanceImut)
			}
		}
		var ids string
		if _, hasIds := params["ids"]; hasIds {
			ids = params["ids"]
		} else {
			return errors.New("missing ids for updating relatives")
		}
		var operation string
		if _, hasOp := params["operation"]; hasOp {
			operation = params["operation"]
		}
		if err = node.UpdateLinkages(ltype, ids, operation); err != nil {
			return err
		}
	}

	//update node tags
	if _, hasDataType := params["tags"]; hasDataType {
		if err = node.UpdateDataTags(params["tags"]); err != nil {
			return err
		}
	}

	//update file format
	if _, hasFormat := params["format"]; hasFormat {
		if node.File.Format != "" {
			return errors.New(fmt.Sprintf("file format already set:%s", node.File.Format))
		}
		if err = node.SetFileFormat(params["format"]); err != nil {
			return err
		}
	}

	// update node expiration
	if _, hasExpiration := params["expiration"]; hasExpiration {
		if err = node.SetExpiration(params["expiration"]); err != nil {
			return err
		}
	}
	if _, hasRemove := params["remove_expiration"]; hasRemove {
		if err = node.RemoveExpiration(); err != nil {
			return err
		}
	}

	// clear node revisions
	if _, hasClearRevisions := params["clear_revisions"]; hasClearRevisions {
		if err = node.ClearRevisions(); err != nil {
			return err
		}
	}

	// handle part file / we do a node level lock here
	if hasPartsFile {
		if node.HasFile() {
			return errors.New(e.FileImut)
		}
		if (node.Type != "parts") || (node.Parts == nil) {
			return errors.New("This is not a parts node and thus does not support uploading in parts.")
		}
		LockMgr.LockNode(node.Id)
		defer LockMgr.UnlockNode(node.Id)

		// Refresh parts information after locking, before saving.
		// Load node by id
		n, err := Load(node.Id)
		if err != nil {
			return err
		}
		node.Parts = n.Parts

		if node.Parts.Count > 0 || node.Parts.VarLen {
			for key, file := range files {
				keyn, errf := strconv.Atoi(key)
				if errf == nil && (keyn <= node.Parts.Count || node.Parts.VarLen) {
					// NOTE(review): &file aliases the range loop variable; safe
					// only if addPart does not retain the pointer past the call.
					if err = node.addPart(keyn-1, &file); err != nil {
						return err
					}
				}
			}
		} else {
			return errors.New("Unable to retrieve parts info for node.")
		}
		// all parts are in, close it
		// closeParts removes node id from LockMgr
		if !node.Parts.VarLen && node.Parts.Length == node.Parts.Count {
			if err = node.closeParts(false); err != nil {
				return err
			}
		}
	}

	return
}
Example #3
0
File: update.go Project: paczian/Shock
// Update applies one modification request (form params + uploaded files) to
// the node. The upload modes listed below are mutually exclusive; attribute,
// linkage, tag and format updates may accompany any of them.
func (node *Node) Update(params map[string]string, files FormFiles) (err error) {
	// Exclusive conditions
	// 1. has files[upload] (regular upload)
	// 2. has params[parts] (partial upload support)
	// 3. has params[type] & params[source] (v_node)
	// 4. has params[path] (set from local path)
	// 5. has params[copy_data] (create node by copying data from another node)
	// 6. has params[parent_node] (create node by specifying subset of records in a parent node)
	//
	// All condition allow setting of attributes
	//
	// Note that all paths for node operations in this function must end with "err = node.Save()" to save node state.

	// "upload" must be sent file-encoded, not as a plain form value.
	if _, uploadMisplaced := params["upload"]; uploadMisplaced {
		return errors.New("upload form field must be file encoded.")
	}

	_, isRegularUpload := files["upload"]
	_, isPartialUpload := params["parts"]

	isVirtualNode := false
	if t, hasType := params["type"]; hasType && t == "virtual" {
		isVirtualNode = true
	}
	_, isPathUpload := params["path"]
	_, isCopyUpload := params["copy_data"]
	_, isSubsetUpload := params["parent_node"]

	// Check exclusive conditions
	if (isRegularUpload && isPartialUpload) || (isRegularUpload && isVirtualNode) || (isRegularUpload && isPathUpload) || (isRegularUpload && isCopyUpload) || (isRegularUpload && isSubsetUpload) {
		return errors.New("upload parameter incompatible with parts, path, type, copy_data and/or parent_node parameter(s)")
	} else if (isPartialUpload && isVirtualNode) || (isPartialUpload && isPathUpload) || (isPartialUpload && isCopyUpload) || (isPartialUpload && isSubsetUpload) {
		return errors.New("parts parameter incompatible with type, path, copy_data and/or parent_node parameter(s)")
	} else if (isVirtualNode && isPathUpload) || (isVirtualNode && isCopyUpload) || (isVirtualNode && isSubsetUpload) {
		return errors.New("type parameter incompatible with path, copy_data and/or parent_node parameter")
	} else if (isPathUpload && isCopyUpload) || (isPathUpload && isSubsetUpload) {
		return errors.New("path parameter incompatible with copy_data and/or parent_node parameter")
	} else if isCopyUpload && isSubsetUpload {
		return errors.New("copy_data parameter incompatible with parent_node parameter")
	}

	// Check if immutable: a node that already has file content rejects any further upload.
	if (isRegularUpload || isPartialUpload || isVirtualNode || isPathUpload || isCopyUpload || isSubsetUpload) && node.HasFile() {
		return errors.New(e.FileImut)
	}

	if isRegularUpload {
		if err = node.SetFile(files["upload"]); err != nil {
			return err
		}
		delete(files, "upload")
	} else if isPartialUpload {
		node.Type = "parts"
		if params["parts"] == "unknown" {
			if err = node.initParts("unknown"); err != nil {
				return err
			}
		} else if params["parts"] == "close" {
			if err = node.closeVarLenPartial(); err != nil {
				return err
			}
		} else if node.isVarLen() || node.partsCount() > 0 {
			return errors.New("parts already set")
		} else {
			n, err := strconv.Atoi(params["parts"])
			if err != nil {
				return errors.New("parts must be an integer or 'unknown'")
			}
			if n < 1 {
				return errors.New("parts cannot be less than 1")
			}
			if err = node.initParts(params["parts"]); err != nil {
				return err
			}
		}
	} else if isVirtualNode {
		node.Type = "virtual"
		if source, hasSource := params["source"]; hasSource {
			ids := strings.Split(source, ",")
			// NOTE(review): addVirtualParts return value (if any) is not checked here.
			node.addVirtualParts(ids)
		} else {
			return errors.New("type virtual requires source parameter")
		}
	} else if isPathUpload {
		if action, hasAction := params["action"]; !hasAction || (action != "copy_file" && action != "move_file" && action != "keep_file") {
			return errors.New("path upload requires action field equal to copy_file, move_file or keep_file")
		}
		localpaths := strings.Split(conf.Conf["local-paths"], ",")
		if len(localpaths) <= 0 {
			return errors.New("local files path uploads must be configured. Please contact your Shock administrator.")
		}
		// The requested path must live under one of the configured local roots.
		var success = false
		for _, p := range localpaths {
			if strings.HasPrefix(params["path"], p) {
				if err = node.SetFileFromPath(params["path"], params["action"]); err != nil {
					return err
				} else {
					success = true
				}
			}
		}
		if !success {
			return errors.New("file not in local files path. Please contact your Shock administrator.")
		}
	} else if isCopyUpload {
		var n *Node
		n, err = LoadUnauth(params["copy_data"])
		if err != nil {
			return err
		}

		if n.File.Virtual {
			return errors.New("copy_data parameter points to a virtual node, invalid operation.")
		}

		// Copy node file information
		node.File.Name = n.File.Name
		node.File.Size = n.File.Size
		node.File.Checksum = n.File.Checksum
		node.File.Format = n.File.Format
		node.Type = "copy"

		// Copy node indexes
		if _, copyIndex := params["copy_indexes"]; copyIndex && (len(n.Indexes) > 0) {
			// loop through parent indexes
			for idxType, idxInfo := range n.Indexes {
				parentFile := n.IndexPath() + "/" + idxType + ".idx"
				if _, err := os.Stat(parentFile); err == nil {
					// copy file if exists
					if _, cerr := util.CopyFile(parentFile, node.IndexPath()+"/"+idxType+".idx"); cerr != nil {
						return cerr
					}
				}
				// copy index struct
				if err := node.SetIndexInfo(idxType, idxInfo); err != nil {
					return err
				}
			}
		} else if sizeIndex, exists := n.Indexes["size"]; exists {
			// just copy size index
			if err := node.SetIndexInfo("size", sizeIndex); err != nil {
				return err
			}
		}

		// An empty parent File.Path means the parent stores its data at the
		// default location; point at that, otherwise reuse the stored path.
		if n.File.Path == "" {
			node.File.Path = fmt.Sprintf("%s/%s.data", getPath(params["copy_data"]), params["copy_data"])
		} else {
			node.File.Path = n.File.Path
		}

		err = node.Save()
		if err != nil {
			return
		}
	} else if isSubsetUpload {
		_, hasParentIndex := params["parent_index"]
		if !hasParentIndex {
			return errors.New("parent_index is a required parameter for creating a subset node.")
		}

		var n *Node
		n, err = LoadUnauth(params["parent_node"])
		if err != nil {
			return err
		}

		if n.File.Virtual {
			return errors.New("parent_node parameter points to a virtual node, invalid operation.")
		}

		if _, indexExists := n.Indexes[params["parent_index"]]; !indexExists {
			return errors.New("Index '" + params["parent_index"] + "' does not exist for parent node.")
		}

		parentIndexFile := n.IndexPath() + "/" + params["parent_index"] + ".idx"
		if _, statErr := os.Stat(parentIndexFile); statErr != nil {
			return errors.New("Could not stat index file for parent node where parent node = '" + params["parent_node"] + "' and index = '" + params["parent_index"] + "'.")
		}

		// Copy node file information
		node.File.Name = n.File.Name
		node.File.Format = n.File.Format
		node.Type = "subset"
		node.Subset.Parent.Id = params["parent_node"]
		node.Subset.Parent.IndexName = params["parent_index"]

		if n.File.Path == "" {
			node.File.Path = fmt.Sprintf("%s/%s.data", getPath(params["parent_node"]), params["parent_node"])
		} else {
			node.File.Path = n.File.Path
		}

		if _, hasSubsetList := files["subset_indices"]; hasSubsetList {
			if err = node.SetFileFromSubset(files["subset_indices"]); err != nil {
				return err
			}
			delete(files, "subset_indices")
		} else {
			err = node.Save()
			if err != nil {
				return
			}
		}
	}

	// NOTE(review): if the isSubsetUpload branch ran above, subset_indices was
	// already consumed and deleted there, so this only fires when a
	// subset_indices file arrives without a parent_node parameter — confirm
	// that is intended.
	if _, hasSubsetList := files["subset_indices"]; hasSubsetList {
		if err = node.SetFileFromSubset(files["subset_indices"]); err != nil {
			return err
		}
		delete(files, "subset_indices")
	}

	// set attributes from file
	if _, hasAttr := files["attributes"]; hasAttr {
		if err = node.SetAttributes(files["attributes"]); err != nil {
			return err
		}
		// attributes are persisted on the node; remove the uploaded temp file
		os.Remove(files["attributes"].Path)
		delete(files, "attributes")
	}

	// set attributes from json string
	if _, hasAttrStr := params["attributes_str"]; hasAttrStr {
		if err = node.SetAttributesFromString(params["attributes_str"]); err != nil {
			return err
		}
		delete(params, "attributes_str")
	}

	// set filename string
	if _, hasFileNameStr := params["file_name"]; hasFileNameStr {
		node.File.Name = params["file_name"]
		if err = node.Save(); err != nil {
			return err
		}
		delete(params, "file_name")
	}

	// handle part file
	// NOTE(review): UnlockPartOp is invoked manually on every early return
	// below rather than via defer; keep the lock/unlock pairing intact when
	// editing this section.
	LockMgr.LockPartOp()
	parts_count := node.partsCount()
	if parts_count > 0 || node.isVarLen() {
		for key, file := range files {
			if node.HasFile() {
				LockMgr.UnlockPartOp()
				return errors.New(e.FileImut)
			}
			keyn, errf := strconv.Atoi(key)
			if errf == nil && (keyn <= parts_count || node.isVarLen()) {
				// NOTE(review): &file aliases the range loop variable; safe
				// only if addPart does not retain the pointer past the call.
				err = node.addPart(keyn-1, &file)
				if err != nil {
					LockMgr.UnlockPartOp()
					return err
				}
			} else {
				LockMgr.UnlockPartOp()
				return errors.New("invalid file parameter")
			}
		}
	} else if node.HasFile() {
		// if node has a file and user is trying to perform parts upload, return error that file is immutable.
		for key, _ := range files {
			if _, errf := strconv.Atoi(key); errf == nil {
				LockMgr.UnlockPartOp()
				return errors.New(e.FileImut)
			}
		}
	} else if parts_count == -1 {
		// if node is not variable length and user is trying to perform parts upload, return error that node is not variable
		for key, _ := range files {
			if _, errf := strconv.Atoi(key); errf == nil {
				LockMgr.UnlockPartOp()
				return errors.New("This is not a variable length node and thus does not support uploading in parts.")
			}
		}
	}

	LockMgr.UnlockPartOp()

	// update relatives
	if _, hasRelation := params["linkage"]; hasRelation {
		ltype := params["linkage"]

		// A parent linkage is immutable once set.
		if ltype == "parent" {
			if node.HasParent() {
				return errors.New(e.ProvenanceImut)
			}
		}
		var ids string
		if _, hasIds := params["ids"]; hasIds {
			ids = params["ids"]
		} else {
			return errors.New("missing ids for updating relatives")
		}
		var operation string
		if _, hasOp := params["operation"]; hasOp {
			operation = params["operation"]
		}
		if err = node.UpdateLinkages(ltype, ids, operation); err != nil {
			return err
		}
	}

	//update node tags
	if _, hasDataType := params["tags"]; hasDataType {
		if err = node.UpdateDataTags(params["tags"]); err != nil {
			return err
		}
	}

	//update file format
	if _, hasFormat := params["format"]; hasFormat {
		if node.File.Format != "" {
			return errors.New(fmt.Sprintf("file format already set:%s", node.File.Format))
		}
		if err = node.SetFileFormat(params["format"]); err != nil {
			return err
		}
	}
	return
}