	Subcommands: map[string]*cmds.Command{
		"add": tarAddCmd,
		"cat": tarCatCmd,
	},
}

var tarAddCmd = &cmds.Command{
	Helptext: cmds.HelpText{
		Tagline: "import a tar file into ipfs",
		ShortDescription: `
'ipfs tar add' will parse a tar file and create a merkledag structure to
represent it.
`,
	},

	Arguments: []cmds.Argument{
		cmds.FileArg("file", true, false, "tar file to add").EnableStdin(),
	},
	Run: func(req cmds.Request, res cmds.Response) {
		nd, err := req.InvocContext().GetNode()
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}

		fi, err := req.Files().NextFile()
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}

		node, err := tar.ImportTar(fi, nd.DAG)
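// Illustrative usage (not part of the original source): because the "file"
// argument has EnableStdin(), a tarball can be piped straight in, e.g.
//
//	tar cf - ./somedir | ipfs tar add
//
// The command prints the hash of the resulting merkledag root, which the
// sibling "ipfs tar cat" subcommand can turn back into a tar stream.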
		res.SetOutput(bytes.NewReader(b.Data))
	},
}

var blockPutCmd = &cmds.Command{
	Helptext: cmds.HelpText{
		Tagline: "Stores input as an IPFS block",
		ShortDescription: `
ipfs block put is a plumbing command for storing raw ipfs blocks.
It reads from stdin, and <key> is a base58 encoded multihash.
`,
	},

	Arguments: []cmds.Argument{
		cmds.FileArg("data", true, false, "The data to be stored as an IPFS block").EnableStdin(),
	},
	Run: func(req cmds.Request, res cmds.Response) {
		n, err := req.InvocContext().GetNode()
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}

		file, err := req.Files().NextFile()
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}

		data, err := ioutil.ReadAll(file)
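// Illustrative usage (not part of the original source): the "data" argument
// also has EnableStdin(), so a raw block is normally piped in, e.g.
//
//	echo "hello" | ipfs block put
//
// The command prints the base58 multihash key of the stored block, which
// "ipfs block get <key>" reads back out.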
			res.SetError(err, cmds.ErrNormal)
		}
	},
}

var configReplaceCmd = &cmds.Command{
	Helptext: cmds.HelpText{
		Tagline: "Replaces the config with <file>",
		ShortDescription: `
Make sure to back up the config file first if necessary; this operation
can't be undone.
`,
	},

	Arguments: []cmds.Argument{
		cmds.FileArg("file", true, false, "The file to use as the new config"),
	},
	Run: func(req cmds.Request, res cmds.Response) {
		r, err := fsrepo.Open(req.InvocContext().ConfigRoot)
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}
		defer r.Close()

		file, err := req.Files().NextFile()
		if err != nil {
			res.SetError(err, cmds.ErrNormal)
			return
		}
		defer file.Close()
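// Illustrative usage (not part of the original source): since the replacement
// cannot be undone, a sensible workflow is to dump the active config first,
// e.g.
//
//	ipfs config show > config.bak
//	ipfs config replace new-config.json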
	Bytes int64 `json:",omitempty"`
}

var AddCmd = &cmds.Command{
	Helptext: cmds.HelpText{
		Tagline: "Add an object to ipfs.",
		ShortDescription: `
Adds contents of <path> to ipfs. Use -r to add directories.
Note that directories are added recursively, to form the ipfs
MerkleDAG. A smarter partial add with a staging area (like git)
remains to be implemented.
`,
	},

	Arguments: []cmds.Argument{
		cmds.FileArg("path", true, true, "The path to a file to be added to IPFS").EnableRecursive().EnableStdin(),
	},
	Options: []cmds.Option{
		cmds.OptionRecursivePath, // a builtin option that allows recursive paths (-r, --recursive)
		cmds.BoolOption(quietOptionName, "q", "Write minimal output"),
		cmds.BoolOption(progressOptionName, "p", "Stream progress data"),
		cmds.BoolOption(trickleOptionName, "t", "Use trickle-dag format for dag generation"),
		cmds.BoolOption(onlyHashOptionName, "n", "Only chunk and hash - do not write to disk"),
		cmds.BoolOption(wrapOptionName, "w", "Wrap files with a directory object"),
		cmds.BoolOption(hiddenOptionName, "Include files that are hidden"),
		cmds.StringOption(chunkerOptionName, "s", "chunking algorithm to use"),
	},
	PreRun: func(req cmds.Request) error {
		if quiet, _, _ := req.Option(quietOptionName).Bool(); quiet {
			return nil
		}
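// Illustrative invocations (not part of the original source), matching the
// options declared above:
//
//	ipfs add -r ./site       # add a directory recursively
//	ipfs add -n big.iso      # only chunk and hash, write nothing to disk
//	ipfs add -w photo.jpg    # wrap the file in a directory object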
"Data": "another", "Links": [ { "Name": "some link", "Hash": "QmXg9Pp2ytZ14xgmQjYEiHjVjMFXzCVVEcRTWJBmLgR39V", "Size": 8 } ] } and then run ipfs object put node.json `, }, Arguments: []cmds.Argument{ cmds.FileArg("data", true, false, "Data to be stored as a DAG object").EnableStdin(), }, Options: []cmds.Option{ cmds.StringOption("inputenc", "Encoding type of input data, either \"protobuf\" or \"json\""), }, Run: func(req cmds.Request, res cmds.Response) { n, err := req.InvocContext().GetNode() if err != nil { res.SetError(err, cmds.ErrNormal) return } input, err := req.Files().NextFile() if err != nil && err != io.EOF { res.SetError(err, cmds.ErrNormal) return