// Write the File to io.Writer as xlsx
func (f *File) Write(writer io.Writer) (err error) {
	var parts map[string]string
	var zipWriter *zip.Writer
	parts, err = f.MarshallParts()
	if err != nil {
		return
	}
	zipWriter = zip.NewWriter(writer)
	for partName, part := range parts {
		var writer io.Writer
		writer, err = zipWriter.Create(partName)
		if err != nil {
			return
		}
		_, err = writer.Write([]byte(part))
		if err != nil {
			return
		}
	}
	err = zipWriter.Close()
	return
}
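A minimal usage sketch for Write, assuming f is a *File from the same package as above (the one providing MarshallParts); bytes.Buffer stands in for any io.Writer, such as an open file or an HTTP response. The helper name is hypothetical.

// writeToBuffer is a hypothetical helper that renders the workbook into memory.
func writeToBuffer(f *File) ([]byte, error) {
	var buf bytes.Buffer
	if err := f.Write(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}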
func addFileToZIP(zw *zip.Writer, item ArchiveItem) (err error) {
	binfo, err := os.Stat(item.FileSystemPath)
	if err != nil {
		return
	}
	header, err := zip.FileInfoHeader(binfo)
	if err != nil {
		return
	}
	header.Method = zip.Deflate
	// always use forward slashes, even on Windows
	header.Name = strings.Replace(item.ArchivePath, "\\", "/", -1)
	w, err := zw.CreateHeader(header)
	if err != nil {
		zw.Close()
		return
	}
	bf, err := os.Open(item.FileSystemPath)
	if err != nil {
		return
	}
	defer bf.Close()
	_, err = io.Copy(w, bf)
	return
}
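A minimal sketch of driving addFileToZIP for a list of items, assuming ArchiveItem is a struct with FileSystemPath and ArchivePath string fields (inferred from the calls above); the helper name and target path are hypothetical.

// zipItems writes every item into a new archive at target.
func zipItems(target string, items []ArchiveItem) error {
	out, err := os.Create(target)
	if err != nil {
		return err
	}
	defer out.Close()
	zw := zip.NewWriter(out)
	for _, item := range items {
		if err := addFileToZIP(zw, item); err != nil {
			return err
		}
	}
	// Closing the writer flushes the central directory; skipping it leaves a truncated archive.
	return zw.Close()
}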
// Save the File to an xlsx file at the provided path.
func (f *File) Save(path string) (err error) {
	var parts map[string]string
	var target *os.File
	var zipWriter *zip.Writer
	parts, err = f.MarshallParts()
	if err != nil {
		return
	}
	target, err = os.Create(path)
	if err != nil {
		return
	}
	zipWriter = zip.NewWriter(target)
	for partName, part := range parts {
		var writer io.Writer
		writer, err = zipWriter.Create(partName)
		if err != nil {
			return
		}
		_, err = writer.Write([]byte(part))
		if err != nil {
			return
		}
	}
	err = zipWriter.Close()
	if err != nil {
		return
	}
	return target.Close()
}
// Writes this tree to the given zip file, returning an error on failure.
func (t Stream) ToZip(w *zip.Writer) error {
	for blob := range t {
		hdr, err := zip.FileInfoHeader(blob)
		if err != nil {
			return err
		}
		writer, err := w.CreateHeader(hdr)
		if err != nil {
			return err
		}
		if blob.Contents() != nil {
			_, err := io.Copy(writer, blob.Contents())
			if err != nil {
				return err
			}
		}
		if blob.Error() != nil {
			return blob.Error()
		}
	}
	w.Close()
	return nil
}
func zipEntries(readers map[string]entryReader, out *zip.Writer) error {
	defer trace.End(trace.Begin(""))
	defer out.Close()
	defer out.Flush()

	for name, r := range readers {
		log.Infof("Collecting log with reader %s(%#v)", name, r)

		e, err := r.open()
		if err != nil {
			log.Warningf("error reading %s(%s): %s\n", name, r, err)
			continue
		}

		sz := e.Size()
		header := &zip.FileHeader{
			Name:   name,
			Method: zip.Deflate,
		}
		header.SetModTime(time.Now())
		header.SetMode(0644)
		if sz > uint32max {
			header.UncompressedSize = uint32max
		} else {
			header.UncompressedSize = uint32(e.Size())
		}

		w, err := out.CreateHeader(header)
		if err != nil {
			log.Errorf("Failed to create Zip writer for %s: %s", header.Name, err)
			continue
		}

		log.Infof("%s has size %d", header.Name, sz)

		// be explicit about the number of bytes to copy as the log files will likely
		// be written to during this exercise
		_, err = io.CopyN(w, e, sz)
		_ = e.Close()
		if err != nil {
			log.Errorf("Failed to write content for %s: %s", header.Name, err)
			continue
		}
		log.Infof("Wrote %d bytes to %s", sz, header.Name)
	}

	return nil
}
// Write the File to io.Writer as xlsx
func (f *File) Write(writer io.Writer, fn func(total int, current int) string) (err error) {
	var parts map[string]string
	var zipWriter *zip.Writer
	parts, err = f.MarshallParts()
	if err != nil {
		return
	}
	zipWriter = zip.NewWriter(writer)
	count := 0
	for partName, part := range parts {
		var writer io.Writer
		writer, err = zipWriter.Create(partName)
		if err != nil {
			return
		}
		_, err = writer.Write([]byte(part))
		if err != nil {
			return
		}
		for {
			ctrl := fn(len(parts), count)
			if ctrl == "pause" {
				time.Sleep(1 * time.Second)
			} else if ctrl == "export" || ctrl == "resume" {
				break
			} else if ctrl == "cancel" {
				zipWriter.Close()
				return
			}
		}
		count++
	}
	fn(len(parts), len(parts))
	err = zipWriter.Close()
	return
}
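A minimal sketch of a progress callback for the Write variant above, assuming only the control strings already checked in the loop ("export", "pause", "resume", "cancel"); the callback name is hypothetical.

// progressLogger is a hypothetical callback: it reports progress and always lets
// the export continue. Returning "pause" would make Write sleep and poll again;
// returning "cancel" aborts the export.
func progressLogger(total int, current int) string {
	log.Printf("exported %d of %d parts", current, total)
	return "export"
}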
func ZipFolder(folder, outName string) (err error) {
	var (
		dirToZip   *os.File
		filesToZip []string
		zFile      io.Writer
		zipFile    *os.File
		zipWriter  *zip.Writer
		content    []byte
	)
	if dirToZip, err = os.Open(folder); err != nil {
		return
	}
	defer dirToZip.Close()
	if filesToZip, err = dirToZip.Readdirnames(-1); err != nil {
		return
	}
	if zipFile, err = os.Create(outName); err != nil {
		return
	}
	defer zipFile.Close()
	zipWriter = zip.NewWriter(zipFile)
	for _, fileName := range filesToZip {
		if content, err = ioutil.ReadFile(filepath.Join(folder, fileName)); err != nil {
			return
		}
		if zFile, err = zipWriter.Create(fileName); err != nil {
			return
		}
		if _, err = zFile.Write(content); err != nil {
			return
		}
	}
	return zipWriter.Close()
}
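A minimal sketch of calling ZipFolder; the directory and archive names are hypothetical. Since ZipFolder lists entries with Readdirnames and reads each one with ioutil.ReadFile, it only handles a flat folder of regular files and will typically return an error if the folder contains subdirectories.

// Hypothetical caller: archive the top-level files of ./logs into logs.zip.
func exampleZipFolder() {
	if err := ZipFolder("./logs", "logs.zip"); err != nil {
		log.Fatalf("zip failed: %v", err)
	}
}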
func main() {
	debug := flag.Bool("debug", false, "print debug info")
	view := flag.Bool("v", false, "view list")
	out := flag.String("out", "", "write recovered files to output zip file")
	override := flag.Bool("override", false, "override existing files")

	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "usage: %s [options] {zip-file}\n", path.Base(os.Args[0]))
		fmt.Fprintf(os.Stderr, "options:\n")
		flag.PrintDefaults()
	}

	flag.Parse()
	if flag.NArg() != 1 {
		flag.Usage()
		return
	}

	zipfile := flag.Arg(0)
	f, err := os.Open(zipfile)
	if err != nil {
		log.Fatal("open ", err)
	}
	defer f.Close()

	r := bufio.NewReader(f)

	var outz *zip.Writer

	create_flags := os.O_RDWR | os.O_CREATE | os.O_TRUNC
	if !*override {
		create_flags |= os.O_EXCL
	}

	if len(*out) > 0 {
		outf, err := os.OpenFile(*out, create_flags, 0666)
		if err != nil {
			log.Fatal("create output ", err)
		}
		outz = zip.NewWriter(outf)
		defer func() {
			outz.Close()
			outf.Close()
		}()
	}

Loop:
	for {
		var fh [fileHeaderLen]byte
		if _, err := io.ReadFull(r, fh[:]); err != nil {
			log.Println("file header", err)
			break Loop
		}
		if *debug {
			fmt.Println(hex.Dump(fh[:]))
		}

		b := readBuf(fh[:])
		magic := b.uint32()
		version := b.uint16()
		flags := b.uint16()
		comp := b.uint16()
		ctime := b.uint16()
		cdate := b.uint16()
		crc32 := b.uint32()
		clen := b.uint32()
		ulen := b.uint32()
		flen := b.uint16()
		elen := b.uint16()
		ctype := ""

		if magic == directoryHeaderSignature {
			// got central directory. Done
			log.Println("found central directory")
			break Loop
		}
		if magic != fileHeaderSignature {
			log.Println("invalid file header signature ", fmt.Sprintf("%08x", magic))
			break Loop
		}

		if *debug {
			fmt.Println()
			fmt.Printf("magic %08x\n", magic)
			fmt.Printf("version %04x\n", version)
			fmt.Printf("flags %04x\n", flags)
			fmt.Printf("comp %04x\n", comp)
			fmt.Printf("time %04x\n", ctime)
			fmt.Printf("date %04x\n", cdate)
			fmt.Printf("crc32 %08x\n", crc32)
			fmt.Printf("compressed size %d\n", clen)
			fmt.Printf("uncompressed size %d\n", ulen)
			fmt.Printf("filename length %d\n", flen)
			fmt.Printf("extra length %d\n", elen)
		}

		fn := make([]byte, flen)
		if _, err := io.ReadFull(r, fn); err != nil {
			log.Println("read file name", err)
			break Loop
		}
		if *debug {
			fmt.Println()
			fmt.Println("filename", string(fn))
		}

		if elen > 0 {
			if _, err := io.CopyN(ioutil.Discard, r, int64(elen)); err != nil {
				log.Println("read extra", err)
				break Loop
			}
		}

		filename := string(fn)

		switch comp {
		case zip.Deflate:
			ctype = "Defl:N"

			var w io.Writer
			if *view {
				w = ioutil.Discard
			} else if outz != nil {
				fmt.Println("adding:", filename)
				if f, err := outz.Create(filename); err != nil {
					log.Fatal("create zip entry ", filename, err)
				} else {
					w = f
				}
			} else {
				fmt.Println("inflating:", filename)
				dir := filepath.Dir(filename)
				if dir != "" {
					if err := os.MkdirAll(dir, 0755); err != nil {
						log.Println("mkdir", dir, err)
					}
				}
				if f, err := os.OpenFile(filename, create_flags, 0666); err != nil {
					log.Fatal("create ", filename, err)
				} else {
					w = f
				}
			}

			dec := flate.NewReader(r)
			n, err := io.Copy(w, dec)
			if *debug {
				fmt.Println("decoded", n, "bytes")
			}
			if err != nil {
				if wc, ok := w.(io.Closer); ok {
					wc.Close()
					os.Remove(filename)
				}
				log.Println("decode file", err)
				break Loop
			} else {
				dec.Close()
				if wc, ok := w.(io.Closer); ok {
					wc.Close()
				}
			}

		case zip.Store:
			ctype = "Stored"
			if ulen > 0 {
				n, err := io.CopyN(ioutil.Discard, r, int64(ulen))
				if *debug {
					fmt.Println("read", n, "bytes")
				}
				if err != nil {
					log.Fatal("read file ", err)
				}
			} else {
				log.Fatal("missing length")
			}

		default:
			log.Fatal("unsupported compression mode ", comp)
		}

		if (flags & 0x08) != 0 {
			// data descriptor
			var dd [dataDescriptorLen]byte
			if _, err := io.ReadFull(r, dd[:]); err != nil {
				log.Fatal("data descriptor", err)
			}
			b := readBuf(dd[:])
			magic := b.uint32()
			crc32 = b.uint32()
			clen = b.uint32()
			ulen = b.uint32()
			if magic != dataDescriptorSignature {
				log.Fatal("invalid data descriptor signature ", magic)
			}
			if *debug {
				fmt.Println()
				fmt.Printf("magic %08x\n", magic)
				fmt.Printf("crc32 %08x\n", crc32)
				fmt.Printf("compressed size %d\n", clen)
				fmt.Printf("uncompressed size %d\n", ulen)
			}
		}

		if *view {
			pc := 0
			if ulen != 0 {
				pc = 100 - int(clen*100/ulen)
			}
			fmt.Printf("%8d %6s %8d %2d%% %08x %s\n", ulen, ctype, clen, pc, crc32, filename)
		}
	}
}
func savez2(q *Context) {
	var fpz, fpgz *os.File
	var z *zip.Writer
	var gz *gzip.Reader
	var dact interface{}
	var err error
	var dirname, fulldirname string
	var okall bool

	defer func() {
		if z != nil {
			z.Close()
		}
		if fpz != nil {
			fpz.Close()
		}
		if gz != nil {
			gz.Close()
		}
		if fpgz != nil {
			fpgz.Close()
		}
		saveCloseDact(dact)
		if !okall {
			os.RemoveAll(fulldirname)
			q.db.Exec(fmt.Sprintf("DELETE FROM `%s_info` WHERE `id` = %q", Cfg.Prefix, dirname))
		}
	}()

	protected := 0

	if !q.auth {
		http.Error(q.w, "Je bent niet ingelogd", http.StatusUnauthorized)
		return
	}

	corpora := make([]string, 0, len(q.form.Value["db"]))
	for _, c := range q.form.Value["db"] {
		if s := strings.TrimSpace(c); s != "" {
			corpora = append(corpora, s)
		}
	}
	for _, corpus := range corpora {
		if !q.prefixes[corpus] {
			http.Error(q.w, "Geen toegang tot corpus", http.StatusUnauthorized)
			return
		}
		if q.protected[corpus] || !q.myprefixes[corpus] {
			protected = 1
		}
	}
	if len(corpora) == 0 {
		writeHtml(q, "Fout", "Geen corpora gekozen")
		return
	}

	word := firstf(q.form, "word")
	rel := firstf(q.form, "rel")
	hword := firstf(q.form, "hword")
	postag := firstf(q.form, "postag")
	hpostag := firstf(q.form, "hpostag")
	meta := firstf(q.form, "meta")

	if word == "" && hword == "" && rel == "" && postag == "" && hpostag == "" && meta == "" {
		writeHtml(q, "Fout", "Zoektermen ontbreken")
		return
	}

	title := maxtitlelen(firstf(q.form, "title"))
	if title == "" {
		writeHtml(q, "Fout", "Titel ontbreekt")
		return
	}

	maxdup, _ := strconv.Atoi(firstf(q.form, "maxdup"))
	if maxdup < 1 || maxdup > Cfg.Maxdup {
		maxdup = Cfg.Maxdup
	}

	dirname, fulldirname, ok := beginNewCorpus(q, q.db, title, hErr)
	if !ok {
		return
	}

	fpz, err = os.Create(fulldirname + "/data")
	if hErr(q, err) {
		fpz = nil
		return
	}
	z = zip.NewWriter(fpz)

	linecount := 0
	chClose := make(<-chan bool)

	for _, prefix := range corpora {
		if linecount == maxdup && maxdup > 0 {
			break
		}

		global, ok := isGlobal(q, prefix)
		if !ok {
			return
		}
		pathlen, ok := getPathLen(q, prefix, global, false)
		if !ok {
			return
		}

		query, joins, usererr, syserr := makeQueryF(q, prefix, "c", chClose)
		if hErr(q, syserr) {
			return
		}
		if uhErr(q, usererr) {
			return
		}

		query = fmt.Sprintf(
			"SELECT DISTINCT `f`.`file`, `c`.`arch` FROM `%s_c_%s_deprel` `c` "+
				"JOIN `%s_c_%s_file` `f` ON (`f`.`id`=`c`.`file`) %s WHERE %s",
			Cfg.Prefix, prefix, Cfg.Prefix, prefix, joins, query)

		rows, err := q.db.Query(query)
		if hErr(q, err) {
			return
		}

		currentarch := -1
		dact = nil
		var arch int
		var filename, dactname string

		for rows.Next() {
			if linecount == maxdup && maxdup > 0 {
				rows.Close()
				break
			}
			err = rows.Scan(&filename, &arch)
			if hErr(q, err) {
				rows.Close()
				return
			}

			var data []byte
			if arch < 0 {
				fpgz, err = os.Open(filename + ".gz")
				if err == nil {
					gz, err = gzip.NewReader(fpgz)
					if hErr(q, err) {
						gz = nil
						rows.Close()
						return
					}
					data, err = ioutil.ReadAll(gz)
					if hErr(q, err) {
						rows.Close()
						return
					}
					gz.Close()
					gz = nil
					fpgz.Close()
					fpgz = nil
				} else {
					fpgz, err = os.Open(filename)
					if hErr(q, err) {
						fpgz = nil
						rows.Close()
						return
					}
					data, err = ioutil.ReadAll(fpgz)
					if hErr(q, err) {
						rows.Close()
						return
					}
					fpgz.Close()
					fpgz = nil
				}
			} else {
				if arch != currentarch {
					currentarch = arch
					saveCloseDact(dact)
					dact, dactname = saveOpenDact(q, prefix, arch)
				}
				data = saveGetDact(q, dact, filename)
			}

			var newfile string
			if arch < 0 {
				newfile = filename[pathlen:]
				if !global {
					if strings.Contains(q.params[prefix], "-lbl") ||
						strings.HasPrefix(q.params[prefix], "folia") ||
						strings.HasPrefix(q.params[prefix], "tei") {
						newfile = decode_filename(newfile[10:])
					} else if strings.HasPrefix(q.params[prefix], "xmlzip") || q.params[prefix] == "dact" {
						newfile = decode_filename(newfile[5:])
					}
				}
			} else {
				newfile = dactname[pathlen:] + "::" + filename
			}
			if len(corpora) > 1 {
				newfile = prefix + "/" + newfile
				data = xmlSetSource(data, prefix)
			}

			f, err := z.Create(newfile)
			if hErr(q, err) {
				rows.Close()
				return
			}
			_, err = f.Write(data)
			if hErr(q, err) {
				rows.Close()
				return
			}
			linecount++
		} // for rows.Next()
		err = rows.Err()
		if hErr(q, err) {
			return
		}
		saveCloseDact(dact)
		dact = nil
	}

	err = z.Close()
	z = nil
	if hErr(q, err) {
		return
	}
	fpz.Close()
	fpz = nil

	s := "xmlzip-d"
	if protected != 0 {
		s = "xmlzip-p"
	}

	newCorpus(q, q.db, dirname, title, s, protected, hErr, true)

	okall = true
}
func xsavez2(q *Context) {
	var fpz, fpgz *os.File
	var z *zip.Writer
	var gz *gzip.Reader
	var dact *dbxml.Db
	var docs *dbxml.Docs
	var dirname, fulldirname string
	var okall bool

	defer func() {
		if z != nil {
			z.Close()
		}
		if fpz != nil {
			fpz.Close()
		}
		if gz != nil {
			gz.Close()
		}
		if fpgz != nil {
			fpgz.Close()
		}
		if docs != nil {
			docs.Close()
		}
		if dact != nil {
			dact.Close()
		}
		if !okall {
			os.RemoveAll(fulldirname)
			q.db.Exec(fmt.Sprintf("DELETE FROM `%s_info` WHERE `id` = %q", Cfg.Prefix, dirname))
		}
	}()

	protected := 0

	if !q.auth {
		http.Error(q.w, "Je bent niet ingelogd", http.StatusUnauthorized)
		return
	}

	methode := firstf(q.form, "mt")
	if methode != "dx" {
		methode = "std"
	}

	corpora := make([]string, 0, len(q.form.Value["db"]))
	for _, c := range q.form.Value["db"] {
		if s := strings.TrimSpace(c); s != "" {
			corpora = append(corpora, s)
		}
	}
	for _, corpus := range corpora {
		if !q.prefixes[corpus] {
			http.Error(q.w, "Geen toegang tot corpus", http.StatusUnauthorized)
			return
		}
		if q.protected[corpus] || !q.myprefixes[corpus] {
			protected = 1
		}
	}
	if len(corpora) == 0 {
		writeHtml(q, "Fout", "Geen corpora gekozen")
		return
	}

	xpath := firstf(q.form, "xpath")
	if xpath == "" {
		writeHtml(q, "Fout", "Zoekterm ontbreekt")
		return
	}

	title := maxtitlelen(firstf(q.form, "title"))
	if title == "" {
		writeHtml(q, "Fout", "Titel ontbreekt")
		return
	}

	maxdup, _ := strconv.Atoi(firstf(q.form, "maxdup"))
	if maxdup < 1 || maxdup > Cfg.Maxdup {
		maxdup = Cfg.Maxdup
	}

	dirname, fulldirname, ok := beginNewCorpus(q, q.db, title, hErr)
	if !ok {
		return
	}

	fpz, err := os.Create(fulldirname + "/data")
	if hErr(q, err) {
		fpz = nil
		return
	}
	z = zip.NewWriter(fpz)

	linecount := 0

	for _, prefix := range corpora {
		if linecount == maxdup && maxdup > 0 {
			break
		}

		global, ok := isGlobal(q, prefix)
		if !ok {
			return
		}
		pathlen, ok := getPathLen(q, prefix, global, true)
		if !ok {
			return
		}

		dactfiles := make([]string, 0)
		if !global {
			dactfiles = append(dactfiles, fmt.Sprintf("%s/data/%s/data.dact", paqudir, prefix))
		} else {
			rows, err := q.db.Query(fmt.Sprintf("SELECT `arch` FROM `%s_c_%s_arch` ORDER BY `id`", Cfg.Prefix, prefix))
			if hErr(q, err) {
				return
			}
			for rows.Next() {
				var s string
				if hErr(q, rows.Scan(&s)) {
					rows.Close()
					return
				}
				if strings.HasSuffix(s, ".dact") {
					dactfiles = append(dactfiles, s)
				}
			}
			if hErr(q, rows.Err()) {
				return
			}
		}

		fullquery := xpath
		if strings.Contains(xpath, "%") {
			rules := getMacrosRules(q)
			fullquery = macroKY.ReplaceAllStringFunc(xpath, func(s string) string {
				return rules[s[1:len(s)-1]]
			})
		}
		queryparts := strings.Split(fullquery, "+|+")

		for _, dactfile := range dactfiles {
			if linecount == maxdup && maxdup > 0 {
				break
			}
			if Cfg.Dactx && methode == "dx" {
				dactfile += "x"
			}

			var data []byte
			dact, err = dbxml.Open(dactfile)
			if hErr(q, err) {
				dact = nil
				return
			}

			qu, err := dact.Prepare(queryparts[0])
			if hErr(q, err) {
				return
			}
			docs, err = qu.Run()
			if hErr(q, err) {
				docs = nil
				return
			}

			seen := make(map[string]bool)

		NEXTDOC:
			for docs.Next() {
				if linecount == maxdup && maxdup > 0 {
					break
				}

				filename := docs.Name()
				if seen[filename] {
					continue
				}
				seen[filename] = true

				found := false
				if len(queryparts) == 1 {
					found = true
					data = []byte(docs.Content())
				} else {
					doctxt := fmt.Sprintf("[dbxml:metadata('dbxml:name')=%q]", filename)
					for i := 1; i < len(queryparts)-1; i++ {
						docs2, err := dact.Query(doctxt + queryparts[i])
						if hErr(q, err) {
							return
						}
						if !docs2.Next() {
							continue NEXTDOC
						}
						docs2.Close()
					}
					docs2, err := dact.Query(doctxt + queryparts[len(queryparts)-1])
					if hErr(q, err) {
						return
					}
					found = false
					if docs2.Next() {
						found = true
						data = []byte(docs2.Content())
						docs2.Close()
					}
				}
				if !found {
					continue
				}

				newfile := filename
				if global {
					newfile = dactfile[pathlen:len(dactfile)-5] + "::" + filename
				}
				if len(corpora) > 1 {
					newfile = prefix + "/" + newfile
					data = xmlSetSource(data, prefix)
				}

				f, err := z.Create(newfile)
				if hErr(q, err) {
					return
				}
				if methode == "dx" {
					data, err = unexpandDact(data)
					if hErr(q, err) {
						return
					}
				}
				_, err = f.Write(data)
				if hErr(q, err) {
					return
				}
				linecount++
			} // for docs.Next()
			err = docs.Error()
			docs = nil
			if hErr(q, err) {
				return
			}
			dact.Close()
			dact = nil
		} // for range dactfiles
	} // for range corpora

	err = z.Close()
	z = nil
	if hErr(q, err) {
		return
	}
	fpz.Close()
	fpz = nil

	s := "xmlzip-d"
	if protected != 0 {
		s = "xmlzip-p"
	}

	newCorpus(q, q.db, dirname, title, s, protected, hErr, true)

	okall = true
}
// Utility method to add an item to a Zip archive
func addToZip(zipWriter *zip.Writer, source string, rootSource string, logger *logrus.Logger) error {
	fullPathSource, err := filepath.Abs(source)
	if nil != err {
		return err
	}

	appendFile := func(sourceFile string) error {
		// Get the relative path
		var name = filepath.Base(sourceFile)
		if sourceFile != rootSource {
			name = strings.TrimPrefix(strings.TrimPrefix(sourceFile, rootSource), string(os.PathSeparator))
		}
		binaryWriter, err := zipWriter.Create(name)
		if err != nil {
			return fmt.Errorf("Failed to create ZIP entry: %s", filepath.Base(sourceFile))
		}
		reader, err := os.Open(sourceFile)
		if err != nil {
			return fmt.Errorf("Failed to open file: %s", sourceFile)
		}
		defer reader.Close()
		io.Copy(binaryWriter, reader)
		logger.WithFields(logrus.Fields{
			"Path": sourceFile,
		}).Debug("Archiving file")
		return nil
	}

	directoryWalker := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		header.Name = strings.TrimPrefix(strings.TrimPrefix(path, rootSource), string(os.PathSeparator))
		if info.IsDir() {
			header.Name += "/"
		} else {
			header.Method = zip.Deflate
		}
		writer, err := zipWriter.CreateHeader(header)
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		_, err = io.Copy(writer, file)
		return err
	}

	fileInfo, err := os.Stat(fullPathSource)
	if nil != err {
		return err
	}
	switch mode := fileInfo.Mode(); {
	case mode.IsDir():
		err = filepath.Walk(fullPathSource, directoryWalker)
	case mode.IsRegular():
		err = appendFile(fullPathSource)
	default:
		err = errors.New("Invalid source type")
	}
	zipWriter.Close()
	return err
}
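A minimal usage sketch for addToZip; the archive name and source path are hypothetical. Note that addToZip closes the zip.Writer itself, so each call is expected to produce a complete archive rather than append entries to a shared writer.

// Hypothetical caller: bundle a build directory into bundle.zip.
func exampleAddToZip(logger *logrus.Logger) error {
	out, err := os.Create("bundle.zip")
	if err != nil {
		return err
	}
	defer out.Close()
	zipWriter := zip.NewWriter(out)
	// addToZip walks ./build and closes zipWriter before returning.
	return addToZip(zipWriter, "./build", "./build", logger)
}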
func saveIpaDic(d *IpaDic, base string, archive bool) (err error) {
	var zw *zip.Writer
	if archive {
		p := path.Join(base, ipaDicArchiveFileName)
		f, e := os.Create(p)
		if e != nil {
			return e
		}
		defer f.Close()
		zw = zip.NewWriter(f)
	}

	if err = func() (e error) {
		p := path.Join(base, ipaDicMorphFileName)
		var out io.Writer
		if archive {
			out, e = zw.Create(p)
			if e != nil {
				return
			}
		} else {
			var f *os.File
			if f, e = os.OpenFile(p, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0666); e != nil {
				return
			}
			defer f.Close()
			out = f
		}
		if _, e = dic.MorphSlice(d.Morphs).WriteTo(out); e != nil {
			return
		}
		// if e = enc.Encode(d.Morphs); e != nil {
		//	return
		// }
		// if _, e = buf.WriteTo(out); e != nil {
		//	return
		// }
		var buf bytes.Buffer
		enc := gob.NewEncoder(&buf)
		if e = enc.Encode(d.Contents); e != nil {
			return
		}
		if _, e = buf.WriteTo(out); e != nil {
			return
		}
		return
	}(); err != nil {
		return
	}

	if err = func() (e error) {
		p := path.Join(base, ipaDicIndexFileName)
		var out io.Writer
		if archive {
			if out, e = zw.Create(p); e != nil {
				return
			}
		} else {
			var f *os.File
			if f, e = os.OpenFile(p, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0666); e != nil {
				return
			}
			defer f.Close()
			out = f
		}
		if _, e := d.Index.WriteTo(out); e != nil {
			return e
		}
		return nil
	}(); err != nil {
		return
	}

	if err = func() (e error) {
		p := path.Join(base, ipaDicConnectionFileName)
		var out io.Writer
		if archive {
			if out, e = zw.Create(p); e != nil {
				return
			}
		} else {
			var f *os.File
			if f, e = os.OpenFile(p, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0666); e != nil {
				return
			}
			defer f.Close()
			out = f
		}
		if _, e = d.Connection.WriteTo(out); e != nil {
			return e
		}
		// var buf bytes.Buffer
		// enc := gob.NewEncoder(&buf)
		// if e = enc.Encode(d.Connection); e != nil {
		//	return e
		// }
		// if _, e = buf.WriteTo(out); e != nil {
		//	return e
		// }
		return e
	}(); err != nil {
		return
	}

	if err = func() (e error) {
		p := path.Join(base, ipaDicCharDefFileName)
		var out io.Writer
		if archive {
			if out, e = zw.Create(p); e != nil {
				return
			}
		} else {
			var f *os.File
			if f, e = os.OpenFile(p, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0666); e != nil {
				return
			}
			defer f.Close()
			out = f
		}
		var buf bytes.Buffer
		enc := gob.NewEncoder(&buf)
		if e = enc.Encode(d.CharClass); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.CharCategory); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.InvokeList); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.GroupList); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		return nil
	}(); err != nil {
		return
	}

	if err = func() (e error) {
		p := path.Join(base, ipaDicUnkFileName)
		var out io.Writer
		if archive {
			if out, e = zw.Create(p); e != nil {
				return
			}
		} else {
			var f *os.File
			if f, e = os.OpenFile(p, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0666); e != nil {
				return
			}
			defer f.Close()
			out = f
		}
		var buf bytes.Buffer
		enc := gob.NewEncoder(&buf)
		if e = enc.Encode(d.UnkMorphs); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.UnkIndex); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.UnkIndexDup); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		if e = enc.Encode(d.UnkContents); e != nil {
			return e
		}
		if _, e = buf.WriteTo(out); e != nil {
			return e
		}
		return nil
	}(); err != nil {
		return
	}

	if archive {
		err = zw.Close()
	}
	return
}