func (b *diskBackend) Restore() {
	if b.tail {
		log.Info("restoring with tail ", b.directory+"/unc/"+b.filename)
		b.RestoreTail(b.directory + "/unc/" + b.filename)
		return
	}
	log.Info("restoring gzipped file ", b.directory+"/"+b.filename)
	f, err := os.Open(b.directory + "/" + b.filename)
	if err != nil {
		return
	}
	defer f.Close()
	gzrd, err := gzip.NewReader(f)
	if err != nil {
		return
	}
	defer gzrd.Close()
	rd := bufio.NewReader(gzrd)
	t0 := time.Now()
	i := 1
	for {
		input, err := rd.ReadBytes('\n')
		if err != nil {
			// close the file explicitly before removing it so the remove
			// succeeds on all platforms
			f.Close()
			log.Info("disk: restored ", i, " removing ", b.filename)
			os.Remove(b.directory + "/" + b.filename)
			return
		}
		// strip the trailing newline before handing the line off
		input2 := input[:len(input)-1]
		b.ctx.parsedLines <- &input2
		if time.Since(t0).Seconds() > 5 {
			log.Info("disk: restoring ...")
			t0 = time.Now()
		}
		i++
	}
}
func OpenDecompress(fn string) (r io.ReadCloser, err error) {
	f, err := os.Open(fn)
	if err != nil {
		return nil, err
	}
	ext := filepath.Ext(fn)
	switch ext {
	case ".log", ".txt":
		return f, nil
	case ".gz":
		gzr, err := gzip.NewReader(f)
		if err != nil {
			// don't leak the file handle on a bad gzip header
			f.Close()
			return nil, err
		}
		return &WrappedDecompressor{
			ReadCloser: gzr,
			wrapped:    f,
		}, nil
	case ".bz2":
		bzr := bzip2.NewReader(f)
		return &WrappedDecompressor{
			ReadCloser: ioutil.NopCloser(bzr),
			wrapped:    f,
		}, nil
	default:
		return f, nil
	}
}
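// A minimal usage sketch: OpenDecompress picks a reader based on the file
// extension, so callers can treat every input the same way ("input.gz" is a
// hypothetical path).
func openDecompressSketch() {
	rc, err := OpenDecompress("input.gz")
	if err != nil {
		log.Fatal(err)
	}
	defer rc.Close()
	n, err := io.Copy(ioutil.Discard, rc) // count the decompressed bytes
	fmt.Println(n, err)
}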
func NewDecoder(r io.Reader) (*MetadataDecoder, error) {
	m := MetadataDecoder{}

	// read and verify the header
	var h Header
	d := xdr.NewDecoder(r)
	_, err := d.Decode(&h)
	if err != nil {
		return nil, err
	}
	if h.Version != Version {
		return nil, ErrVersion
	}

	// pick a decoder based on the compression flag
	switch {
	case bytes.Equal(h.Compression[:], CompNone[:]):
		m.d = d
	case bytes.Equal(h.Compression[:], CompGZIP[:]):
		br, err := pgzip.NewReader(r)
		if err != nil {
			return nil, err
		}
		m.d = xdr.NewDecoder(br)
	default:
		return nil, ErrCompression
	}
	return &m, nil
}
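// A minimal usage sketch (hypothetical: assumes MetadataDecoder forwards
// Decode to its inner xdr decoder, which the snippet above does not show):
//
//	f, err := os.Open("metadata.xdr")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer f.Close()
//	dec, err := NewDecoder(f)
//	if err != nil {
//		log.Fatal(err) // ErrVersion or ErrCompression on a bad header
//	}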
func (c *Compressor) BenchmarkPGZIPReader(b *testing.B) {
	cr, err := pgzip.NewReader(c.w)
	if err != nil {
		b.Fatal(err)
	}
	b.ResetTimer()
	_, err = io.Copy(ioutil.Discard, cr)
	if err != nil {
		b.Fatal(err)
	}
}
// NewGzLine reads one password per line until
// 0x0a (newline) is encountered.
// The input is assumed to be gzip compressed.
// Input is streamed.
func NewGzLine(r io.Reader) (*LineReader, error) {
	l := &LineReader{in: r}
	var err error
	l.gr, err = gzip.NewReader(r)
	if err != nil {
		return nil, err
	}
	l.br = bufio.NewReader(l.gr)
	return l, nil
}
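// A minimal usage sketch: wrap any gzip-compressed stream, e.g. a file
// ("wordlist.gz" is a hypothetical path; reading lines afterwards depends
// on LineReader methods not shown here):
//
//	f, err := os.Open("wordlist.gz")
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer f.Close()
//	lr, err := NewGzLine(f)
//	if err != nil {
//		log.Fatal(err) // fails fast on a missing or invalid gzip header
//	}
//	_ = lr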
func NaClDecrypt(body []byte, key *[KeySize]byte) (*Header, []byte, error) {
	if len(body) < NonceSize {
		return nil, nil, fmt.Errorf("body too short")
	}

	// obtain nonce
	var nonce [NonceSize]byte
	copy(nonce[:], body[:NonceSize])

	// decrypt payload
	payload, ok := secretbox.Open(nil, body[NonceSize:], &nonce, key)
	if !ok {
		return nil, nil, fmt.Errorf("could not decrypt body")
	}

	// deal with actual payload
	r := bytes.NewReader(payload)

	// decode header
	d := xdr.NewDecoder(r)
	var mh Header
	_, err := d.Decode(&mh)
	if err != nil {
		return nil, nil, err
	}

	// deal with compression
	var rd io.Reader
	switch mh.Compression {
	case CompNone:
		// reuse reader
		rd = r
	case CompGZIP:
		// always use parallel decompression
		rd, err = pgzip.NewReader(r)
		if err != nil {
			return nil, nil, err
		}
	default:
		return nil, nil, fmt.Errorf("invalid compression: %v", mh.Compression)
	}

	var cleartext bytes.Buffer
	f := bufio.NewWriter(&cleartext)

	// read what is left over after the xdr header
	_, err = io.Copy(f, rd)
	if err != nil {
		return nil, nil, err
	}
	f.Flush()
	return &mh, cleartext.Bytes(), nil
}
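// A hedged sketch of the matching encryption path (not from the source): it
// assumes Header xdr-encodes symmetrically, uses CompNone, and prepends the
// nonce to the sealed box, which is where NaClDecrypt expects to find it.
func naclEncryptSketch(cleartext []byte, key *[KeySize]byte) ([]byte, error) {
	var nonce [NonceSize]byte
	if _, err := rand.Read(nonce[:]); err != nil { // crypto/rand
		return nil, err
	}
	var payload bytes.Buffer
	var mh Header
	mh.Compression = CompNone // assumed settable; the snippet only reads it
	if _, err := xdr.NewEncoder(&payload).Encode(&mh); err != nil {
		return nil, err
	}
	payload.Write(cleartext)
	// Seal appends to its first argument, so passing nonce[:] prepends the nonce.
	return secretbox.Seal(nonce[:], payload.Bytes(), &nonce, key), nil
}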
// gunzipWrite decompresses gzipped data and writes the result to a Writer.
// Returns bytes written and an error.
func gunzipWrite(w io.Writer, data []byte) (int, error) {
	gr, err := pgzip.NewReader(bytes.NewBuffer(data))
	if err != nil {
		return 0, err
	}
	defer gr.Close()
	data, err = ioutil.ReadAll(gr)
	if err != nil {
		return 0, err
	}
	bytesWritten, err := w.Write(data)
	if err != nil {
		return 0, err
	}
	return bytesWritten, nil
}
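// Round-trip sketch for gunzipWrite using only pgzip/bytes APIs: compress a
// payload, then decompress it into a buffer.
func gunzipWriteSketch() {
	var compressed bytes.Buffer
	zw := pgzip.NewWriter(&compressed)
	zw.Write([]byte("hello gzip"))
	zw.Close()

	var out bytes.Buffer
	n, err := gunzipWrite(&out, compressed.Bytes())
	fmt.Println(n, err, out.String()) // 10 <nil> hello gzip
}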
// recurseDirectory recurses into a directory and sends all files
// fulfilling the selected options into global.filesChan
func recurseDirectory(dirname string) {
	dir, err := os.Open(dirname)
	if err != nil {
		errorLogger.Printf("cannot open directory '%s': %s\n", dirname, err)
		return
	}
	defer dir.Close()
	for {
		entries, err := dir.Readdir(256)
		if err == io.EOF {
			return
		}
		if err != nil {
			errorLogger.Printf("cannot read directory '%s': %s\n", dirname, err)
			return
		}
	nextEntry:
		for _, fi := range entries {
			fullpath := filepath.Join(dirname, fi.Name())

			// check directory include/exclude options
			if fi.IsDir() {
				if !options.Recursive {
					continue nextEntry
				}
				for _, dirPattern := range options.ExcludeDirs {
					matched, err := filepath.Match(dirPattern, fi.Name())
					if err != nil {
						errorLogger.Fatalf("cannot match malformed pattern '%s' against directory name: %s\n", dirPattern, err)
					}
					if matched {
						continue nextEntry
					}
				}
				if len(options.IncludeDirs) > 0 {
					for _, dirPattern := range options.IncludeDirs {
						matched, err := filepath.Match(dirPattern, fi.Name())
						if err != nil {
							errorLogger.Fatalf("cannot match malformed pattern '%s' against directory name: %s\n", dirPattern, err)
						}
						if matched {
							goto includeDirMatchFound
						}
					}
					continue nextEntry
				includeDirMatchFound:
				}
				recurseDirectory(fullpath)
				continue nextEntry
			}

			// check whether this is a regular file
			if fi.Mode()&os.ModeType != 0 {
				continue nextEntry
			}

			// check file path options
			if global.excludeFilepathRegex != nil {
				if global.excludeFilepathRegex.MatchString(fullpath) {
					continue nextEntry
				}
			}
			if global.includeFilepathRegex != nil {
				if !global.includeFilepathRegex.MatchString(fullpath) {
					continue nextEntry
				}
			}

			// check file extension options
			if len(options.ExcludeExtensions) > 0 {
				for _, e := range strings.Split(options.ExcludeExtensions, ",") {
					if filepath.Ext(fi.Name()) == "."+e {
						continue nextEntry
					}
				}
			}
			if len(options.IncludeExtensions) > 0 {
				for _, e := range strings.Split(options.IncludeExtensions, ",") {
					if filepath.Ext(fi.Name()) == "."+e {
						goto includeExtensionFound
					}
				}
				continue nextEntry
			includeExtensionFound:
			}

			// check file include/exclude options
			for _, filePattern := range options.ExcludeFiles {
				matched, err := filepath.Match(filePattern, fi.Name())
				if err != nil {
					errorLogger.Fatalf("cannot match malformed pattern '%s' against file name: %s\n", filePattern, err)
				}
				if matched {
					continue nextEntry
				}
			}
			if len(options.IncludeFiles) > 0 {
				for _, filePattern := range options.IncludeFiles {
					matched, err := filepath.Match(filePattern, fi.Name())
					if err != nil {
						errorLogger.Fatalf("cannot match malformed pattern '%s' against file name: %s\n", filePattern, err)
					}
					if matched {
						goto includeFileMatchFound
					}
				}
				continue nextEntry
			includeFileMatchFound:
			}

			// check file type options
			if len(options.ExcludeTypes) > 0 {
				for _, t := range strings.Split(options.ExcludeTypes, ",") {
					for _, filePattern := range global.fileTypesMap[t].Patterns {
						if matched, _ := filepath.Match(filePattern, fi.Name()); matched {
							continue nextEntry
						}
					}
					sr := global.fileTypesMap[t].ShebangRegex
					if sr != nil {
						if m, err := checkShebang(sr, fullpath); m && err == nil {
							continue nextEntry
						}
					}
				}
			}
			if len(options.IncludeTypes) > 0 {
				for _, t := range strings.Split(options.IncludeTypes, ",") {
					for _, filePattern := range global.fileTypesMap[t].Patterns {
						if matched, _ := filepath.Match(filePattern, fi.Name()); matched {
							goto includeTypeFound
						}
					}
					sr := global.fileTypesMap[t].ShebangRegex
					if sr != nil {
						if m, err := checkShebang(sr, fullpath); err != nil || m {
							goto includeTypeFound
						}
					}
				}
				continue nextEntry
			includeTypeFound:
			}
			global.filesChan <- fullpath
		}
	}
}

// checkShebang checks whether the first line of file matches the given regex
func checkShebang(regex *regexp.Regexp, filepath string) (bool, error) {
	f, err := os.Open(filepath)
	if err != nil {
		return false, err
	}
	defer f.Close()
	// ReadBytes returns the data read so far even on error (e.g. io.EOF on a
	// file without a trailing newline), so the first line can still be matched
	b, _ := bufio.NewReader(f).ReadBytes('\n')
	return regex.Match(b), nil
}

// processFileTargets reads filesChan, builds an io.Reader for the target and calls processReader
func processFileTargets() {
	defer global.targetsWaitGroup.Done()
	dataBuffer := make([]byte, InputBlockSize)
	testBuffer := make([]byte, InputBlockSize)
	matchRegexes := make([]*regexp.Regexp, len(global.matchPatterns))
	for i := range global.matchPatterns {
		matchRegexes[i] = regexp.MustCompile(global.matchPatterns[i])
	}

	for filepath := range global.filesChan {
		var err error
		var infile *os.File
		var reader io.Reader
		if filepath == "-" {
			infile = os.Stdin
		} else {
			infile, err = os.Open(filepath)
			if err != nil {
				errorLogger.Printf("cannot open file '%s': %s\n", filepath, err)
				continue
			}
		}

		if options.Zip && strings.HasSuffix(filepath, ".gz") {
			rawReader := bufio.NewReader(infile)
			reader, err = gzip.NewReader(rawReader)
			if err != nil {
				errorLogger.Printf("error decompressing file '%s', opening as normal file\n", infile.Name())
				infile.Seek(0, 0)
				reader = bufio.NewReader(infile)
			}
		} else if infile == os.Stdin && options.Multiline {
			reader = nbreader.NewNBReader(bufio.NewReader(infile), InputBlockSize,
				nbreader.ChunkTimeout(MultilinePipeChunkTimeout), nbreader.Timeout(MultilinePipeTimeout))
		} else {
			reader = bufio.NewReader(infile)
		}

		if options.InvertMatch {
			err = processReaderInvertMatch(reader, matchRegexes, filepath)
		} else {
			err = processReader(reader, matchRegexes, dataBuffer, testBuffer, filepath)
		}
		if err != nil {
			if err == errLineTooLong {
				if !options.ErrSkipLineLength {
					errmsg := fmt.Sprintf("file contains very long lines (no newline in %d bytes)", InputBlockSize)
					errorLogger.Printf("cannot process data from file '%s': %s\n", filepath, errmsg)
				}
			} else {
				errorLogger.Printf("cannot process data from file '%s': %s\n", filepath, err)
			}
		}
		infile.Close()
	}
}

// processNetworkTarget starts a listening TCP socket and calls processReader
func processNetworkTarget(target string) {
	matchRegexes := make([]*regexp.Regexp, len(global.matchPatterns))
	for i := range global.matchPatterns {
		matchRegexes[i] = regexp.MustCompile(global.matchPatterns[i])
	}
	defer global.targetsWaitGroup.Done()

	var reader io.Reader
	netParams := global.netTcpRegex.FindStringSubmatch(target)
	proto := netParams[1]
	addr := netParams[2]
	listener, err := net.Listen(proto, addr)
	if err != nil {
		errorLogger.Fatalf("could not listen on '%s'\n", target)
	}
	conn, err := listener.Accept()
	if err != nil {
		errorLogger.Fatalf("could not accept connections on '%s'\n", target)
	}

	if options.Multiline {
		reader = nbreader.NewNBReader(conn, InputBlockSize,
			nbreader.ChunkTimeout(MultilinePipeChunkTimeout), nbreader.Timeout(MultilinePipeTimeout))
	} else {
		reader = conn
	}

	dataBuffer := make([]byte, InputBlockSize)
	testBuffer := make([]byte, InputBlockSize)
	err = processReader(reader, matchRegexes, dataBuffer, testBuffer, target)
	if err != nil {
		errorLogger.Printf("error processing data from '%s'\n", target)
		return
	}
}

func executeSearch(targets []string) (ret int, err error) {
	defer func() {
		if r := recover(); r != nil {
			ret = 2
			// the panic value is not guaranteed to be a string
			err = fmt.Errorf("%v", r)
		}
	}()
	tstart := time.Now()
	global.filesChan = make(chan string, 256)
	global.resultsChan = make(chan *Result, 128)
	global.resultsDoneChan = make(chan struct{})
	global.totalTargetCount = 0
	global.totalMatchCount = 0
	global.totalResultCount = 0

	go resultHandler()

	for i := 0; i < options.Cores; i++ {
		global.targetsWaitGroup.Add(1)
		go processFileTargets()
	}

	for _, target := range targets {
		switch {
		case target == "-":
			global.filesChan <- "-"
		case global.netTcpRegex.MatchString(target):
			global.targetsWaitGroup.Add(1)
			go processNetworkTarget(target)
		default:
			fileinfo, err := os.Stat(target)
			if err != nil {
				if os.IsNotExist(err) {
					errorLogger.Fatalf("no such file or directory: %s\n", target)
				} else {
					errorLogger.Fatalf("cannot open file or directory: %s\n", target)
				}
			}
			if fileinfo.IsDir() {
				recurseDirectory(target)
			} else {
				global.filesChan <- target
			}
		}
	}

	close(global.filesChan)
	global.targetsWaitGroup.Wait()
	close(global.resultsChan)
	<-global.resultsDoneChan

	var retVal int
	if global.totalResultCount > 0 {
		retVal = 0
	} else {
		retVal = 1
	}

	if options.Stats {
		tend := time.Now()
		fmt.Fprintln(os.Stderr, global.totalTargetCount, "files processed")
		fmt.Fprintln(os.Stderr, global.totalResultCount, "files match")
		fmt.Fprintln(os.Stderr, global.totalMatchCount, "matches found")
		fmt.Fprintf(os.Stderr, "in %v\n", tend.Sub(tstart))
	}

	return retVal, nil
}

func main() {
	var targets []string
	options.LoadDefaults()

	parser := flags.NewNamedParser("sift", flags.HelpFlag|flags.PassDoubleDash)
	parser.AddGroup("Options", "Options", &options)
	parser.Name = "sift"
	parser.Usage = "[OPTIONS] PATTERN [FILE|PATH|tcp://HOST:PORT...]\n" +
		" sift [OPTIONS] [-e PATTERN | -f FILE] [FILE|PATH|tcp://HOST:PORT...]"
	args, err := parser.Parse()
	if err != nil {
		if e, ok := err.(*flags.Error); ok && e.Type == flags.ErrHelp {
			fmt.Println(e.Error())
			os.Exit(0)
		} else {
			errorLogger.Println(err)
			os.Exit(2)
		}
	}

	for _, pattern := range options.Patterns {
		global.matchPatterns = append(global.matchPatterns, pattern)
	}

	if options.PatternFile != "" {
		f, err := os.Open(options.PatternFile)
		if err != nil {
			errorLogger.Fatalln("Cannot open pattern file:\n", err)
		}
		scanner := bufio.NewScanner(f)
		for scanner.Scan() {
			pattern := scanner.Text()
			global.matchPatterns = append(global.matchPatterns, pattern)
		}
	}

	if len(global.matchPatterns) == 0 {
		if len(args) == 0 && !(options.PrintConfig || options.WriteConfig) {
			errorLogger.Fatalln("No pattern given. Try 'sift --help' for more information.")
		}
		if len(args) > 0 {
			global.matchPatterns = append(global.matchPatterns, args[0])
			args = args[1:]
		}
	}

	if len(args) == 0 {
		// check whether there is input on STDIN
		if !terminal.IsTerminal(int(os.Stdin.Fd())) {
			targets = []string{"-"}
		} else {
			targets = []string{"."}
		}
	} else {
		targets = args
	}

	// expand arguments containing patterns on Windows
	if runtime.GOOS == "windows" {
		targetsExpanded := []string{}
		for _, t := range targets {
			if t == "-" {
				targetsExpanded = append(targetsExpanded, t)
				continue
			}
			expanded, err := filepath.Glob(t)
			if err == filepath.ErrBadPattern {
				errorLogger.Fatalf("cannot parse argument '%s': %s\n", t, err)
			}
			targetsExpanded = append(targetsExpanded, expanded...)
		}
		targets = targetsExpanded
	}

	if err := options.Apply(global.matchPatterns, targets); err != nil {
		errorLogger.Fatalf("cannot process options: %s\n", err)
	}

	global.matchRegexes = make([]*regexp.Regexp, len(global.matchPatterns))
	for i := range global.matchPatterns {
		global.matchRegexes[i], err = regexp.Compile(global.matchPatterns[i])
		if err != nil {
			errorLogger.Fatalf("cannot parse pattern: %s\n", err)
		}
	}

	retVal, err := executeSearch(targets)
	if err != nil {
		errorLogger.Println(err)
	}
	os.Exit(retVal)
}
func downloadForgeModule(name string, version string) {
	//url := "https://forgeapi.puppetlabs.com/v3/files/puppetlabs-apt-2.1.1.tar.gz"
	fileName := name + "-" + version + ".tar.gz"
	if _, err := os.Stat(config.ForgeCacheDir + name + "-" + version); os.IsNotExist(err) {
		url := "https://forgeapi.puppetlabs.com/v3/files/" + fileName
		req, err := http.NewRequest("GET", url, nil)
		if err != nil {
			log.Print("downloadForgeModule(): Error while creating GET request for ", url, ": ", err)
			os.Exit(1)
		}
		req.Header.Set("User-Agent", "https://github.com/xorpaul/g10k/")
		req.Header.Set("Connection", "close")
		proxyUrl, err := http.ProxyFromEnvironment(req)
		if err != nil {
			log.Print("downloadForgeModule(): Error while getting http proxy with golang http.ProxyFromEnvironment() ", err)
			os.Exit(1)
		}
		client := &http.Client{Transport: &http.Transport{Proxy: http.ProxyURL(proxyUrl)}}
		before := time.Now()
		resp, err := client.Do(req)
		duration := time.Since(before).Seconds()
		Verbosef("GETing " + url + " took " + strconv.FormatFloat(duration, 'f', 5, 64) + "s")
		mutex.Lock()
		syncForgeTime += duration
		mutex.Unlock()
		if err != nil {
			log.Print("downloadForgeModule(): Error while GETing Forge module ", name, " from ", url, ": ", err)
			os.Exit(1)
		}
		defer resp.Body.Close()

		if resp.StatusCode == 200 {
			Debugf("downloadForgeModule(): Trying to create " + config.ForgeCacheDir + fileName)
			out, err := os.Create(config.ForgeCacheDir + fileName)
			if err != nil {
				log.Print("downloadForgeModule(): Error while creating file for Forge module "+config.ForgeCacheDir+fileName, err)
				os.Exit(1)
			}
			defer out.Close()
			io.Copy(out, resp.Body)
			file, err := os.Open(config.ForgeCacheDir + fileName)
			if err != nil {
				fmt.Println("downloadForgeModule(): Error while opening file", file, err)
				os.Exit(1)
			}
			defer file.Close()
			var fileReader io.ReadCloser = resp.Body
			if strings.HasSuffix(fileName, ".gz") {
				if fileReader, err = pgzip.NewReader(file); err != nil {
					fmt.Println("downloadForgeModule(): pgzip reader error for module ", fileName, " error:", err)
					os.Exit(1)
				}
				defer fileReader.Close()
			}
			tarBallReader := tar.NewReader(fileReader)
			if err = os.Chdir(config.ForgeCacheDir); err != nil {
				fmt.Println("downloadForgeModule(): error while chdir to", config.ForgeCacheDir, err)
				os.Exit(1)
			}
			for {
				header, err := tarBallReader.Next()
				if err != nil {
					if err == io.EOF {
						break
					}
					fmt.Println("downloadForgeModule(): error while tar reader.Next() for ", fileName, err)
					os.Exit(1)
				}

				// get the individual filename and extract to the current directory
				filename := header.Name
				//Debugf("downloadForgeModule(): Trying to extract file " + filename)
				switch header.Typeflag {
				case tar.TypeDir:
					// handle directory
					err = os.MkdirAll(filename, os.FileMode(0755))
					if err != nil {
						fmt.Println("downloadForgeModule(): error while MkdirAll()", filename, err)
						os.Exit(1)
					}
				case tar.TypeReg:
					// handle normal file
					writer, err := os.Create(filename)
					if err != nil {
						fmt.Println("downloadForgeModule(): error while Create()", filename, err)
						os.Exit(1)
					}
					io.Copy(writer, tarBallReader)
					err = os.Chmod(filename, os.FileMode(0644))
					if err != nil {
						fmt.Println("downloadForgeModule(): error while Chmod()", filename, err)
						os.Exit(1)
					}
					writer.Close()
				default:
					fmt.Printf("Unable to untar type: %c in file %s", header.Typeflag, filename)
				}
			}
		} else {
			log.Print("downloadForgeModule(): Unexpected response code while GETing " + url + ": " + resp.Status)
			os.Exit(1)
		}
	} else {
		Debugf("downloadForgeModule(): Using cache for Forge module " + name + " version: " + version)
	}
}
// downloadTable retrieves files from the HTTP server. Which files to download is MySQL engine specific.
func downloadTable(clientConfig clientConfigStruct, downloadInfo downloadInfoStruct) {
	downloadInfo.displayInfo.w = os.Stdout
	downloadInfo.displayInfo.fqTable = downloadInfo.schema + "." + downloadInfo.table
	downloadInfo.displayInfo.status = "Downloading"
	downloadInfo.displayChan <- downloadInfo.displayInfo

	// Use encoded schema and table if present
	var schemaFilename string
	var tableFilename string
	if downloadInfo.encodedSchema != "" {
		schemaFilename = downloadInfo.encodedSchema
	} else {
		schemaFilename = downloadInfo.schema
	}
	if downloadInfo.encodedTable != "" {
		tableFilename = downloadInfo.encodedTable
	} else {
		tableFilename = downloadInfo.table
	}

	// Ensure backup exists and check the engine type
	// Assume InnoDB first
	resp, err := http.Head(downloadInfo.backurl + path.Join(schemaFilename, tableFilename+".ibd"))
	checkErr(err)

	var engine string
	var extensions []string
	if resp.StatusCode == 200 {
		engine = "InnoDB"
		// 5.1 & 5.5 use .exp - 5.6 uses .cfg but it is ignored. Metadata checks appeared too brittle in testing.
		if strings.HasPrefix(downloadInfo.version, "5.1") || strings.HasPrefix(downloadInfo.version, "5.5") {
			extensions = append(extensions, ".exp")
		}
		extensions = append(extensions, ".ibd")
	} else {
		// Check for MyISAM
		resp, err := http.Head(downloadInfo.backurl + path.Join(schemaFilename, tableFilename+".MYD"))
		checkErr(err)
		if resp.StatusCode == 200 {
			engine = "MyISAM"
			extensions = append(extensions, ".MYI")
			extensions = append(extensions, ".MYD")
			extensions = append(extensions, ".frm")
		} else {
			errDownloadUnsupported = fmt.Errorf("Table %s.%s is using an unsupported engine", downloadInfo.schema, downloadInfo.table)
			handleDownloadError(clientConfig, &downloadInfo, errDownloadUnsupported)
			return
		}
	}

	// Update downloadInfo struct with engine type and extensions array
	downloadInfo.engine = engine
	downloadInfo.extensions = extensions

	// Loop through and download all files from extensions array
	var triteFiles []string
	for _, extension := range extensions {
		triteFile := filepath.Join(downloadInfo.mysqldir, schemaFilename, tableFilename+extension+".trite")

		// Ensure the .exp exists if we expect it
		// Checking this due to a bug encountered where XtraBackup did not create a table's .exp file
		if extension == ".exp" {
			resp, err := http.Head(downloadInfo.backurl + path.Join(schemaFilename, tableFilename+".exp"))
			checkHTTP(resp, downloadInfo.backurl+path.Join(schemaFilename, tableFilename+".exp"))
			checkErr(err)
			if resp.StatusCode != 200 {
				errDownloadExp = fmt.Errorf("The .exp file is missing for table %s.%s", downloadInfo.schema, downloadInfo.table)
				handleDownloadError(clientConfig, &downloadInfo, errDownloadExp)
				return
			}
		}

		// Request and write file
		fo, err := os.Create(triteFile)
		checkErr(err)
		defer fo.Close()

		if runtime.GOOS != "windows" {
			// Chown to mysql user
			os.Chown(triteFile, downloadInfo.uid, downloadInfo.gid)
			os.Chmod(triteFile, mysqlPerms)
		}

		// Get the size of the file from the trite server here because the file may be
		// compressed during download, in which case the content length is -1
		headfile := downloadInfo.backurl + path.Join(schemaFilename, tableFilename+extension)
		head, err := http.Head(headfile)
		checkHTTP(head, headfile)
		checkErr(err)
		sizeServer := head.ContentLength

		var urlfile string
		if clientConfig.gz {
			urlfile = downloadInfo.gzurl + path.Join(schemaFilename, tableFilename+extension)
		} else {
			urlfile = downloadInfo.backurl + path.Join(schemaFilename, tableFilename+extension)
		}

		// Download files from trite server
		w := bufio.NewWriter(fo)
		resp, err := http.Get(urlfile)
		checkErr(err)
		checkHTTP(resp, urlfile)
		defer resp.Body.Close()

		var r io.Reader
		if clientConfig.gz {
			r, err = pgzip.NewReader(resp.Body)
			checkErr(err)
		} else {
			r = bufio.NewReader(resp.Body)
		}

		var sizeDown int64
		if extension != ".exp" && sizeServer > clientConfig.minDownloadProgressSize*1073741824 {
			progressReader := &reader{
				reader:     r,
				size:       sizeServer,
				drawFunc:   drawTerminalf(downloadInfo.displayInfo.w, drawTextFormatPercent),
				drawPrefix: "Downloading: " + downloadInfo.schema + "." + downloadInfo.table,
			}
			sizeDown, err = w.ReadFrom(progressReader)
		} else {
			sizeDown, err = w.ReadFrom(r)
		}
		checkErr(err)
		w.Flush()

		// Check if size of file downloaded matches size on server -- Add retry ability
		if sizeDown != sizeServer {
			// Remove partial file download
			os.Remove(triteFile)
			errDownloadSize = fmt.Errorf("The %s file did not download properly for %s.%s", extension, downloadInfo.schema, downloadInfo.table)
			handleDownloadError(clientConfig, &downloadInfo, errDownloadSize)
		}

		triteFiles = append(triteFiles, triteFile)
	}

	downloadInfo.triteFiles = triteFiles

	// Call applyTables
	go applyTables(clientConfig, &downloadInfo)
}
func extractForgeModule(wgForgeModule *sync.WaitGroup, file *io.PipeReader, fileName string) {
	defer wgForgeModule.Done()
	funcName := funcName()
	before := time.Now()

	fileReader, err := pgzip.NewReader(file)
	if err != nil {
		Fatalf(funcName + "(): pgzip reader error for module " + fileName + " error: " + err.Error())
	}
	defer fileReader.Close()

	tarBallReader := tar.NewReader(fileReader)
	for {
		header, err := tarBallReader.Next()
		if err != nil {
			if err == io.EOF {
				break
			}
			Fatalf(funcName + "(): error while tar reader.Next() for " + fileName + ": " + err.Error())
		}

		// get the individual filename and extract to the current directory
		filename := header.Name
		targetFilename := config.ForgeCacheDir + "/" + filename
		//Debugf("Trying to extract file " + filename)
		switch header.Typeflag {
		case tar.TypeDir:
			// handle directory
			err = os.MkdirAll(targetFilename, os.FileMode(0755))
			if err != nil {
				Fatalf(funcName + "(): error while MkdirAll() " + filename + ": " + err.Error())
			}
		case tar.TypeReg:
			// handle normal file
			writer, err := os.Create(targetFilename)
			if err != nil {
				Fatalf(funcName + "(): error while Create() " + filename + ": " + err.Error())
			}
			io.Copy(writer, tarBallReader)
			err = os.Chmod(targetFilename, os.FileMode(0644))
			if err != nil {
				Fatalf(funcName + "(): error while Chmod() " + filename + ": " + err.Error())
			}
			writer.Close()
		default:
			Fatalf(funcName + "(): Unable to untar type: " + string(header.Typeflag) + " in file " + filename)
		}
	}

	duration := time.Since(before).Seconds()
	Verbosef("Extracting " + config.ForgeCacheDir + fileName + " took " + strconv.FormatFloat(duration, 'f', 5, 64) + "s")
	mutex.Lock()
	ioForgeTime += duration
	mutex.Unlock()
}
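// A minimal usage sketch (hypothetical wiring, not from the source): stream a
// downloaded module archive into extractForgeModule through an io.Pipe, which
// matches the *io.PipeReader parameter above.
func extractFromResponseSketch(body io.Reader, fileName string) {
	var wg sync.WaitGroup
	pr, pw := io.Pipe()
	wg.Add(1)
	go extractForgeModule(&wg, pr, fileName)
	_, err := io.Copy(pw, body)
	pw.CloseWithError(err) // propagate copy errors to the reader side
	wg.Wait()
}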
func ReadSynthesisFile(repo Repository, file string, out chan<- Package) error {
	f, err := os.Open(file)
	if err != nil {
		return err
	}
	defer f.Close()

	gz, err := gzip.NewReader(f)
	if err != nil {
		return err
	}
	defer gz.Close()

	r := bufio.NewReader(gz)
	cur := Package{}
	for {
		line, err := r.ReadString('\n')
		if err == io.EOF {
			break
		}
		if err != nil {
			return fmt.Errorf("can't read synthesis file: %v", err)
		}
		line = strings.Trim(line, "\n\r")
		// @[email protected]@libvo-amrwbenc0[== 0.1.2-1:2014.1]
		// @[email protected]@libc.so.6(GLIBC_2.0)@libc.so.6(GLIBC_2.1.3)
		// @summary@VisualOn AMR-WB encoder library
		// @filesize@68793
		// @[email protected]@0@143156@System/Libraries@[email protected]
		if strings.HasPrefix(line, "@info") {
			var size string
			err = splitLine(line, &cur.FileName, nil, &size, &cur.Group, &cur.Disttag, &cur.Distepoch)
			if err != nil {
				return err
			}
			cur.Repository = repo.Name
			cur.Size, err = strconv.Atoi(size)
			if err != nil {
				return fmt.Errorf("can't read synthesis file: incorrect filesize '%v': %v", size, err)
			}
			items := strings.Split(cur.FileName, "-")
			if len(items) < 4 {
				return fmt.Errorf("can't parse package filename %s", cur.FileName)
			}
			s := items[len(items)-1]
			n := strings.LastIndex(s, ".")
			cur.Arch = s[n+1:]
			cur.Version = items[len(items)-3] + "-" + items[len(items)-2]
			cur.Name = strings.Join(items[:len(items)-3], "-")
			out <- cur
			cur = Package{}
			continue
		}
		if strings.HasPrefix(line, "@summary@") {
			cur.Summary = line[9:]
			continue
		}
		if strings.HasPrefix(line, "@filesize@") {
			items := strings.Split(line, "@")
			if len(items) < 3 {
				return fmt.Errorf("can't read synthesis file: package filesize not found %v", line)
			}
			cur.RPMSize, err = strconv.Atoi(items[2])
			if err != nil {
				return fmt.Errorf("can't read synthesis file: incorrect RPM size %v: %v", items[2], err)
			}
			continue
		}
	}
	return nil
}
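// A minimal usage sketch (hypothetical repo and file names): consume parsed
// packages from the channel while the synthesis file is being read.
func readSynthesisSketch() {
	out := make(chan Package)
	go func() {
		defer close(out)
		repo := Repository{Name: "main"} // assumes Repository has a Name field, as used above
		if err := ReadSynthesisFile(repo, "synthesis.hdlist.cz", out); err != nil {
			log.Println(err)
		}
	}()
	for p := range out {
		fmt.Println(p.Name, p.Version, p.Arch)
	}
}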