Example #1
func (s *archiveDataSuite) SetUpTest(c *gc.C) {
	s.IsolationSuite.SetUpTest(c)

	meta, err := backups.NewMetadataJSONReader(bytes.NewBufferString(`{` +
		`"ID":"20140909-115934.asdf-zxcv-qwe",` +
		`"Checksum":"123af2cef",` +
		`"ChecksumFormat":"SHA-1, base64 encoded",` +
		`"Size":10,` +
		`"Stored":"0001-01-01T00:00:00Z",` +
		`"Started":"2014-09-09T11:59:34Z",` +
		`"Finished":"2014-09-09T12:00:34Z",` +
		`"Notes":"",` +
		`"Environment":"asdf-zxcv-qwe",` +
		`"Machine":"0",` +
		`"Hostname":"myhost",` +
		`"Version":"1.21-alpha3"` +
		`}` + "\n"))
	c.Assert(err, jc.ErrorIsNil)

	archiveFile := s.newArchiveFile(c, meta)
	compressed, err := ioutil.ReadAll(archiveFile)
	c.Assert(err, jc.ErrorIsNil)
	gzr, err := gzip.NewReader(bytes.NewBuffer(compressed))
	c.Assert(err, jc.ErrorIsNil)
	data, err := ioutil.ReadAll(gzr)
	c.Assert(err, jc.ErrorIsNil)

	s.archiveFile = bytes.NewBuffer(compressed)
	s.data = data
	s.meta = meta
}
Example #2
func main() {
	flag.Parse()
	f, err := os.Open(*geoData)
	x.Check(err)

	gr, err := gzip.NewReader(f)
	x.Check(err)

	//var strBuf bytes.Buffer
	//bufReader := bufio.NewReader(gr)
	dec := json.NewDecoder(gr)

	countryToGeo := make(map[string]string)
	findFeatureArray(dec)
	for dec.More() {
		var f geojson.Feature
		if err := dec.Decode(&f); err != nil {
			fmt.Println(err)
			continue
		}
		fmt.Println(f.Properties["NAME_LONG"])
		gg, err := geojson.Marshal(f.Geometry)
		if err != nil {
			fmt.Println(err)
			continue
		}
		ggg := strings.Replace(string(gg), "\"", "'", -1)
		country, ok := f.Properties["NAME_LONG"].(string)
		if ok {
			countryToGeo[country] = ggg
		}
		//fmt.Printf("\"%s\"", ggg)
	}
	gr.Close()
	f.Close()

	f, err = os.Open(*rdf)
	x.Check(err)

	gr, err = gzip.NewReader(f)
	x.Check(err)

	scanner := bufio.NewScanner(gr)

	out, err := os.Create("countryGeoData")
	x.Check(err)
	defer out.Close()
	count1, count2 := 0, 0
	for scanner.Scan() {
		line := scanner.Text()
		if strings.Contains(line, "@en") {
			items := strings.Split(line, "\t")
			country := strings.Trim(strings.Split(items[2], "@")[0], "\"")
			fmt.Println(country)
			if geoD, ok := countryToGeo[country]; ok {
				count1++
				out.WriteString(fmt.Sprintf("%s <loc> \"%s\"^^<geo:geojson> .\n", items[0], geoD))
			} else {
				count2++
			}
		}
	}
	fmt.Println(count1, count2)
}
Example #3
func openStream(path string) (io.ReadCloser, error) {
	if strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://") {
		req, err := http.NewRequest("GET", path, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Accept-Encoding", "gzip")
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return nil, err
		}

		gz, err := gzip.NewReader(resp.Body)
		if err != nil {
			resp.Body.Close()
			return nil, err
		}

		return gz, nil
	} else {
		f, err := os.Open(path)
		if err != nil {
			return nil, err
		}

		gz, err := gzip.NewReader(f)
		if err != nil {
			f.Close()
			return nil, err
		}

		return gz, nil
	}
	panic("Unreachable")
}
Example #4
func extractTarGz(body []byte, location string) (string, error) {
	bodyCopy := make([]byte, len(body))
	copy(bodyCopy, body)
	tarFile, err := gzip.NewReader(bytes.NewReader(body))
	if err != nil {
		return location, err
	}
	tarReader := tar.NewReader(tarFile)

	var dirList []string

	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return location, err
		}
		dirList = append(dirList, header.Name)
	}

	basedir := findBaseDir(dirList)

	tarFile, err = gzip.NewReader(bytes.NewReader(bodyCopy))
	if err != nil {
		return location, err
	}
	tarReader = tar.NewReader(tarFile)

	for {
		header, err := tarReader.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			return location, err
		}

		path := filepath.Join(location, strings.Replace(header.Name, basedir, "", -1))
		info := header.FileInfo()

		if info.IsDir() {
			if err = os.MkdirAll(path, info.Mode()); err != nil {
				return location, err
			}
			continue
		}

		if header.Typeflag == tar.TypeSymlink {
			if err = os.Symlink(header.Linkname, path); err != nil {
				return location, err
			}
			continue
		}

		file, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, info.Mode())
		if err != nil {
			return location, err
		}
		_, err = io.Copy(file, tarReader)
		file.Close()
		if err != nil {
			return location, err
		}
	}
	return location, nil
}
Example #5
func indexTarball(tarPath string, logChan chan *store.Record) bool {
	currentTar.Set(tarPath)
	handle, err := os.Open(tarPath)
	if err != nil {
		log.Printf("Error reading %s: %s\n", tarPath, err)
		tarsFailed.Add(1)
		return false
	}
	defer handle.Close()
	fileinfo, err := handle.Stat()
	if err != nil {
		log.Printf("Error stating %s: %s\n", tarPath, err)
		tarsFailed.Add(1)
		return false
	}
	tarBytesRead.Add(fileinfo.Size())
	unzippedHandle, err := gzip.NewReader(handle)
	if err != nil {
		log.Printf("Error unzipping tarball %s: %s\n", tarPath, err)
		tarsFailed.Add(1)
		return false
	}
	parentReader := tar.NewReader(unzippedHandle)
	for {
		parentHeader, err := parentReader.Next()
		if err == io.EOF {
			break
		} else if err != nil {
			tarsFailed.Add(1)
			log.Printf("Error indexing %v: %v", tarPath, err)
			break
		}
		if parentHeader.Typeflag != tar.TypeReg && parentHeader.Typeflag != tar.TypeRegA {
			continue
		}
		if filepath.Ext(parentHeader.Name) != ".gz" {
			continue
		}
		parentGzipHandle, err := gzip.NewReader(parentReader)
		if err != nil {
			nestedTarsFailed.Add(1)
			log.Printf("Error gunzipping trace %s/%s: %v", tarPath, parentHeader.Name, err)
			continue
		}
		if err := indexNestedTarball(parentGzipHandle, logChan); err != nil {
			nestedTarsFailed.Add(1)
			continue
		}
		nestedTarsIndexed.Add(1)
	}
	tarsIndexed.Add(1)
	return true
}
Example #6
func Fuzz(data []byte) int {
	fr, err := gzip.NewReader(bytes.NewReader(data))
	if err != nil {
		return 0
	}
	if len(fr.Comment) > 1<<20 || len(fr.Name) > 1<<20 || len(fr.Extra) > 1<<20 {
		panic("huge header")
	}
	uncomp := make([]byte, 64<<10)
	n, err := fr.Read(uncomp)
	if err != nil && err != io.EOF {
		return 0
	}
	if n == len(uncomp) {
		return 0 // too large
	}
	uncomp = uncomp[:n]
	for c := 0; c <= 9; c++ {
		buf := new(bytes.Buffer)
		gw, err := gzip.NewWriterLevel(buf, c)
		if err != nil {
			panic(err)
		}
		gw.Header = fr.Header
		n, err := gw.Write(uncomp)
		if err != nil {
			panic(err)
		}
		if n != len(uncomp) {
			panic("short write")
		}
		if err := gw.Close(); err != nil {
			panic(err)
		}
		fr1, err := gzip.NewReader(buf)
		if err != nil {
			panic(err)
		}
		uncomp1, err := ioutil.ReadAll(fr1)
		if err != nil {
			panic(err)
		}
		if !bytes.Equal(uncomp, uncomp1) {
			panic("data differs")
		}
	}
	return 1
}
Example #7
// Parse parses a profile and checks for its validity.  The input
// may be a gzip-compressed encoded protobuf or one of many legacy
// profile formats which may be unsupported in the future.
func Parse(r io.Reader) (*Profile, error) {
	orig, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}

	var p *Profile
	if len(orig) >= 2 && orig[0] == 0x1f && orig[1] == 0x8b {
		gz, err := gzip.NewReader(bytes.NewBuffer(orig))
		if err == nil {
			orig, err = ioutil.ReadAll(gz)
		}
		if err != nil {
			return nil, fmt.Errorf("decompressing profile: %v", err)
		}
	}
	if p, err = parseUncompressed(orig); err != nil {
		if p, err = parseLegacy(orig); err != nil {
			return nil, fmt.Errorf("parsing profile: %v", err)
		}
	}

	if err := p.CheckValid(); err != nil {
		return nil, fmt.Errorf("malformed profile: %v", err)
	}
	return p, nil
}
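Example #7 decides whether to decompress by checking the two-byte gzip magic number (0x1f 0x8b) at the start of the input, and Example #11 further down does the same by peeking at the first bytes of a buffered stream. A minimal sketch of that detection pattern on a plain io.Reader (sniffGzip is a hypothetical helper name, not taken from any example above; it assumes only the standard bufio, compress/gzip, and io packages):
// sniffGzip wraps r so that gzip input is transparently decompressed.
// It peeks at the first two bytes without consuming them; if they match
// the gzip magic number (0x1f 0x8b), the stream is wrapped in a gzip.Reader,
// otherwise the buffered reader is returned unchanged.
func sniffGzip(r io.Reader) (io.Reader, error) {
	br := bufio.NewReader(r)
	magic, err := br.Peek(2)
	if err != nil {
		// Fewer than two bytes available: cannot be gzip, return as-is.
		return br, nil
	}
	if magic[0] == 0x1f && magic[1] == 0x8b {
		return gzip.NewReader(br)
	}
	return br, nil
}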
Example #8
func openFile(name string) (*os.File, io.Reader, error) {
	f, err := os.Open(name)

	if err != nil {
		return nil, nil, err
	}

	var r io.Reader

	// Detect compression.
	switch filepath.Ext(name) {
	case ".gzip", ".gz":
		r, err = gzip.NewReader(f)

		if err != nil {
			f.Close()
			return nil, nil, err
		}

	case ".bzip2", ".bz2":
		r = bzip2.NewReader(f)

	default:
		r = f
	}

	return f, r, nil
}
Example #9
func readcsv(csvfile string) []byte {
	var err error
	f, err := os.Open(csvfile)
	if err != nil {
		if os.IsNotExist(err) {
			// fmt.Println("file does not exist")
		} else {
			fmt.Println("error", err)
		}
		return []byte("")
	}
	defer f.Close()
	// Read through a gzip reader when the file is compressed.
	var r io.Reader = f
	if strings.Contains(csvfile, ".gz") {
		gz, err := gzip.NewReader(f)
		if err != nil {
			fmt.Println(err)
			return []byte("")
		}
		defer gz.Close()
		r = gz
	}
	contents, err := ioutil.ReadAll(r)
	if err != nil {
		fmt.Println("error", err)
		return []byte("")
	}
	return contents
}
Example #10
func (d *Data) decodeBase64() (data []byte, err error) {
	rawData := bytes.TrimSpace(d.RawData)
	r := bytes.NewReader(rawData)

	encr := base64.NewDecoder(base64.StdEncoding, r)

	var comr io.Reader
	switch d.Compression {
	case "gzip":
		comr, err = gzip.NewReader(encr)
		if err != nil {
			return
		}
	case "zlib":
		comr, err = zlib.NewReader(encr)
		if err != nil {
			return
		}
	case "":
		comr = encr
	default:
		err = UnknownCompression
		return
	}

	return ioutil.ReadAll(comr)
}
Example #11
func DecompressStream(archive io.Reader) (io.ReadCloser, error) {
	p := pools.BufioReader32KPool
	buf := p.Get(archive)
	bs, err := buf.Peek(10)
	if err != nil {
		return nil, err
	}

	compression := DetectCompression(bs)
	switch compression {
	case Uncompressed:
		readBufWrapper := p.NewReadCloserWrapper(buf, buf)
		return readBufWrapper, nil
	case Gzip:
		gzReader, err := gzip.NewReader(buf)
		if err != nil {
			return nil, err
		}
		readBufWrapper := p.NewReadCloserWrapper(buf, gzReader)
		return readBufWrapper, nil
	case Bzip2:
		bz2Reader := bzip2.NewReader(buf)
		readBufWrapper := p.NewReadCloserWrapper(buf, bz2Reader)
		return readBufWrapper, nil
	case Xz:
		xzReader, err := xzDecompress(buf)
		if err != nil {
			return nil, err
		}
		readBufWrapper := p.NewReadCloserWrapper(buf, xzReader)
		return readBufWrapper, nil
	default:
		return nil, fmt.Errorf("Unsupported compression format %s", (&compression).Extension())
	}
}
Example #12
// does not follow redirects
func Gethtml3(url string) {
	client := new(http.Client)

	request, _ := http.NewRequest("GET", "http://www.baidu.com", nil)
	request.Header.Add("Accept-Encoding", "gzip")

	response, err := client.Do(request)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer response.Body.Close()
	for k, v := range response.Header {
		fmt.Println(k)
		fmt.Println(v)
	}

	// Check that the server actually sent compressed data
	var reader io.ReadCloser
	switch response.Header.Get("Content-Encoding") {
	case "gzip":
		fmt.Println("XXXXXXXXXX gzip")
		reader, err = gzip.NewReader(response.Body)
		if err != nil {
			fmt.Println(err)
			return
		}
		defer reader.Close()
	default:
		reader = response.Body
	}
	var s string
	if b, err := ioutil.ReadAll(reader); err == nil {
		s = string(b)
	}

	println(s)
}
Example #13
func fetchCamliSrc() {
	check(os.MkdirAll("/gopath/src/camlistore.org", 0777))
	check(os.Chdir("/gopath/src/camlistore.org"))

	res, err := http.Get("https://camlistore.googlesource.com/camlistore/+archive/" + *rev + ".tar.gz")
	check(err)
	defer res.Body.Close()
	gz, err := gzip.NewReader(res.Body)
	check(err)
	defer gz.Close()
	tr := tar.NewReader(gz)
	for {
		h, err := tr.Next()
		if err == io.EOF {
			break
		}
		check(err)
		if h.Typeflag == tar.TypeDir {
			check(os.MkdirAll(h.Name, os.FileMode(h.Mode)))
			continue
		}
		f, err := os.Create(h.Name)
		check(err)
		n, err := io.Copy(f, tr)
		if err != nil && err != io.EOF {
			log.Fatal(err)
		}
		if n != h.Size {
			log.Fatalf("Error when creating %v: wanted %v bytes, got %v bytes", h.Name, h.Size, n)
		}
		check(f.Close())
	}
}
Example #14
func TestGzip(t *testing.T) {
	req, _ := http.NewRequest(vodka.GET, "/", nil)
	rec := httptest.NewRecorder()
	c := vodka.NewContext(req, vodka.NewResponse(rec), vodka.New())
	h := func(c *vodka.Context) error {
		c.Response().Write([]byte("test")) // For Content-Type sniffing
		return nil
	}

	// Skip if no Accept-Encoding header
	Gzip()(h)(c)
	assert.Equal(t, http.StatusOK, rec.Code)
	assert.Equal(t, "test", rec.Body.String())

	req, _ = http.NewRequest(vodka.GET, "/", nil)
	req.Header.Set(vodka.AcceptEncoding, "gzip")
	rec = httptest.NewRecorder()
	c = vodka.NewContext(req, vodka.NewResponse(rec), vodka.New())

	// Gzip
	Gzip()(h)(c)
	assert.Equal(t, http.StatusOK, rec.Code)
	assert.Equal(t, "gzip", rec.Header().Get(vodka.ContentEncoding))
	assert.Contains(t, rec.Header().Get(vodka.ContentType), vodka.TextPlain)
	r, err := gzip.NewReader(rec.Body)
	if assert.NoError(t, err) {
		defer r.Close()
		buf := new(bytes.Buffer)
		buf.ReadFrom(r)
		assert.Equal(t, "test", buf.String())
	}
}
Example #15
// TestAcceptEncoding hits the health endpoint while explicitly
// disabling decompression on a custom client's Transport and setting
// it conditionally via the request's Accept-Encoding headers.
func TestAcceptEncoding(t *testing.T) {
	defer leaktest.AfterTest(t)
	s := StartTestServer(t)
	defer s.Stop()
	client, err := testContext.GetHTTPClient()
	if err != nil {
		t.Fatal(err)
	}

	testData := []struct {
		acceptEncoding string
		newReader      func(io.Reader) io.Reader
	}{
		{"",
			func(b io.Reader) io.Reader {
				return b
			},
		},
		{util.GzipEncoding,
			func(b io.Reader) io.Reader {
				r, err := gzip.NewReader(b)
				if err != nil {
					t.Fatalf("could not create new gzip reader: %s", err)
				}
				return r
			},
		},
		{util.SnappyEncoding,
			func(b io.Reader) io.Reader {
				return snappy.NewReader(b)
			},
		},
	}
	for _, d := range testData {
		req, err := http.NewRequest("GET", testContext.HTTPRequestScheme()+"://"+s.ServingAddr()+healthPath, nil)
		if err != nil {
			t.Fatalf("could not create request: %s", err)
		}
		if d.acceptEncoding != "" {
			req.Header.Set(util.AcceptEncodingHeader, d.acceptEncoding)
		}
		resp, err := client.Do(req)
		if err != nil {
			t.Fatalf("could not make request to %s: %s", req.URL, err)
		}
		defer resp.Body.Close()
		if ce := resp.Header.Get(util.ContentEncodingHeader); ce != d.acceptEncoding {
			t.Fatalf("unexpected content encoding: '%s' != '%s'", ce, d.acceptEncoding)
		}
		r := d.newReader(resp.Body)
		b, err := ioutil.ReadAll(r)
		if err != nil {
			t.Fatalf("could not read '%s' response body: %s", d.acceptEncoding, err)
		}
		expected := "ok"
		if !strings.Contains(string(b), expected) {
			t.Errorf("expected body to contain %q, got %q", expected, b)
		}
	}
}
Example #16
func skipToArMember(arReader *ar.Reader, memberPrefix string) (io.Reader, error) {
	var err error

	// find the right ar member
	var header *ar.Header
	for {
		header, err = arReader.Next()
		if err != nil {
			return nil, err
		}
		if strings.HasPrefix(header.Name, memberPrefix) {
			break
		}
	}

	// figure out what compression to use
	var dataReader io.Reader
	switch {
	case strings.HasSuffix(header.Name, ".gz"):
		dataReader, err = gzip.NewReader(arReader)
		if err != nil {
			return nil, err
		}
	case strings.HasSuffix(header.Name, ".bz2"):
		dataReader = bzip2.NewReader(arReader)
	case strings.HasSuffix(header.Name, ".xz"):
		dataReader = xzPipeReader(arReader)
	default:
		return nil, fmt.Errorf("Can not handle %s", header.Name)
	}

	return dataReader, nil
}
Example #17
func TestGzipOK(t *testing.T) {
	c := makeEnv()

	w := httptest.NewRecorder()

	h := GzipMiddleWare(&c, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("super things"))
	}))

	r, _ := http.NewRequest("GET", "/", nil)
	r.Header.Set("Accept-Encoding", "gzip")

	h.ServeHTTP(w, r)

	if w.Code != http.StatusOK {
		t.Errorf("expect 200, get %d, %s", w.Code, w.Body.String())
	}

	gr, err := gzip.NewReader(w.Body)
	if err != nil {
		t.Fatalf("couldn't create a gzip reader, %v", err)
	}
	body, err := ioutil.ReadAll(gr)
	if err != nil {
		t.Errorf("Couldn't read gzip content. %v", err)
	}

	if string(body) != "super things" {
		t.Errorf("body not as expected.  have \"%s\"", body)
	}

	if w.HeaderMap.Get("Content-Encoding") != "gzip" {
		t.Errorf("expected gzip encoding - have %v", w.HeaderMap)
	}
}
Example #18
func (t *TarInfo) Load(file io.ReadSeeker) {
	var reader *tar.Reader
	file.Seek(0, 0)
	gzipReader, err := gzip.NewReader(file)
	if err != nil {
		// likely not a gzip compressed file
		file.Seek(0, 0)
		reader = tar.NewReader(file)
	} else {
		reader = tar.NewReader(gzipReader)
	}
	for {
		header, err := reader.Next()
		if err == io.EOF {
			// end of tar file
			break
		} else if err != nil {
			// error occurred
			logger.Debug("[TarInfoLoad] Error when reading tar stream tarsum. Disabling TarSum, TarFilesInfo. Error: %s", err.Error())
			t.Error = TarError(err.Error())
			return
		}
		t.TarSum.Append(header, reader)
		t.TarFilesInfo.Append(header)
	}
}
Example #19
func readBody(b io.ReadCloser, ctype string, encoding string) (body string, err error) {
	defer b.Close()
	var r io.Reader
	if encoding == gzipHeader {
		gr, err := gzip.NewReader(b)
		if err != nil {
			return "", err
		}
		r = gr
		defer gr.Close()
	} else if encoding == "" {
		r = b
	} else {
		return "", fmt.Errorf("Unknown %s: %s", encHeader, encoding)
	}

	// TODO(iantw): If we find a need, allow character set conversions...
	// Unlikely to be an issue for now.
	// if ctype != "" {
	// 	 r, err = charset.NewReader(r, ctype)
	//
	//	 if err != nil {
	//		 return "", err
	//	 }
	// }

	bytes, err := ioutil.ReadAll(r)
	return string(bytes), err
}
Example #20
// doRequest runs a request with our client
func (c *Client) doRequest(r *request) (time.Duration, *http.Response, error) {
	req, err := r.toHTTP()
	if err != nil {
		return 0, nil, err
	}
	start := time.Now()
	resp, err := c.config.HttpClient.Do(req)
	diff := time.Now().Sub(start)

	// If the response is compressed, we swap the body's reader.
	if resp != nil && resp.Header != nil {
		var reader io.ReadCloser
		switch resp.Header.Get("Content-Encoding") {
		case "gzip":
			greader, err := gzip.NewReader(resp.Body)
			if err != nil {
				return 0, nil, err
			}

			// The gzip reader doesn't close the wrapped reader so we use
			// multiCloser.
			reader = &multiCloser{
				reader:       greader,
				inorderClose: []io.Closer{greader, resp.Body},
			}
		default:
			reader = resp.Body
		}
		resp.Body = reader
	}

	return diff, resp, err
}
Example #21
func testArchiveStr(t *testing.T, path string) []string {
	f, err := os.Open(path)
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	defer f.Close()

	// Ungzip
	gzipR, err := gzip.NewReader(f)
	if err != nil {
		t.Fatalf("err: %s", err)
	}

	// Accumulator
	result := make([]string, 0, 10)

	// Untar
	tarR := tar.NewReader(gzipR)
	for {
		header, err := tarR.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			t.Fatalf("err: %s", err)
		}

		result = append(result, header.Name)
	}

	sort.Strings(result)
	return result
}
Example #22
// BenchmarkParseProtoGzip is like BenchmarkParseProto above, but parses gzipped
// protobuf format.
func BenchmarkParseProtoGzip(b *testing.B) {
	b.StopTimer()
	data, err := ioutil.ReadFile("testdata/protobuf.gz")
	if err != nil {
		b.Fatal(err)
	}
	b.StartTimer()

	for i := 0; i < b.N; i++ {
		family := &dto.MetricFamily{}
		in, err := gzip.NewReader(bytes.NewReader(data))
		if err != nil {
			b.Fatal(err)
		}
		for {
			family.Reset()
			if _, err := pbutil.ReadDelimited(in, family); err != nil {
				if err == io.EOF {
					break
				}
				b.Fatal(err)
			}
		}
	}
}
Example #23
func cmdNbt(args []string) (err error) {
	if len(args) != 1 {
		os.Stderr.WriteString("usage: " + os.Args[0] + " nbt <NBT file path>\n")
		return
	}

	file, err := os.Open(args[0])
	if err != nil {
		return
	}
	defer file.Close()

	gzipReader, err := gzip.NewReader(file)
	if err != nil {
		return
	}
	defer gzipReader.Close()

	namedTag, err := nbt.Read(gzipReader)
	if err != nil {
		return
	}

	displayNbt(1, namedTag)

	return
}
Example #24
func getLogReader(logfile string, logf *os.File) (*bufio.Reader, error) {
	var rdr *bufio.Reader
	// Is this a gzip file?
	if path.Ext(logfile) == gzipext {
		gzrdr, err := gzip.NewReader(logf)
		if err != nil {
			return nil, err
		}
		rdr = bufio.NewReader(gzrdr)
	} else {
		// See if the file has shrunk. If so, read from the beginning.
		fi, err := logf.Stat()
		if err != nil {
			return nil, err
		}
		if fi.Size() < pos {
			pos = 0
		}
		logf.Seek(pos, os.SEEK_SET)
		fmt.Printf("Starting read at offset %d\n", pos)
		rdr = bufio.NewReader(logf)
	}

	return rdr, nil
}
Example #25
func (c *checksums) ChecksumForGraphID(id, parent, oldTarDataPath, newTarDataPath string) (diffID layer.DiffID, size int64, err error) {
	defer func() {
		if err != nil {
			logrus.Debugf("could not get checksum for %q with tar-split: %q. Attempting fallback.", id, err)
			diffID, size, err = c.checksumForGraphIDNoTarsplit(id, parent, newTarDataPath)
		}
	}()

	if oldTarDataPath == "" {
		err = errors.New("no tar-split file")
		return
	}

	tarDataFile, err := os.Open(oldTarDataPath)
	if err != nil {
		return
	}
	defer tarDataFile.Close()
	uncompressed, err := gzip.NewReader(tarDataFile)
	if err != nil {
		return
	}

	dgst := digest.Canonical.New()
	err = c.assembleTarTo(id, uncompressed, &size, dgst.Hash())
	if err != nil {
		return
	}

	diffID = layer.DiffID(dgst.Digest())
	os.RemoveAll(newTarDataPath)
	err = os.Link(oldTarDataPath, newTarDataPath)
	return
}
Example #26
func (m *Model) loadIndex(repo string, dir string) []protocol.FileInfo {
	id := fmt.Sprintf("%x", sha1.Sum([]byte(m.repoDirs[repo])))
	name := id + ".idx.gz"
	name = filepath.Join(dir, name)

	idxf, err := os.Open(name)
	if err != nil {
		return nil
	}
	defer idxf.Close()

	gzr, err := gzip.NewReader(idxf)
	if err != nil {
		return nil
	}
	defer gzr.Close()

	var im protocol.IndexMessage
	err = im.DecodeXDR(gzr)
	if err != nil || im.Repository != repo {
		return nil
	}

	return im.Files
}
Example #27
func RunTestGzip(data []byte) {
	log.Printf("encoding/RunTestGzip: Testing comprssion Gzip\n")

	var compressed bytes.Buffer
	w := gzip.NewWriter(&compressed)
	now := time.Now()
	w.Write(data)
	// Close the writer so the buffer holds a complete gzip stream before it is read back.
	w.Close()

	cl := compressed.Len()
	log.Printf("encoding/RunTestGzip: Compressed from %d bytes to %d bytes in %d ns\n", len(data), cl, time.Since(now).Nanoseconds())

	recovered := make([]byte, len(data))
	r, _ := gzip.NewReader(&compressed)
	defer r.Close()

	total := 0
	n := 100
	var err error = nil
	for err != io.EOF && n != 0 {
		n, err = r.Read(recovered[total:])
		total += n
	}
	log.Printf("encoding/RunTestGzip: Uncompressed from %d bytes to %d bytes in %d ns\n", cl, len(recovered), time.Since(now).Nanoseconds())
}
Example #28
func checkToolsContent(c *gc.C, data []byte, uploaded string) {
	zr, err := gzip.NewReader(bytes.NewReader(data))
	c.Check(err, gc.IsNil)
	defer zr.Close()
	tr := tar.NewReader(zr)
	found := false
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		c.Check(err, gc.IsNil)
		if strings.ContainsAny(hdr.Name, "/\\") {
			c.Fail()
		}
		if hdr.Typeflag != tar.TypeReg {
			c.Fail()
		}
		content, err := ioutil.ReadAll(tr)
		c.Check(err, gc.IsNil)
		c.Check(string(content), gc.Equals, uploaded)
		found = true
	}
	c.Check(found, jc.IsTrue)
}
Example #29
func (h *Handler) serveWrite(w http.ResponseWriter, r *http.Request, user *meta.UserInfo) {
	h.statMap.Add(statWriteRequest, 1)

	// Handle gzip decoding of the body
	body := r.Body
	if r.Header.Get("Content-encoding") == "gzip" {
		b, err := gzip.NewReader(r.Body)
		if err != nil {
			resultError(w, influxql.Result{Err: err}, http.StatusBadRequest)
			return
		}
		body = b
	}
	defer body.Close()

	b, err := ioutil.ReadAll(body)
	if err != nil {
		if h.WriteTrace {
			h.Logger.Print("write handler unable to read bytes from request body")
		}
		resultError(w, influxql.Result{Err: err}, http.StatusBadRequest)
		return
	}
	h.statMap.Add(statWriteRequestBytesReceived, int64(len(b)))
	if h.WriteTrace {
		h.Logger.Printf("write body received by handler: %s", string(b))
	}

	if r.Header.Get("Content-Type") == "application/json" {
		h.serveWriteJSON(w, r, b, user)
		return
	}
	h.serveWriteLine(w, r, b, user)
}
Example #30
func generateNewName() (string, error) {
	s1 := rand.NewSource(time.Now().UnixNano())
	r1 := rand.New(s1)
	nameIdx := r1.Intn(258000)
	surnameIdx := r1.Intn(88799)

	nameFile, err := os.Open("names.dat.gz")
	if err != nil {
		return "", err
	}
	defer nameFile.Close()

	nameReader, err := gzip.NewReader(nameFile)
	if err != nil {
		return "", err
	}
	defer nameReader.Close()

	name, err := readLine(nameReader, nameIdx)
	if err != nil {
		return "", err
	}

	surnameFile, err := os.Open("names.dat.gz")
	if err != nil {
		return "", err
	}
	defer surnameFile.Close()

	surnameReader, err := gzip.NewReader(surnameFile)
	if err != nil {
		return "", err
	}
	defer surnameReader.Close()

	surname, err := readLine(surnameReader, surnameIdx)
	if err != nil {
		return "", err
	}

	return strings.Join([]string{name, surname}, " "), nil

}
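Every example above reduces to the same core pattern: gzip.NewReader wraps any io.Reader, returns an error when the data does not start with a valid gzip header, and the returned *gzip.Reader should be closed when done. A minimal, self-contained sketch of that pattern (the path data.csv.gz is only a placeholder):
package main

import (
	"compress/gzip"
	"fmt"
	"io"
	"log"
	"os"
)

func main() {
	f, err := os.Open("data.csv.gz") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// gzip.NewReader reads and validates the gzip header up front,
	// so a non-gzip file fails here rather than during the copy.
	gz, err := gzip.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}
	defer gz.Close()

	// Stream the decompressed bytes to stdout.
	n, err := io.Copy(os.Stdout, gz)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Fprintf(os.Stderr, "decompressed %d bytes\n", n)
}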