Ejemplo n.º 1
0
// GetPage fetches url with a GET request and returns the response body,
// transparently decompressing it when the server replies with
// Content-Encoding: gzip.
//
// NOTE(review): TLS certificate verification is disabled
// (InsecureSkipVerify), which exposes callers to man-in-the-middle
// attacks. Kept as-is because existing callers may rely on it for
// self-signed endpoints — confirm before tightening.
func GetPage(url string) ([]byte, error) {
	tr := &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{Transport: tr}
	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	// BUG FIX: only advertise gzip. The original also requested
	// "deflate", which the code below cannot decode, so a server
	// choosing deflate would have produced garbage bytes.
	req.Header.Add("Accept-Encoding", "gzip")
	resp, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.Header.Get("Content-Encoding") == "gzip" {
		gz, err := gzip.NewReader(resp.Body)
		if err != nil {
			return nil, err
		}
		defer gz.Close()
		body, err := ioutil.ReadAll(gz)
		if err != nil {
			return nil, err
		}
		return body, nil
	}
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	return body, nil
}
Ejemplo n.º 2
0
// NewDscin opens filename (transparently decompressing ".gz" files),
// registers one unit of work on the supplied WaitGroup, and selects a
// marshaler from the (uncompressed) file extension. Returns nil when the
// file cannot be opened or the gzip stream is invalid; in that case the
// WaitGroup registration is undone.
func NewDscin(waiter *sync.WaitGroup, filename string) *Dscin {
	d := new(Dscin)
	d.Operation.Waiter = waiter
	d.Operation.Waiter.Add(1)
	file, err := os.Open(filename)
	if err != nil {
		// BUG FIX: undo the Add above so callers waiting on the group
		// don't hang forever when construction fails.
		waiter.Done()
		return nil
	}
	d.closer = func() {
		file.Close()
	}
	var reader io.ReadCloser = file
	if strings.HasSuffix(filename, ".gz") {
		uncompressor, gzErr := gzip.NewReader(file)
		if gzErr != nil {
			// BUG FIX: the error was previously ignored, leaving a nil
			// reader and a closer that panicked on Close.
			file.Close()
			waiter.Done()
			return nil
		}
		d.closer = func() { uncompressor.Close(); file.Close() }
		reader = uncompressor
	}
	// BUG FIX: strings.TrimRight strips any trailing '.', 'g' or 'z'
	// characters (mangling names like "log.gg"); TrimSuffix removes the
	// literal ".gz" suffix only.
	uncompressedName := strings.TrimSuffix(filename, ".gz")
	switch {
	case strings.HasSuffix(uncompressedName, ".gob"):
		d.marshaler = new(formats.GobMarshaler)
	case strings.HasSuffix(uncompressedName, ".xml"):
		d.marshaler = new(formats.XmlMarshaler)
	}
	if d.marshaler != nil {
		d.marshaler.ValidateFile(reader)
	}
	return d
}
Ejemplo n.º 3
0
// handleAssets serves a compiled-in, gzip-compressed asset identified by
// the request path ("index.html" for the root). Clients that accept gzip
// get the stored bytes verbatim; others get them decompressed.
func handleAssets(w http.ResponseWriter, r *http.Request) {
	assets := auto.Assets()
	path := r.URL.Path[1:]
	if path == "" {
		path = "index.html"
	}

	bs, ok := assets[path]
	if !ok {
		w.WriteHeader(http.StatusNotFound)
		return
	}

	mtype := mimeTypeForFile(path)
	if len(mtype) != 0 {
		w.Header().Set("Content-Type", mtype)
	}

	if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		// Client accepts gzip: send the stored bytes as-is.
		w.Header().Set("Content-Encoding", "gzip")
	} else {
		// Decompress for clients that do not accept gzip.
		// BUG FIX: decode errors were silently ignored, which could
		// dereference a nil reader or serve truncated content; fail
		// with a 500 instead.
		gr, err := gzip.NewReader(bytes.NewReader(bs))
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
		bs, err = ioutil.ReadAll(gr)
		gr.Close()
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
	}
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bs)))

	w.Write(bs)
}
Ejemplo n.º 4
0
// readBytes reads an encrypted, gzip-compressed stream from in and
// returns the decrypted, decompressed bytes. The AES key is derived from
// key via hashKey; decryption uses OFB mode with a zero IV.
func readBytes(in io.Reader, key string) ([]byte, error) {
	// Build the AES cipher block from the hashed key.
	block, err := aes.NewCipher(hashKey(key))
	if err != nil {
		return nil, err
	}

	// Layer OFB stream decryption (zero IV) over the raw input.
	var iv [aes.BlockSize]byte
	decrypted := &cipher.StreamReader{
		S: cipher.NewOFB(block, iv[:]),
		R: in,
	}

	// Layer gzip decompression over the decrypted stream.
	zr, err := gzip.NewReader(decrypted)
	if err != nil {
		return nil, err
	}
	defer zr.Close()

	// Drain everything into a buffer; return the bytes read together
	// with the last error state, exactly as the copy reported it.
	var out bytes.Buffer
	_, err = io.Copy(&out, zr)
	return out.Bytes(), err
}
Ejemplo n.º 5
0
// Decode returns the decoded data for the bundle entry: a byte slice
// with the base64-decoded and, when required, decompressed entry data,
// plus a non-nil error if the data cannot be decoded. If flag is NODC
// and the entry data are compressed (Entry.Gzip == true), Decode only
// base64-decodes the data without decompressing them. In most cases it
// is preferable to use the Reader interface instead of calling Decode.
func (e *Entry) Decode(flag int) ([]byte, error) {
	// Base64 layer over the stored string data.
	decoded := base64.NewDecoder(base64.StdEncoding, strings.NewReader(e.Data))

	// Unless decompression is suppressed via NODC, layer a gzip reader
	// on top of the base64 stream for compressed entries.
	var src io.Reader = decoded
	if e.Gzip && flag&NODC == 0 {
		zr, err := gzip.NewReader(decoded)
		if err != nil {
			return nil, err
		}
		defer zr.Close()
		src = zr
	}

	var out bytes.Buffer
	if _, err := io.Copy(&out, src); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
Ejemplo n.º 6
0
// readChunkedData reads transfer-encoding: chunked payloads from the
// connection reader and writes them to stdout, decompressing on the fly
// when the connection is configured for gzip. It returns the first read
// error encountered (io.EOF at end of stream), or nil once the
// configured TTL has elapsed.
func (c *Connection) readChunkedData() error {
	var err error
	var line []byte
	var size uint64
	var start time.Time

	start = time.Now()
	writer := &nonEmptyWriter{os.Stdout}

	var buffer *bytes.Buffer
	var decompressor *gzip.Reader
	var zipReader *bufio.Reader
	var data []byte

	if c.conf.GZip {
		buffer = bytes.NewBufferString("")
	}

	for err == nil {
		// Each chunk is preceded by its size on a line of its own, in hex.
		line, _, err = c.reader.ReadLine()
		if err != nil {
			return err
		}
		size, err = decodeHexString(line)
		if err != nil {
			str := fmt.Sprintf("Expected hex, got %v", string(line))
			return errors.New(str)
		}
		if !c.conf.GZip {
			_, err = io.CopyN(writer, c.reader, int64(size))
		} else {
			_, err = io.CopyN(buffer, c.reader, int64(size))
			if err != nil {
				return err
			}
			if decompressor == nil {
				// Lazily create the decompressor once the first chunk
				// (containing the gzip header) has been buffered.
				decompressor, err = gzip.NewReader(buffer)
				// BUG FIX: check the error before deferring Close; the
				// original deferred first, so a NewReader failure caused
				// a nil-pointer panic at function exit.
				if err != nil {
					return err
				}
				defer decompressor.Close()
				zipReader = bufio.NewReader(decompressor)
			}
			data = make([]byte, 512)
			var n int
			n, err = zipReader.Read(data)
			if err != nil {
				return err
			}
			// BUG FIX: forward only the n bytes actually read; the
			// original wrote the full 512-byte buffer, padding short
			// reads with zero bytes.
			strBuffer := bytes.NewBuffer(data[:n])
			io.CopyN(writer, strBuffer, int64(n))
		}
		if c.conf.TTL > 0 {
			if time.Now().Sub(start).Nanoseconds() > c.conf.TTL {
				return nil
			}
		}
	}
	return err
}
Ejemplo n.º 7
0
// ServeHTTP serves a themed static asset, preferring an on-disk override
// from assetDir, then the current theme's compiled-in assets, then the
// default theme's. Stored assets are gzip-compressed and are
// decompressed on the fly for clients that do not accept gzip.
func (s embeddedStatic) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	file := r.URL.Path

	if file[0] == '/' {
		file = file[1:]
	}

	if len(file) == 0 {
		file = "index.html"
	}

	// On-disk override takes precedence over embedded assets.
	if s.assetDir != "" {
		p := filepath.Join(s.assetDir, filepath.FromSlash(file))
		_, err := os.Stat(p)
		if err == nil {
			http.ServeFile(w, r, p)
			return
		}
	}

	s.mut.RLock()
	theme := s.theme
	modified := s.lastModified
	s.mut.RUnlock()

	bs, ok := s.assets[theme+"/"+file]
	if !ok {
		// Fall back to the default theme's copy of the asset.
		bs, ok = s.assets[config.DefaultTheme+"/"+file]
		if !ok {
			http.NotFound(w, r)
			return
		}
	}

	// BUG FIX: time.Parse takes (layout, value); the original swapped
	// the arguments, so the If-Modified-Since header never parsed and a
	// 304 was never returned.
	if modifiedSince, err := time.Parse(http.TimeFormat, r.Header.Get("If-Modified-Since")); err == nil && modified.Before(modifiedSince) {
		w.WriteHeader(http.StatusNotModified)
		return
	}

	mtype := s.mimeTypeForFile(file)
	if len(mtype) != 0 {
		w.Header().Set("Content-Type", mtype)
	}
	if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		w.Header().Set("Content-Encoding", "gzip")
	} else {
		// Decompress for clients that do not accept gzip.
		// BUG FIX: decode errors were ignored, risking a nil dereference
		// on a corrupt asset; fail with a 500 instead.
		gr, err := gzip.NewReader(bytes.NewReader(bs))
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
		bs, err = ioutil.ReadAll(gr)
		gr.Close()
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
	}
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bs)))
	w.Header().Set("Last-Modified", modified.Format(http.TimeFormat))
	w.Header().Set("Cache-Control", "public")

	w.Write(bs)
}
Ejemplo n.º 8
0
Archivo: ipkg.go Proyecto: thz/kellner
// extractControlFromIpk pulls the 'control' file out of an ipk archive
// read from reader. An ipk is an ar archive containing control.tar.gz,
// which in turn holds a Debian-style key/value 'control' file as
// described in https://www.debian.org/doc/debian-policy/ch-controlfields.html
func extractControlFromIpk(reader io.Reader) (string, error) {

	archive := ar.NewReader(reader)

	// Scan the ar members until the control tarball shows up.
	var gz *gzip.Reader
	for {
		hdr, err := archive.Next()
		if err != nil && err != io.EOF {
			return "", fmt.Errorf("extracting contents: %v", err)
		}
		if hdr == nil {
			break
		}

		// Some archives carry a stray trailing "/" on member names.
		if hdr.Name != "control.tar.gz/" && hdr.Name != "control.tar.gz" {
			continue
		}
		var zerr error
		if gz, zerr = gzip.NewReader(archive); zerr != nil {
			return "", fmt.Errorf("analyzing control.tar.gz: %v", zerr)
		}
		break
	}

	if gz == nil {
		return "", fmt.Errorf("missing control.tar.gz entry")
	}
	defer gz.Close()

	// Walk the tarball until "./control" is found and copy it out.
	var content bytes.Buffer
	tarball := tar.NewReader(gz)
	for {
		hdr, err := tarball.Next()
		if err != nil && err != io.EOF {
			return "", fmt.Errorf("extracting control.tar.gz: %v", err)
		}
		if hdr == nil {
			break
		}
		if hdr.Name != "./control" {
			continue
		}

		io.Copy(&content, tarball)
		break
	}

	if content.Len() == 0 {
		return "", fmt.Errorf("missing or empty 'control' file inside 'control.tar.gz'")
	}
	return content.String(), nil
}
Ejemplo n.º 9
0
// ServeHTTP serves a compiled-in static asset, preferring an on-disk
// override from assetDir when present. Stored assets are
// gzip-compressed and are decompressed on the fly for clients that do
// not accept gzip.
func (s embeddedStatic) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	file := r.URL.Path

	if file[0] == '/' {
		file = file[1:]
	}

	if len(file) == 0 {
		file = "index.html"
	}

	// On-disk override takes precedence over embedded assets.
	if s.assetDir != "" {
		p := filepath.Join(s.assetDir, filepath.FromSlash(file))
		_, err := os.Stat(p)
		if err == nil {
			http.ServeFile(w, r, p)
			return
		}
	}

	bs, ok := s.assets[file]
	if !ok {
		http.NotFound(w, r)
		return
	}

	// NOTE(review): this compares the If-Modified-Since header verbatim
	// against the build-date string rather than parsing dates, so it
	// only triggers when the client echoes Last-Modified exactly —
	// confirm this is the intended caching contract.
	if r.Header.Get("If-Modified-Since") == auto.AssetsBuildDate {
		w.WriteHeader(http.StatusNotModified)
		return
	}

	mtype := s.mimeTypeForFile(file)
	if len(mtype) != 0 {
		w.Header().Set("Content-Type", mtype)
	}
	if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		w.Header().Set("Content-Encoding", "gzip")
	} else {
		// Decompress for clients that do not accept gzip.
		// BUG FIX: decode errors were ignored, risking a nil dereference
		// on a corrupt asset; fail with a 500 instead.
		gr, err := gzip.NewReader(bytes.NewReader(bs))
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
		bs, err = ioutil.ReadAll(gr)
		gr.Close()
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
	}
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bs)))
	w.Header().Set("Last-Modified", auto.AssetsBuildDate)
	w.Header().Set("Cache-Control", "public")

	w.Write(bs)
}
Ejemplo n.º 10
0
// readTheThings opens testing/testfile.tar.gz and advances past the
// first tar entry. It has no way to report failures, so errors simply
// abort the function.
func readTheThings() {
	// BUG FIX: the open error was discarded; a missing file previously
	// fell through to gzip.NewReader on a nil-backed handle.
	file, err := os.OpenFile("testing/testfile.tar.gz", os.O_RDWR, os.ModePerm)
	if err != nil {
		return
	}
	defer file.Close()
	zipReader, err := gzip.NewReader(file)
	if err != nil {
		return
	}
	defer zipReader.Close()
	tarReader := tar.NewReader(zipReader)
	tarReader.Next()
	/*
		// NOTE(review): this loop never reassigns header, so as written
		// it would print the first entry's name repeatedly — fix before
		// re-enabling.
		for header, err := tarReader.Next(); err == nil; tarReader.Next() {
			fmt.Printf("File handle: %s\n", header.Name)
		}
	*/
}
Ejemplo n.º 11
0
// openInvoiceFile opens filename for reading, transparently wrapping it
// in a gzip decompressor when the name ends in ".gz". It returns the
// reader, a cleanup func that releases all underlying resources, and an
// error. On error the file is closed here and nil values are returned.
func openInvoiceFile(filename string) (io.ReadCloser, func(), error) {
	file, err := os.Open(filename)
	if err != nil {
		return nil, nil, err
	}
	closer := func() { file.Close() }
	var reader io.ReadCloser = file
	if strings.HasSuffix(filename, ".gz") {
		decompressor, err := gzip.NewReader(file)
		if err != nil {
			// BUG FIX: the original returned the open file and a closer
			// alongside the error; callers following the usual
			// check-err-first convention leaked the descriptor.
			file.Close()
			return nil, nil, err
		}
		closer = func() { decompressor.Close(); file.Close() }
		reader = decompressor
	}
	return reader, closer, nil
}
Ejemplo n.º 12
0
Archivo: input.go Proyecto: rmpalmer/io
// NewDscin opens filename (transparently decompressing ".gz" files),
// wires up a closer that releases all underlying resources, and
// validates the file with a gob marshaler. Returns nil when the file
// cannot be opened or the gzip stream is invalid.
func NewDscin(filename string) *Dscin {
	d := new(Dscin)
	file, err := os.Open(filename)
	if err != nil {
		return nil
	}
	d.closer = func() {
		fmt.Printf("closing file %s\n", filename)
		file.Close()
	}
	var reader io.ReadCloser = file
	if strings.HasSuffix(filename, ".gz") {
		decompressor, gzErr := gzip.NewReader(file)
		if gzErr != nil {
			// BUG FIX: the error was previously ignored, so a corrupt
			// .gz file left a nil decompressor that panicked in the
			// closer and in ValidateFile.
			file.Close()
			return nil
		}
		d.closer = func() { decompressor.Close(); file.Close() }
		reader = decompressor
	}
	d.marshaler = formats.GobMarshaler{}
	d.marshaler.ValidateFile(reader)
	return d
}
Ejemplo n.º 13
0
// main extracts output/sample.tar.gz, writing each archive member into
// the output/ directory with mode 0755. Any failure aborts the program.
func main() {
	f, err := os.Open("output/sample.tar.gz")
	if err != nil {
		log.Fatalln(err)
	}
	defer f.Close()

	zr, err := gzip.NewReader(f)
	if err != nil {
		log.Fatalln(err)
	}
	defer zr.Close()

	archive := tar.NewReader(zr)

	for {
		hdr, err := archive.Next()
		if err == io.EOF {
			// End of the archive.
			break
		}
		if err != nil {
			log.Fatalln(err)
		}

		// Buffer the member's contents before writing them out.
		var content bytes.Buffer
		if _, err = io.Copy(&content, archive); err != nil {
			log.Fatalln(err)
		}

		if err = ioutil.WriteFile("output/"+hdr.Name, content.Bytes(), 0755); err != nil {
			log.Fatal(err)
		}
	}
}
Ejemplo n.º 14
0
// openTarGz opens the .tar.gz archive referenced by za.path and hands
// the resulting tar reader to openTar for member processing.
func openTarGz(za *ZipAssets) (err error) {
	f, oerr := os.Open(za.path)
	if oerr != nil {
		return oerr
	}
	defer f.Close()

	gz, zerr := gzip.NewReader(f)
	if zerr != nil {
		return zerr
	}
	defer gz.Close()

	// Delegate the actual member handling to openTar.
	return openTar(za, tar.NewReader(gz))
}
Ejemplo n.º 15
0
// RingOrBuilder attempts to determine whether a file is a Ring or
// Builder file and then loads it accordingly. The file is expected to be
// gzip-compressed with a 16-byte version header identifying its type.
//
// NOTE(review): f is handed to LoadRing/LoadBuilder and never closed
// here; whether those loaders take ownership of the descriptor cannot be
// determined from this file — confirm before adding a Close.
func RingOrBuilder(fileName string) (Ring, *Builder, error) {
	var f *os.File
	var r Ring
	var b *Builder
	var err error
	if f, err = os.Open(fileName); err != nil {
		return r, b, err
	}
	var gf *gzip.Reader
	if gf, err = gzip.NewReader(f); err != nil {
		return r, b, err
	}
	// BUG FIX: close the gzip reader on every path; it previously leaked
	// on version-mismatch and unknown-header returns.
	defer gf.Close()
	header := make([]byte, 16)
	if _, err = io.ReadFull(gf, header); err != nil {
		return r, b, err
	}
	if string(header[:5]) == "RINGv" {
		if string(header[:16]) != RINGVERSION {
			// BUG FIX: "missmatch" typo corrected in the error message.
			return r, b, fmt.Errorf("Ring Version mismatch, expected %s found %s", RINGVERSION, header[:16])
		}
		if _, err = f.Seek(0, 0); err != nil {
			return r, b, err
		}
		r, err = LoadRing(f)
	} else if string(header[:12]) == "RINGBUILDERv" {
		if string(header[:16]) != BUILDERVERSION {
			return r, b, fmt.Errorf("Builder Version mismatch, expected %s found %s", BUILDERVERSION, header[:16])
		}
		if _, err = f.Seek(0, 0); err != nil {
			return r, b, err
		}
		b, err = LoadBuilder(f)
	}
	return r, b, err
}
Ejemplo n.º 16
0
// UnpackTar opens the tar archive at filename — optionally compressed
// with gzip (".gz") or bzip2 (".bz2") — and unpacks its members into
// path via unpackTarFiles.
func UnpackTar(filename string, path string) (err error) {
	file, err := os.Open(filename)
	if err != nil {
		return err
	}
	defer file.Close()

	// Pick the decompression layer from the file extension.
	var src io.Reader = file
	if strings.HasSuffix(filename, ".gz") {
		gz, gzErr := gzip.NewReader(file)
		if gzErr != nil {
			return gzErr
		}
		defer gz.Close()
		src = gz
	} else if strings.HasSuffix(filename, ".bz2") {
		src = bzip2.NewReader(file)
	}

	return unpackTarFiles(tar.NewReader(src), path)
}
Ejemplo n.º 17
0
// ReturnGZipReader closes a gzip reader and returns it to the pool so a
// later borrower can reuse it (presumably after a Reset — confirm at the
// Get side; gzipReaderCache is defined elsewhere in this package).
func ReturnGZipReader(gz *gzip.Reader) {
	gz.Close()
	gzipReaderCache.Put(gz)
}
Ejemplo n.º 18
0
// serveAsset serves a themed static asset: an on-disk override for the
// current theme, then the current theme's compiled-in asset, then an
// on-disk default-theme override, then the compiled-in default-theme
// asset. Stored assets are gzip-compressed and are decompressed on the
// fly for clients that do not accept gzip.
func (s *staticsServer) serveAsset(w http.ResponseWriter, r *http.Request) {
	file := r.URL.Path

	if file[0] == '/' {
		file = file[1:]
	}

	if len(file) == 0 {
		file = "index.html"
	}

	s.mut.RLock()
	theme := s.theme
	s.mut.RUnlock()

	// Check for an override for the current theme.
	if s.assetDir != "" {
		p := filepath.Join(s.assetDir, theme, filepath.FromSlash(file))
		if _, err := os.Stat(p); err == nil {
			http.ServeFile(w, r, p)
			return
		}
	}

	// Check for a compiled in asset for the current theme.
	bs, ok := s.assets[theme+"/"+file]
	if !ok {
		// Check for an overridden default asset.
		if s.assetDir != "" {
			p := filepath.Join(s.assetDir, config.DefaultTheme, filepath.FromSlash(file))
			if _, err := os.Stat(p); err == nil {
				http.ServeFile(w, r, p)
				return
			}
		}

		// Check for a compiled in default asset.
		bs, ok = s.assets[config.DefaultTheme+"/"+file]
		if !ok {
			http.NotFound(w, r)
			return
		}
	}

	mtype := s.mimeTypeForFile(file)
	if len(mtype) != 0 {
		w.Header().Set("Content-Type", mtype)
	}
	if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		w.Header().Set("Content-Encoding", "gzip")
	} else {
		// Decompress for clients that do not accept gzip.
		// BUG FIX: decode errors were ignored, risking a nil dereference
		// on a corrupt asset; fail with a 500 instead.
		gr, err := gzip.NewReader(bytes.NewReader(bs))
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
		bs, err = ioutil.ReadAll(gr)
		gr.Close()
		if err != nil {
			http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
			return
		}
	}
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(bs)))

	w.Write(bs)
}
Ejemplo n.º 19
0
// xsavez2 builds a new corpus as a zip archive of XML documents selected
// from one or more dact (dbxml) corpora by an XPath query (possibly with
// multiple "+|+"-separated parts that must all match a document), and
// registers it in the database. On any failure the partially-built
// corpus directory and its database record are removed.
func xsavez2(q *Context) {

	var fpz, fpgz *os.File
	var z *zip.Writer
	var gz *gzip.Reader
	var dact *dbxml.Db
	var docs *dbxml.Docs
	var dirname, fulldirname string
	var okall bool

	// NOTE(review): fpgz and gz are declared and closed in the deferred
	// cleanup below but never assigned anywhere in this function —
	// apparently carried over from the sibling savez2; confirm before
	// removing.

	// Deferred cleanup: close whatever is still open and, if the save
	// did not complete (okall is false), remove the partial corpus.
	defer func() {
		if z != nil {
			z.Close()
		}
		if fpz != nil {
			fpz.Close()
		}
		if gz != nil {
			gz.Close()
		}
		if fpgz != nil {
			fpgz.Close()
		}
		if docs != nil {
			docs.Close()
		}
		if dact != nil {
			dact.Close()
		}
		if !okall {
			os.RemoveAll(fulldirname)
			q.db.Exec(fmt.Sprintf("DELETE FROM `%s_info` WHERE `id` = %q", Cfg.Prefix, dirname))
		}
	}()

	protected := 0

	if !q.auth {
		http.Error(q.w, "Je bent niet ingelogd", http.StatusUnauthorized)
		return
	}

	methode := firstf(q.form, "mt")
	if methode != "dx" {
		methode = "std"
	}

	// Collect the requested corpora and verify the user may read them;
	// mark the result protected if any source corpus is.
	corpora := make([]string, 0, len(q.form.Value["db"]))
	for _, c := range q.form.Value["db"] {
		if s := strings.TrimSpace(c); s != "" {
			corpora = append(corpora, s)
		}
	}
	for _, corpus := range corpora {
		if !q.prefixes[corpus] {
			http.Error(q.w, "Geen toegang tot corpus", http.StatusUnauthorized)
			return
		}
		if q.protected[corpus] || !q.myprefixes[corpus] {
			protected = 1
		}
	}

	if len(corpora) == 0 {
		writeHtml(q, "Fout", "Geen corpora gekozen")
		return
	}

	xpath := firstf(q.form, "xpath")
	if xpath == "" {
		writeHtml(q, "Fout", "Zoekterm ontbreekt")
		return
	}

	title := maxtitlelen(firstf(q.form, "title"))
	if title == "" {
		writeHtml(q, "Fout", "Titel ontbreekt")
		return
	}

	maxdup, _ := strconv.Atoi(firstf(q.form, "maxdup"))
	if maxdup < 1 || maxdup > Cfg.Maxdup {
		maxdup = Cfg.Maxdup
	}

	dirname, fulldirname, ok := beginNewCorpus(q, q.db, title, hErr)
	if !ok {
		return
	}

	fpz, err := os.Create(fulldirname + "/data")
	if hErr(q, err) {
		fpz = nil
		return
	}
	z = zip.NewWriter(fpz)

	// Collect matching documents from every corpus into the zip,
	// stopping once maxdup matches have been written.
	linecount := 0
	for _, prefix := range corpora {
		if linecount == maxdup && maxdup > 0 {
			break
		}

		global, ok := isGlobal(q, prefix)
		if !ok {
			return
		}
		pathlen, ok := getPathLen(q, prefix, global, true)
		if !ok {
			return
		}

		// Resolve the list of dact files backing this corpus.
		dactfiles := make([]string, 0)
		if !global {
			dactfiles = append(dactfiles, fmt.Sprintf("%s/data/%s/data.dact", paqudir, prefix))
		} else {
			rows, err := q.db.Query(fmt.Sprintf("SELECT `arch` FROM `%s_c_%s_arch` ORDER BY `id`", Cfg.Prefix, prefix))
			if hErr(q, err) {
				return
			}
			for rows.Next() {
				var s string
				if hErr(q, rows.Scan(&s)) {
					rows.Close()
					return
				}
				if strings.HasSuffix(s, ".dact") {
					dactfiles = append(dactfiles, s)
				}
			}
			if hErr(q, rows.Err()) {
				return
			}
		}

		// Expand %macros% in the query if present.
		fullquery := xpath
		if strings.Contains(xpath, "%") {
			rules := getMacrosRules(q)
			fullquery = macroKY.ReplaceAllStringFunc(xpath, func(s string) string {
				return rules[s[1:len(s)-1]]
			})
		}
		queryparts := strings.Split(fullquery, "+|+")

		for _, dactfile := range dactfiles {
			if linecount == maxdup && maxdup > 0 {
				break
			}
			if Cfg.Dactx && methode == "dx" {
				dactfile += "x"
			}
			var data []byte
			dact, err = dbxml.Open(dactfile)
			if hErr(q, err) {
				dact = nil
				return
			}

			qu, err := dact.Prepare(queryparts[0])
			if hErr(q, err) {
				return
			}
			docs, err = qu.Run()
			if hErr(q, err) {
				docs = nil
				return
			}
			seen := make(map[string]bool)
		NEXTDOC:
			for docs.Next() {
				if linecount == maxdup && maxdup > 0 {
					break
				}
				filename := docs.Name()
				if seen[filename] {
					continue
				}
				seen[filename] = true
				// For multi-part queries, every remaining part must also
				// match this document; the last part supplies the data.
				found := false
				if len(queryparts) == 1 {
					found = true
					data = []byte(docs.Content())
				} else {
					doctxt := fmt.Sprintf("[dbxml:metadata('dbxml:name')=%q]", filename)
					for i := 1; i < len(queryparts)-1; i++ {
						docs2, err := dact.Query(doctxt + queryparts[i])
						if hErr(q, err) {
							return
						}
						if !docs2.Next() {
							continue NEXTDOC
						}
						docs2.Close()
					}
					docs2, err := dact.Query(doctxt + queryparts[len(queryparts)-1])
					if hErr(q, err) {
						return
					}
					found = false
					if docs2.Next() {
						found = true
						data = []byte(docs2.Content())
						docs2.Close()
					}

				}
				if !found {
					continue
				}

				// Build the archive member name; qualify with the dact
				// path for global corpora and with the corpus prefix
				// when several corpora are merged.
				newfile := filename
				if global {
					newfile = dactfile[pathlen:len(dactfile)-5] + "::" + filename
				}
				if len(corpora) > 1 {
					newfile = prefix + "/" + newfile
					data = xmlSetSource(data, prefix)
				}
				f, err := z.Create(newfile)
				if hErr(q, err) {
					return
				}
				if methode == "dx" {
					data, err = unexpandDact(data)
					if hErr(q, err) {
						return
					}
				}
				_, err = f.Write(data)
				if hErr(q, err) {
					return
				}
				linecount++
			} // for docs.Next()
			err = docs.Error()
			docs = nil
			if hErr(q, err) {
				return
			}
			dact.Close()
			dact = nil
		} // for range dactfiles
	} // for range corpora

	err = z.Close()
	z = nil
	if hErr(q, err) {
		return
	}
	fpz.Close()
	fpz = nil

	// Register the finished corpus and mark the save as complete so the
	// deferred cleanup leaves it in place.
	s := "xmlzip-d"
	if protected != 0 {
		s = "xmlzip-p"
	}
	newCorpus(q, q.db, dirname, title, s, protected, hErr, true)
	okall = true
}
Ejemplo n.º 20
0
// TestGzippedReplication spins up a two-member replication cluster and
// exercises both the sync and replication HTTP endpoints with gzip and
// identity content encodings, checking that the payload round-trips and
// that gzip actually shrank it. Skipped under -short (it sleeps for
// several seconds per case).
func TestGzippedReplication(t *testing.T) {
	if testing.Short() {
		t.Skip()
	}

	cluster := createCluster()

	member1 := createMember(6106)
	repServer1, _ := New(&Config{
		Registrator: cluster.Registrator(member1),
		Membership:  cluster.Membership()})
	defer repServer1.Stop()
	<-repServer1.Sync(1)

	member2 := createMember(6206)
	repServer2, _ := New(&Config{
		Registrator: cluster.Registrator(member2),
		Membership:  cluster.Membership()})
	defer repServer2.Stop()
	<-repServer2.Sync(1)

	var resp *http.Response

	replicator2, _ := repServer2.GetReplicator(auth.NamespaceFrom("ns1"))

	client := http.Client{Transport: &http.Transport{MaxIdleConnsPerHost: 1}}

	// Build a large, highly compressible request payload.
	var requestData bytes.Buffer
	for i := 0; i < 100; i++ {
		requestData.Write([]byte("9999999999999999999999999999999999"))
	}

	// Iterate over both sync and replication HTTP endpoints
	for _, serviceType := range []string{syncContext, repContext} {
		// Iterate over both content-encoding options- gzip and no zip (identity)
		for _, encoding := range []string{"gzip", "identity"} {
			var dec decoder
			var unZipper *gzip.Reader
			var respBuff bytes.Buffer

			// prepare event that triggers a replication or a sync
			if serviceType == repContext {
				time.AfterFunc(time.Duration(2)*time.Second, func() {
					replicator2.Broadcast(requestData.Bytes())
				})
			} else {
				// grab the channel that was created for us
				// insert an entity inside it and close the channel to signal sync finish
				outMsg := &outMessage{Data: requestData.Bytes()}
				buff, _ := json.Marshal(outMsg)
				time.AfterFunc(time.Duration(2)*time.Second, func() {
					ch := <-repServer2.SyncRequest()
					ch <- buff
					close(ch)
				})
			}

			url := fmt.Sprintf("http://%s:%d/%s/%s", network.GetPrivateIP(), 6206, version, serviceType)
			req, _ := http.NewRequest("GET", url, nil)
			req.Header.Set("Accept-Encoding", encoding)
			req.Header.Set(headerMemberID, "fake member id")
			resp, _ = client.Do(req)
			// wait 4 sec to read enough data, since ioutil.ReadAll blocks until EOF
			// and we use an indefinite connection in case of replication service endpoint
			if serviceType == repContext {
				time.AfterFunc(time.Duration(4)*time.Second, func() { resp.Body.Close() })
			}
			responseData, _ := ioutil.ReadAll(resp.Body)
			respBuff.Write(responseData)

			// if we've sent a gzipped entity, decode it before comparing
			// but also ensure that the returned payload was compressed
			// by comparing the size of the payload to the size sent
			if encoding == "gzip" {
				assert.True(t, len(requestData.Bytes()) > len(responseData),
					"Seems like the gzip hasn't shrunk the payload size enough or at all")
				unZipper, _ = gzip.NewReader(&respBuff)
				dec.Reader = bufio.NewReader(unZipper)
			} else {
				dec.Reader = bufio.NewReader(&respBuff)
			}

			event, _ := dec.Decode()
			if encoding == "gzip" {
				unZipper.Close()
			}
			var entityFromServer outMessage
			json.Unmarshal([]byte(event.Data()), &entityFromServer)
			assert.Equal(t, string(requestData.Bytes()), string(entityFromServer.Data),
				"Didn't receive the same content we sent in the case where encoding is %s", encoding)
		}
	}
}
Ejemplo n.º 21
0
// savez2 builds a new corpus as a zip archive of XML documents selected
// from one or more corpora by a dependency-relation query (word, rel,
// hword, postag, hpostag, meta form fields), reading each document
// either from a loose (possibly gzipped) file or from a dact archive,
// and registers the result in the database. On any failure the
// partially-built corpus directory and its database record are removed.
func savez2(q *Context) {

	var fpz, fpgz *os.File
	var z *zip.Writer
	var gz *gzip.Reader
	var dact interface{}
	var err error
	var dirname, fulldirname string
	var okall bool

	// Deferred cleanup: close whatever is still open and, if the save
	// did not complete (okall is false), remove the partial corpus.
	defer func() {
		if z != nil {
			z.Close()
		}
		if fpz != nil {
			fpz.Close()
		}
		if gz != nil {
			gz.Close()
		}
		if fpgz != nil {
			fpgz.Close()
		}
		saveCloseDact(dact)
		if !okall {
			os.RemoveAll(fulldirname)
			q.db.Exec(fmt.Sprintf("DELETE FROM `%s_info` WHERE `id` = %q", Cfg.Prefix, dirname))
		}
	}()

	protected := 0

	if !q.auth {
		http.Error(q.w, "Je bent niet ingelogd", http.StatusUnauthorized)
		return
	}

	// Collect the requested corpora and verify the user may read them;
	// mark the result protected if any source corpus is.
	corpora := make([]string, 0, len(q.form.Value["db"]))
	for _, c := range q.form.Value["db"] {
		if s := strings.TrimSpace(c); s != "" {
			corpora = append(corpora, s)
		}
	}
	for _, corpus := range corpora {
		if !q.prefixes[corpus] {
			http.Error(q.w, "Geen toegang tot corpus", http.StatusUnauthorized)
			return
		}
		if q.protected[corpus] || !q.myprefixes[corpus] {
			protected = 1
		}
	}

	if len(corpora) == 0 {
		writeHtml(q, "Fout", "Geen corpora gekozen")
		return
	}

	word := firstf(q.form, "word")
	rel := firstf(q.form, "rel")
	hword := firstf(q.form, "hword")
	postag := firstf(q.form, "postag")
	hpostag := firstf(q.form, "hpostag")
	meta := firstf(q.form, "meta")
	if word == "" && hword == "" && rel == "" && postag == "" && hpostag == "" && meta == "" {
		writeHtml(q, "Fout", "Zoektermen ontbreken")
		return
	}

	title := maxtitlelen(firstf(q.form, "title"))
	if title == "" {
		writeHtml(q, "Fout", "Titel ontbreekt")
		return
	}

	maxdup, _ := strconv.Atoi(firstf(q.form, "maxdup"))
	if maxdup < 1 || maxdup > Cfg.Maxdup {
		maxdup = Cfg.Maxdup
	}

	dirname, fulldirname, ok := beginNewCorpus(q, q.db, title, hErr)
	if !ok {
		return
	}

	fpz, err = os.Create(fulldirname + "/data")
	if hErr(q, err) {
		fpz = nil
		return
	}
	z = zip.NewWriter(fpz)

	linecount := 0

	// Collect matching documents from every corpus into the zip,
	// stopping once maxdup matches have been written.
	chClose := make(<-chan bool)
	for _, prefix := range corpora {
		if linecount == maxdup && maxdup > 0 {
			break
		}

		global, ok := isGlobal(q, prefix)
		if !ok {
			return
		}
		pathlen, ok := getPathLen(q, prefix, global, false)
		if !ok {
			return
		}

		query, joins, usererr, syserr := makeQueryF(q, prefix, "c", chClose)
		if hErr(q, syserr) {
			return
		}
		if uhErr(q, usererr) {
			return
		}

		// Find the distinct files (and their archive index, -1 meaning a
		// loose file) matching the dependency-relation query.
		query = fmt.Sprintf(
			"SELECT DISTINCT `f`.`file`, `c`.`arch` FROM `%s_c_%s_deprel` `c` "+
				"JOIN `%s_c_%s_file` `f` ON (`f`.`id`=`c`.`file`) %s WHERE %s",
			Cfg.Prefix, prefix,
			Cfg.Prefix, prefix,
			joins,
			query)
		rows, err := q.db.Query(query)
		if hErr(q, err) {
			return
		}
		currentarch := -1
		dact = nil
		var arch int
		var filename, dactname string
		for rows.Next() {
			if linecount == maxdup && maxdup > 0 {
				rows.Close()
				break
			}
			err = rows.Scan(&filename, &arch)
			if hErr(q, err) {
				rows.Close()
				return
			}
			// Load the document: loose files may exist gzipped or plain;
			// archived files come out of the corresponding dact archive.
			var data []byte
			if arch < 0 {
				fpgz, err = os.Open(filename + ".gz")
				if err == nil {
					gz, err = gzip.NewReader(fpgz)
					if hErr(q, err) {
						gz = nil
						rows.Close()
						return
					}
					data, err = ioutil.ReadAll(gz)
					if hErr(q, err) {
						rows.Close()
						return
					}
					gz.Close()
					gz = nil
					fpgz.Close()
					fpgz = nil
				} else {
					fpgz, err = os.Open(filename)
					if hErr(q, err) {
						fpgz = nil
						rows.Close()
						return
					}
					data, err = ioutil.ReadAll(fpgz)
					if hErr(q, err) {
						rows.Close()
						return
					}
					fpgz.Close()
					fpgz = nil
				}
			} else {
				// Reuse the open dact archive while consecutive rows
				// reference the same one.
				if arch != currentarch {
					currentarch = arch
					saveCloseDact(dact)
					dact, dactname = saveOpenDact(q, prefix, arch)
				}
				data = saveGetDact(q, dact, filename)
			}

			// Build the archive member name; strip internal prefixes for
			// local corpora and qualify with the corpus prefix when
			// several corpora are merged.
			var newfile string
			if arch < 0 {
				newfile = filename[pathlen:]
				if !global {
					if strings.Contains(q.params[prefix], "-lbl") || strings.HasPrefix(q.params[prefix], "folia") || strings.HasPrefix(q.params[prefix], "tei") {
						newfile = decode_filename(newfile[10:])
					} else if strings.HasPrefix(q.params[prefix], "xmlzip") || q.params[prefix] == "dact" {
						newfile = decode_filename(newfile[5:])
					}
				}
			} else {
				newfile = dactname[pathlen:] + "::" + filename
			}
			if len(corpora) > 1 {
				newfile = prefix + "/" + newfile
				data = xmlSetSource(data, prefix)
			}

			f, err := z.Create(newfile)
			if hErr(q, err) {
				rows.Close()
				return
			}
			_, err = f.Write(data)
			if hErr(q, err) {
				rows.Close()
				return
			}
			linecount++
		} // for rows.Next()
		err = rows.Err()
		if hErr(q, err) {
			return
		}
		saveCloseDact(dact)
		dact = nil
	}

	err = z.Close()
	z = nil
	if hErr(q, err) {
		return
	}
	fpz.Close()
	fpz = nil

	// Register the finished corpus and mark the save as complete so the
	// deferred cleanup leaves it in place.
	s := "xmlzip-d"
	if protected != 0 {
		s = "xmlzip-p"
	}
	newCorpus(q, q.db, dirname, title, s, protected, hErr, true)
	okall = true
}
Ejemplo n.º 22
0
// ServeHTTP implements a mock Splunk HTTP Event Collector endpoint for
// tests: OPTIONS verifies the connection (and must be called only once),
// POST accepts a stream of concatenated JSON events (optionally
// gzip-encoded) and records the parsed messages; any other method is an
// error. When simulateServerError is set, every request gets a 500.
func (hec *HTTPEventCollectorMock) ServeHTTP(writer http.ResponseWriter, request *http.Request) {
	var err error

	hec.numOfRequests++

	if hec.simulateServerError {
		if request.Body != nil {
			defer request.Body.Close()
		}
		writer.WriteHeader(http.StatusInternalServerError)
		return
	}

	switch request.Method {
	case http.MethodOptions:
		// Verify that options method is getting called only once
		if hec.connectionVerified {
			hec.test.Errorf("Connection should not be verified more than once. Got second request with %s method.", request.Method)
		}
		hec.connectionVerified = true
		writer.WriteHeader(http.StatusOK)
	case http.MethodPost:
		// Always verify that Driver is using correct path to HEC
		if request.URL.String() != "/services/collector/event/1.0" {
			hec.test.Errorf("Unexpected path %v", request.URL)
		}
		defer request.Body.Close()

		if authorization, ok := request.Header["Authorization"]; !ok || authorization[0] != ("Splunk "+hec.token) {
			hec.test.Error("Authorization header is invalid.")
		}

		gzipEnabled := false
		if contentEncoding, ok := request.Header["Content-Encoding"]; ok && contentEncoding[0] == "gzip" {
			gzipEnabled = true
		}

		// Remember the first encoding seen and require it to stay stable.
		if hec.gzipEnabled == nil {
			hec.gzipEnabled = &gzipEnabled
		} else if gzipEnabled != *hec.gzipEnabled {
			// Nothing wrong with that, but we just know that Splunk Logging Driver does not do that
			hec.test.Error("Driver should not change Content Encoding.")
		}

		var gzipReader *gzip.Reader
		var reader io.Reader
		if gzipEnabled {
			gzipReader, err = gzip.NewReader(request.Body)
			if err != nil {
				hec.test.Fatal(err)
			}
			reader = gzipReader
		} else {
			reader = request.Body
		}

		// Read body
		var body []byte
		body, err = ioutil.ReadAll(reader)
		if err != nil {
			hec.test.Fatal(err)
		}

		// Parse the body as a stream of concatenated JSON objects,
		// splitting on "}{" boundaries.
		messageStart := 0
		for i := 0; i < len(body); i++ {
			if i == len(body)-1 || (body[i] == '}' && body[i+1] == '{') {
				var message splunkMessage
				err = json.Unmarshal(body[messageStart:i+1], &message)
				if err != nil {
					hec.test.Log(string(body[messageStart : i+1]))
					hec.test.Fatal(err)
				}
				hec.messages = append(hec.messages, &message)
				messageStart = i + 1
			}
		}

		if gzipEnabled {
			gzipReader.Close()
		}

		writer.WriteHeader(http.StatusOK)
	default:
		// BUG FIX: the original logged http.MethodOptions here instead of
		// the method actually received, hiding the offending verb.
		hec.test.Errorf("Unexpected HTTP method %s", request.Method)
		writer.WriteHeader(http.StatusBadRequest)
	}
}
Ejemplo n.º 23
0
// releaseGzipReader closes zr and returns it to gzipReaderPool so a
// later borrower can reuse it (presumably after a Reset — confirm at the
// Get side; gzipReaderPool is defined elsewhere in this package).
func releaseGzipReader(zr *gzip.Reader) {
	zr.Close()
	gzipReaderPool.Put(zr)
}
Ejemplo n.º 24
0
// PutReader closes and returns a gzip.Reader to the pool so that it can
// be reused via GetReader (which is expected to Reset it before use).
func (pool *GzipPool) PutReader(reader *gzip.Reader) {
	reader.Close()
	pool.readers.Put(reader)
}
Ejemplo n.º 25
0
// TestCompressed verifies that compressed bundle entries decoded with
// bundle.NODC yield valid gzip streams whose decompressed contents match
// the original on-disk files, via both Entry.Decode and the Reader
// interface.
//
// BUG FIXES: several t.Fatalf calls had a %s verb with no argument
// (printing %!s(MISSING) instead of the error), and one size-mismatch
// message referenced rdata, a variable that was never assigned.
func TestCompressed(t *testing.T) {
	var entries []string
	var e *bundle.Entry
	var br *bundle.Reader
	var gr *gzip.Reader
	var data, fdata, ddata []byte
	var i int
	var err error

	entries, err = mkentries(data_dir)
	if err != nil {
		t.Fatalf("mkentries failed (%s): %s", data_dir, err)
	}
	for i = 0; i < len(entries); i++ {
		// Read data from file
		fdata, err = ioutil.ReadFile(data_dir + entries[i])
		if err != nil {
			t.Fatalf("ReadFile error: %s", err)
		}
		// Get bundle entry
		e = _bundleIdx.Entry(entries[i])
		if e == nil {
			t.Fatalf("Not found: %s", entries[i])
		}
		// Get data from bundle using Decode
		data, err = e.Decode(bundle.NODC)
		if err != nil {
			t.Fatalf("bundle.Decode(): %s", err)
		}
		gr, err = gzip.NewReader(bytes.NewReader(data))
		if err != nil {
			t.Fatalf("gzip.NewReader: %s", err)
		}
		ddata, err = ioutil.ReadAll(gr)
		if err != nil {
			t.Fatalf("ReadAll(gr): %s", err)
		}
		err = gr.Close()
		if err != nil {
			t.Fatalf("gr.Close(): %s", err)
		}
		if len(ddata) != e.Size {
			t.Fatalf("len(ddata) %d != e.Size %d",
				len(ddata), e.Size)
		}
		if len(ddata) != len(fdata) {
			t.Fatalf("Bad ddata sz for: %s", entries[i])
		}
		if bytes.Compare(ddata, fdata) != 0 {
			t.Fatalf("Bad ddata for: %s", entries[i])
		}
		// Get data from bundle using bundle.Reader
		br, err = e.Open(bundle.NODC)
		if err != nil {
			t.Fatalf("e.Open(): %s", err)
		}
		gr, err = gzip.NewReader(br)
		if err != nil {
			t.Fatalf("gzip.NewReader(br): %s", err)
		}
		ddata, err = ioutil.ReadAll(gr)
		if err != nil {
			t.Fatalf("ReadAll(gr): %s", err)
		}
		err = gr.Close()
		if err != nil {
			t.Fatalf("gr.Close(): %s", err)
		}
		if len(ddata) != e.Size {
			t.Fatalf("len(ddata) %d != e.Size %d",
				len(ddata), e.Size)
		}
		if len(ddata) != len(fdata) {
			t.Fatalf("Bad ddata sz for: %s", entries[i])
		}
		if bytes.Compare(ddata, fdata) != 0 {
			t.Fatalf("Bad ddata for: %s", entries[i])
		}
		t.Logf("Entry: %s, Size: %d, Gzip: %v",
			e.Name, e.Size, e.Gzip)
	}
}
Ejemplo n.º 26
-11
// ringOrBuilder loads fileName as either a ring or a builder, deciding
// by the version prefix in the gzip-compressed 16-byte header. When the
// header matches neither prefix, all zero values are returned.
//
// NOTE(review): f is handed to ring.LoadRing/ring.LoadBuilder and never
// closed here; whether those loaders take ownership of the descriptor
// cannot be determined from this file — confirm before adding a Close.
func ringOrBuilder(fileName string) (r ring.Ring, b *ring.Builder, err error) {
	var f *os.File
	if f, err = os.Open(fileName); err != nil {
		return
	}
	var gf *gzip.Reader
	if gf, err = gzip.NewReader(f); err != nil {
		return
	}
	// BUG FIX: close the gzip reader on every path; it previously leaked
	// when ReadFull failed or the header matched neither prefix.
	defer gf.Close()
	header := make([]byte, 16)
	if _, err = io.ReadFull(gf, header); err != nil {
		return
	}
	if string(header[:5]) == "RINGv" {
		if _, err = f.Seek(0, 0); err != nil {
			return
		}
		r, err = ring.LoadRing(f)
	} else if string(header[:12]) == "RINGBUILDERv" {
		if _, err = f.Seek(0, 0); err != nil {
			return
		}
		b, err = ring.LoadBuilder(f)
	}
	return
}