Example #1
func (this *SnappyCodec) Inverse(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	count := this.size

	if this.size == 0 {
		count = uint(len(src))
	}

	res, err := snappy.Decode(dst, src[0:count])

	if err != nil {
		return 0, 0, fmt.Errorf("Decoding error: %v", err)
	}

	if len(res) > len(dst) {
		// Decode returns a newly allocated slice if the provided 'dst' array is too small.
		// There is no way to return this new slice, so treat it as an error
		return 0, 0, fmt.Errorf("Output buffer is too small - size: %d, required %d", len(dst), len(res))
	}

	return count, uint(len(res)), nil
}
Example #2
func (this *SnappyCodec) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	count := this.size

	if this.size == 0 {
		count = uint(len(src))
	}

	if n := snappy.MaxEncodedLen(int(count)); len(dst) < n {
		return 0, 0, fmt.Errorf("Output buffer is too small - size: %d, required %d", len(dst), n)
	}

	res, err := snappy.Encode(dst, src[0:count])

	if err != nil {
		return 0, 0, fmt.Errorf("Encoding error: %v", err)
	}

	return count, uint(len(res)), nil
}
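A minimal round-trip sketch showing how the two Snappy methods above fit together, written as if it lived in the same package. The bare &SnappyCodec{} literal is an assumption (a size of 0 simply means "process the whole input slice", as both functions show); only snappy.MaxEncodedLen, which Forward itself relies on, is used from the snappy package.

func roundTripSnappy(data []byte) ([]byte, error) {
	codec := &SnappyCodec{} // hypothetical literal; size == 0 => use len(src)

	// Forward requires len(dst) >= snappy.MaxEncodedLen(len(src))
	encoded := make([]byte, snappy.MaxEncodedLen(len(data)))
	_, encLen, err := codec.Forward(data, encoded)

	if err != nil {
		return nil, err
	}

	// Inverse decodes exactly the slice it is given
	decoded := make([]byte, len(data))
	_, decLen, err := codec.Inverse(encoded[:encLen], decoded)

	if err != nil {
		return nil, err
	}

	return decoded[:decLen], nil
}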
Example #3
func doCopy(src, dst []byte, sz uint) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	length := len(src)

	if sz > 0 {
		length = int(sz)

		if length > len(src) {
			return uint(0), uint(0), errors.New("Source buffer too small")
		}
	}

	if length > len(dst) {
		return uint(0), uint(0), errors.New("Destination buffer too small")
	}

	if kanzi.SameByteSlices(src, dst, false) == false {
		copy(dst, src[0:length])
	}

	return uint(length), uint(length), nil
}
Example #4
func (this *RLT) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	srcEnd := this.size

	if this.size == 0 {
		srcEnd = uint(len(src))
	}

	dstEnd := uint(len(dst))
	run := 1
	threshold := int(this.runThreshold)
	maxThreshold := RLT_MAX_RUN + int(this.runThreshold)
	srcIdx := uint(0)
	dstIdx := uint(0)

	// Initialize with a value different from the first data
	prev := ^src[srcIdx]

	for srcIdx < srcEnd && dstIdx < dstEnd {
		val := byte(src[srcIdx])
		srcIdx++

		// Encode up to 0x7FFF repetitions in the 'length' information
		if prev == val && run < maxThreshold {
			run++

			if run < threshold {
				dst[dstIdx] = prev
				dstIdx++
			}

			continue
		}

		if run >= threshold {
			dst[dstIdx] = prev
			dstIdx++
			run -= threshold

			// Force MSB to indicate a 2 byte encoding of the length
			if run >= TWO_BYTE_RLE_MASK {
				dst[dstIdx] = byte((run >> 8) | TWO_BYTE_RLE_MASK)
				dstIdx++
			}

			dst[dstIdx] = byte(run)
			dstIdx++
			run = 1
		}

		dst[dstIdx] = val
		dstIdx++

		if prev != val {
			prev = val
			run = 1
		}
	}

	// Fill up the destination array
	if run >= threshold {
		dst[dstIdx] = prev
		dstIdx++
		run -= threshold

		// Force MSB to indicate a 2 byte encoding of the length
		if run >= TWO_BYTE_RLE_MASK {
			dst[dstIdx] = byte((run >> 8) | TWO_BYTE_RLE_MASK)
			dstIdx++
		}

		dst[dstIdx] = byte(run & 0xFF)
		dstIdx++
	}

	return srcIdx, dstIdx, nil
}
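A small in-package sketch of the run-length format produced above. The runThreshold of 3, the bare &RLT{} literal and a TWO_BYTE_RLE_MASK value of 0x80 are assumptions used only for this illustration:

func exampleRLTForward() []byte {
	rlt := &RLT{runThreshold: 3} // hypothetical literal; size == 0 => use len(src)
	src := []byte{'A', 'A', 'A', 'A', 'A', 'B'}
	dst := make([]byte, 16)

	_, n, _ := rlt.Forward(src, dst)

	// Expected dst[:n]: 'A' 'A' 'A' 0x02 'B'
	// - two 'A's are copied while the run is still below the threshold,
	// - a third 'A' marks the end of the run,
	// - the residual length 5-3 = 2 fits in a single byte (2 < 0x80),
	// - the literal 'B' follows.
	return dst[:n]
}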
Example #5
func (this *RLT) Inverse(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	srcEnd := this.size

	if this.size == 0 {
		srcEnd = uint(len(src))
	}

	dstEnd := uint(len(dst))
	run := 0
	threshold := int(this.runThreshold)
	srcIdx := uint(0)
	dstIdx := uint(0)

	// Initialize with a value different from the first data
	prev := ^src[srcIdx]

	for srcIdx < srcEnd && dstIdx < dstEnd {
		val := src[srcIdx]
		srcIdx++

		if prev == val {
			run++

			if run >= threshold {
				// Read the length
				run = int(src[srcIdx])
				srcIdx++

				// If the length is encoded in 2 bytes, process next byte
				if run&TWO_BYTE_RLE_MASK != 0 {
					run = ((run & (^TWO_BYTE_RLE_MASK)) << 8) | int(src[srcIdx])
					srcIdx++
				}

				// Emit length times the previous byte
				for run > 0 {
					dst[dstIdx] = prev
					dstIdx++
					run--
				}
			}
		} else {
			prev = val
			run = 1
		}

		dst[dstIdx] = val
		dstIdx++
	}

	return srcIdx, dstIdx, nil
}
Example #6
// Returns no error if the compression chain succeeded. In that case, the input data
// may be modified. If the compression failed, the input data is restored to its original state.
func (this *BWTBlockCodec) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return 0, 0, errors.New("Input buffer cannot be null")
	}

	if dst == nil {
		return 0, 0, errors.New("Output buffer cannot be null")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	blockSize := this.size

	if blockSize == 0 {
		blockSize = uint(len(src))

		if blockSize > this.maxBlockSize() {
			errMsg := fmt.Sprintf("Block size is %v, max value is %v", blockSize, this.maxBlockSize())
			return 0, 0, errors.New(errMsg)
		}
	} else if blockSize > uint(len(src)) {
		errMsg := fmt.Sprintf("Block size is %v, input buffer length is %v", blockSize, len(src))
		return 0, 0, errors.New(errMsg)
	}

	this.transform.(kanzi.Sizeable).SetSize(blockSize)

	// Apply forward Transform
	iIdx, oIdx, _ := this.transform.Forward(src, dst)

	headerSizeBytes := uint(0)
	pIndexSizeBits := uint(0)
	primaryIndex := uint(0)

	if this.isBijectiveBWT == false {
		primaryIndex = this.transform.(*transform.BWT).PrimaryIndex()
		pIndexSizeBits = uint(6)

		for 1<<pIndexSizeBits <= primaryIndex {
			pIndexSizeBits++
		}

		headerSizeBytes = (2 + pIndexSizeBits + 7) >> 3
	}

	if this.mode != GST_MODE_RAW {
		// Apply Post Transform
		gst, err := this.createGST(blockSize)

		if err != nil {
			return 0, 0, err
		}

		gst.Forward(dst, src)

		if ZRLT, err := NewZRLT(blockSize); err == nil {
			// Apply Zero Run Length Encoding
			iIdx, oIdx, err = ZRLT.Forward(src, dst[headerSizeBytes:])

			if err != nil {
				// Compression failed, recover source data
				gst.Inverse(src, dst)
				this.transform.Inverse(dst, src)
				return 0, 0, err
			}
		}
	} else if headerSizeBytes > 0 {
		// Shift output data to leave space for header
		hs := int(headerSizeBytes)

		for i := int(blockSize - 1); i >= 0; i-- {
			dst[i+hs] = dst[i]
		}
	}

	if this.isBijectiveBWT == false {
		oIdx += headerSizeBytes

		// Write block header (mode + primary index). See top of file for format
		shift := (headerSizeBytes - 1) << 3
		blockMode := (pIndexSizeBits + 1) >> 3
		blockMode = (blockMode << 6) | ((primaryIndex >> shift) & 0x3F)
		dst[0] = byte(blockMode)

		for i := uint(1); i < headerSizeBytes; i++ {
			shift -= 8
			dst[i] = byte(primaryIndex >> shift)
		}
	}

	return iIdx, oIdx, nil
}
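The header prepended by Forward when isBijectiveBWT is false can be read back by reversing the bit packing above. This is only a sketch derived from the writer code shown here; the codec's actual Inverse is not part of this example:

func readBWTHeader(block []byte) (primaryIndex, headerSizeBytes uint) {
	// The top 2 bits of the first byte store headerSizeBytes-1,
	// the low 6 bits are the most significant bits of the primary index
	headerSizeBytes = uint(block[0]>>6) + 1
	primaryIndex = uint(block[0] & 0x3F)

	for i := uint(1); i < headerSizeBytes; i++ {
		primaryIndex = (primaryIndex << 8) | uint(block[i])
	}

	return primaryIndex, headerSizeBytes
}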
Example #7
// Reads the same byte input as LZ4_decompress_generic in LZ4 r131 (7/15)
// for a 32-bit architecture.
func (this *LZ4Codec) Inverse(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	count := int(this.size)

	if this.size == 0 {
		count = len(src)
	}

	srcEnd := count - COPY_LENGTH
	dstEnd := len(dst) - COPY_LENGTH
	srcIdx := 0
	dstIdx := 0

	for {
		// Get literal length
		token := int(src[srcIdx])
		srcIdx++
		length := token >> ML_BITS

		if length == RUN_MASK {
			for srcIdx < count && src[srcIdx] == byte(0xFF) {
				srcIdx++
				length += 0xFF
			}

			length += int(src[srcIdx])
			srcIdx++

			if length > MAX_LENGTH {
				return 0, 0, fmt.Errorf("Invalid length decoded: %d", length)
			}
		}

		// Copy literals
		for i := 0; i < length; i++ {
			dst[dstIdx+i] = src[srcIdx+i]
		}

		srcIdx += length
		dstIdx += length

		if dstIdx > dstEnd || srcIdx > srcEnd {
			break
		}

		// Get offset
		delta := int(src[srcIdx]) | (int(src[srcIdx+1]) << 8)
		srcIdx += 2
		match := dstIdx - delta

		if match < 0 {
			break
		}

		length = token & ML_MASK

		// Get match length
		if length == ML_MASK {
			for srcIdx < count && src[srcIdx] == byte(0xFF) {
				srcIdx++
				length += 0xFF
			}

			length += int(src[srcIdx])
			srcIdx++

			if length > MAX_LENGTH {
				return 0, 0, fmt.Errorf("Invalid length decoded: %d", length)
			}
		}

		length += MIN_MATCH
		cpy := dstIdx + length

		if cpy > dstEnd {
			// Do not use copy on (potentially) overlapping slices
			for i := 0; i < length; i++ {
				dst[dstIdx+i] = dst[match+i]
			}
		} else {
			// Unroll loop
			for {
				dst[dstIdx] = dst[match]
				dst[dstIdx+1] = dst[match+1]
				dst[dstIdx+2] = dst[match+2]
				dst[dstIdx+3] = dst[match+3]
				dst[dstIdx+4] = dst[match+4]
				dst[dstIdx+5] = dst[match+5]
				dst[dstIdx+6] = dst[match+6]
				dst[dstIdx+7] = dst[match+7]
				match += 8
				dstIdx += 8

				if dstIdx >= cpy {
					break
				}
			}
		}

		// Correction
		dstIdx = cpy
	}

	return uint(count), uint(dstIdx), nil
}
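Each LZ4 sequence begins with a one-byte token split into two 4-bit fields, which is what the ML_BITS/ML_MASK arithmetic above decodes: the high nibble is the literal run length and the low nibble the match length, where a field value of 15 means extra length bytes follow (each 0xFF adds 255) and MIN_MATCH is added back to the match length. A tiny helper illustrating the split, assuming the standard LZ4 values ML_BITS = 4 and ML_MASK = 0x0F:

func splitToken(token byte) (litLen, matchLen int) {
	litLen = int(token >> 4)     // high nibble: literal length (15 => extended length follows)
	matchLen = int(token & 0x0F) // low nibble: match length before MIN_MATCH is added (15 => extended)
	return litLen, matchLen
}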
Example #8
// Generates the same byte output as LZ4_compress_generic in LZ4 r131 (7/15)
// for a 32-bit architecture.
func (this *LZ4Codec) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	count := int(this.size)

	if this.size == 0 {
		count = len(src)
	}

	if n := this.MaxEncodedLen(count); len(dst) < n {
		return 0, 0, fmt.Errorf("Output buffer is too small - size: %d, required %d", len(dst), n)
	}

	var hashLog uint

	if count < LZ4_64K_LIMIT {
		hashLog = HASH_LOG_64K
	} else {
		hashLog = HASH_LOG
	}

	hashShift := 32 - hashLog
	srcEnd := count
	matchLimit := srcEnd - LAST_LITERALS
	mfLimit := srcEnd - MF_LIMIT
	srcIdx := 0
	dstIdx := 0
	anchor := 0
	table := this.buffer // aliasing

	if count > MIN_LENGTH {
		for i := (1 << hashLog) - 1; i >= 0; i-- {
			table[i] = 0
		}

		// First byte
		h32 := (readInt(src[srcIdx:]) * HASH_SEED) >> hashShift
		table[h32] = srcIdx
		srcIdx++
		h32 = (readInt(src[srcIdx:]) * HASH_SEED) >> hashShift

		for {
			fwdIdx := srcIdx
			step := 1
			searchMatchNb := SEARCH_MATCH_NB
			var match int

			// Find a match
			for {
				srcIdx = fwdIdx
				fwdIdx += step

				if fwdIdx > mfLimit {
					// Encode last literals
					dstIdx += writeLastLiterals(src[anchor:], dst[dstIdx:], srcEnd-anchor)
					return uint(srcEnd), uint(dstIdx), error(nil)
				}

				step = searchMatchNb >> SKIP_STRENGTH
				searchMatchNb++
				match = table[h32]
				table[h32] = srcIdx
				h32 = (readInt(src[fwdIdx:]) * HASH_SEED) >> hashShift

				if differentInts(src, match, srcIdx) == false && match > srcIdx-MAX_DISTANCE {
					break
				}
			}

			// Catch up
			for match > 0 && srcIdx > anchor && src[match-1] == src[srcIdx-1] {
				match--
				srcIdx--
			}

			// Encode literal length
			litLength := srcIdx - anchor
			token := dstIdx
			dstIdx++

			if litLength >= RUN_MASK {
				dst[token] = byte(RUN_MASK << ML_BITS)
				dstIdx += writeLength(dst[dstIdx:], litLength-RUN_MASK)
			} else {
				dst[token] = byte(litLength << ML_BITS)
			}

			// Copy literals
			copy(dst[dstIdx:], src[anchor:anchor+litLength])
			dstIdx += litLength

			// Next match
			for {
				// Encode offset
				dst[dstIdx] = byte(srcIdx - match)
				dst[dstIdx+1] = byte((srcIdx - match) >> 8)
				dstIdx += 2

				// Encode match length
				srcIdx += MIN_MATCH
				match += MIN_MATCH
				anchor = srcIdx

				for srcIdx < matchLimit && src[srcIdx] == src[match] {
					srcIdx++
					match++
				}

				matchLength := srcIdx - anchor

				// Encode match length
				if matchLength >= ML_MASK {
					dst[token] += byte(ML_MASK)
					dstIdx += writeLength(dst[dstIdx:], matchLength-ML_MASK)
				} else {
					dst[token] += byte(matchLength)
				}

				anchor = srcIdx

				if srcIdx > mfLimit {
					dstIdx += writeLastLiterals(src[anchor:], dst[dstIdx:], srcEnd-anchor)
					return uint(srcEnd), uint(dstIdx), error(nil)
				}

				// Fill table
				h32 = (readInt(src[srcIdx-2:]) * HASH_SEED) >> hashShift
				table[h32] = srcIdx - 2

				// Test next position
				h32 = (readInt(src[srcIdx:]) * HASH_SEED) >> hashShift
				match = table[h32]
				table[h32] = srcIdx

				if differentInts(src, match, srcIdx) == true || match <= srcIdx-MAX_DISTANCE {
					break
				}

				token = dstIdx
				dstIdx++
				dst[token] = 0
			}

			// Prepare next loop
			srcIdx++
			h32 = (readInt(src[srcIdx:]) * HASH_SEED) >> hashShift
		}
	}

	// Encode last literals
	dstIdx += writeLastLiterals(src[anchor:], dst[dstIdx:], srcEnd-anchor)
	return uint(srcEnd), uint(dstIdx), error(nil)
}
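A usage sketch for the compression side, again as in-package code. The bare LZ4Codec literal and the sizing of its 'buffer' field from HASH_LOG / HASH_LOG_64K are inferred from the code above, not from the real constructor (which is not shown), so treat this as a hypothetical setup:

func compressLZ4(src []byte) ([]byte, error) {
	// The hash table must cover whichever hashLog branch Forward picks
	hashSize := 1 << HASH_LOG

	if 1<<HASH_LOG_64K > hashSize {
		hashSize = 1 << HASH_LOG_64K
	}

	codec := &LZ4Codec{buffer: make([]int, hashSize)} // hypothetical literal; size == 0 => use len(src)
	dst := make([]byte, codec.MaxEncodedLen(len(src)))

	_, n, err := codec.Forward(src, dst)

	if err != nil {
		return nil, err
	}

	return dst[:n], nil
}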
Example #9
func (this *ZRLT) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	srcEnd := this.size

	if this.size == 0 {
		srcEnd = uint(len(src))
	}

	dstEnd := uint(len(dst))
	dstEnd2 := dstEnd - 2
	runLength := 1
	srcIdx := uint(0)
	dstIdx := uint(0)

	for srcIdx < srcEnd && dstIdx < dstEnd {
		val := src[srcIdx]

		if val == 0 {
			runLength++
			srcIdx++

			if srcIdx < srcEnd && runLength < ZRLT_MAX_RUN {
				continue
			}
		}

		if runLength > 1 {
			// Encode length
			log2 := uint(1)

			for runLength>>log2 > 1 {
				log2++
			}

			if dstIdx >= dstEnd-log2 {
				break
			}

			// Write every bit as a byte except the most significant one
			for log2 > 0 {
				log2--
				dst[dstIdx] = byte((runLength >> log2) & 1)
				dstIdx++
			}

			runLength = 1
			continue
		}

		if val >= 0xFE {
			if dstIdx >= dstEnd2 {
				break
			}

			dst[dstIdx] = 0xFF
			dstIdx++
			dst[dstIdx] = val - 0xFE
			dstIdx++
		} else {
			dst[dstIdx] = val + 1
			dstIdx++
		}

		srcIdx++
	}

	if srcIdx != srcEnd || runLength != 1 {
		return srcIdx, dstIdx, errors.New("Output buffer is too small")
	}

	return srcIdx, dstIdx, nil
}
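A small in-package sketch of the zero-run format. The bare &ZRLT{} literal is an assumption (size == 0 again means "process the whole slice"):

func exampleZRLTForward() []byte {
	zrlt := &ZRLT{} // hypothetical literal
	src := []byte{0, 0, 0, 0, 0, 0x41}
	dst := make([]byte, 16)

	_, n, _ := zrlt.Forward(src, dst)

	// Expected dst[:n]: 0x01 0x00 0x42
	// - a run of 5 zeros is stored as the value 6 (run+1) written bit by bit, one byte
	//   per bit, with the most significant bit dropped: 0b110 -> 0x01, 0x00
	// - the non-zero byte 0x41 is shifted up by one to 0x42 (0xFE and 0xFF get an 0xFF escape)
	return dst[:n]
}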
Example #10
func (this *ZRLT) Inverse(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	srcEnd := this.size

	if this.size == 0 {
		srcEnd = uint(len(src))
	}

	dstEnd := uint(len(dst))
	runLength := 1
	srcIdx := uint(0)
	dstIdx := uint(0)

	for srcIdx < srcEnd && dstIdx < dstEnd {
		if runLength > 1 {
			runLength--
			dst[dstIdx] = 0
			dstIdx++
			continue
		}

		val := src[srcIdx]

		if val <= 1 {
			// Generate the run length bit by bit (but force MSB)
			runLength = 1

			for {
				runLength = (runLength << 1) | int(val)
				srcIdx++

				if srcIdx >= srcEnd {
					break
				}

				val = src[srcIdx]

				if val > 1 {
					break
				}
			}

			continue
		}

		// Regular data processing
		if val == 0xFF {
			srcIdx++

			if srcIdx >= srcEnd {
				break
			}

			dst[dstIdx] = 0xFE + src[srcIdx]
		} else {
			dst[dstIdx] = val - 1
		}

		dstIdx++
		srcIdx++
	}

	// If runLength is not 1, add trailing 0s
	end := dstIdx + uint(runLength) - 1

	if end > dstEnd {
		return srcIdx, dstIdx, errors.New("Output buffer is too small")
	}

	for dstIdx < end {
		dst[dstIdx] = 0
		dstIdx++
	}

	if srcIdx < srcEnd {
		return srcIdx, dstIdx, errors.New("Output buffer is too small")
	}

	return srcIdx, dstIdx, nil
}
Example #11
func (this *LZ4Codec) Forward(src, dst []byte) (uint, uint, error) {
	if src == nil {
		return uint(0), uint(0), errors.New("Invalid null source buffer")
	}

	if dst == nil {
		return uint(0), uint(0), errors.New("Invalid null destination buffer")
	}

	if kanzi.SameByteSlices(src, dst, false) {
		return 0, 0, errors.New("Input and output buffers cannot be equal")
	}

	count := int(this.size)

	if this.size == 0 {
		count = len(src)
	}

	if n := this.MaxEncodedLen(count); len(dst) < n {
		return 0, 0, fmt.Errorf("Output buffer is too small - size: %d, required %d", len(dst), n)
	}

	if count < MIN_LENGTH {
		srcIdx, dstIdx, _ := emitLiterals(src, dst, count, true)
		return uint(srcIdx), uint(dstIdx), error(nil)
	}

	var hashLog uint

	if count < LZ4_64K_LIMIT {
		hashLog = HASH_LOG_64K
	} else {
		hashLog = HASH_LOG
	}

	hashShift := 32 - hashLog
	srcEnd := count
	srcLimit := srcEnd - LAST_LITERALS
	mfLimit := srcEnd - MF_LIMIT
	srcIdx := 0
	dstIdx := 0
	anchor := srcIdx
	srcIdx++
	table := this.buffer // aliasing

	for i := (1 << hashLog) - 1; i >= 0; i-- {
		table[i] = 0
	}

	for {
		attempts := DEFAULT_FIND_MATCH_ATTEMPTS
		fwdIdx := srcIdx
		var ref int

		// Find a match
		for {
			srcIdx = fwdIdx
			fwdIdx += (attempts >> SKIP_STRENGTH)

			if fwdIdx > mfLimit {
				_, dstDelta, _ := emitLiterals(src[anchor:], dst[dstIdx:], srcEnd-anchor, true)
				return uint(srcEnd), uint(dstIdx + dstDelta), error(nil)
			}

			attempts++
			h32 := (readInt(src, srcIdx) * HASH_SEED) >> hashShift
			ref = table[h32]
			table[h32] = srcIdx

			if differentInts(src, ref, srcIdx) == false && ref > srcIdx-MAX_DISTANCE {
				break
			}
		}

		// Catch up
		for ref > 0 && srcIdx > anchor && src[ref-1] == src[srcIdx-1] {
			ref--
			srcIdx--
		}

		// Encode literal length
		runLen := srcIdx - anchor
		tokenOff := dstIdx
		dstIdx++
		_, dstDelta, token := emitLiterals(src[anchor:], dst[dstIdx:], runLen, false)
		dstIdx += dstDelta

		for {
			// Encode offset
			dst[dstIdx] = byte(srcIdx - ref)
			dst[dstIdx+1] = byte((srcIdx - ref) >> 8)
			dstIdx += 2

			// Count matches
			srcIdx += MIN_MATCH
			ref += MIN_MATCH
			anchor = srcIdx

			for srcIdx < srcLimit && src[srcIdx] == src[ref] {
				srcIdx++
				ref++
			}

			matchLen := srcIdx - anchor

			// Encode match length
			if matchLen >= ML_MASK {
				dst[tokenOff] = byte(token | ML_MASK)
				dstIdx += writeLength(dst[dstIdx:], matchLen-ML_MASK)
			} else {
				dst[tokenOff] = byte(token | matchLen)
			}

			// Test end of chunk
			if srcIdx > mfLimit {
				_, dstDelta, _ := emitLiterals(src[srcIdx:], dst[dstIdx:], srcEnd-srcIdx, true)
				return uint(srcEnd), uint(dstIdx + dstDelta), error(nil)
			}

			// Test next position
			h32_1 := (readInt(src, srcIdx-2) * HASH_SEED) >> hashShift
			h32_2 := (readInt(src, srcIdx) * HASH_SEED) >> hashShift
			table[h32_1] = srcIdx - 2
			ref = table[h32_2]
			table[h32_2] = srcIdx

			if differentInts(src, ref, srcIdx) == true || ref <= srcIdx-MAX_DISTANCE {
				break
			}

			tokenOff = dstIdx
			dstIdx++
			token = 0
		}

		// Update
		anchor = srcIdx
		srcIdx++
	}
}
Example #12
func (this *EncodingTask) encode() {
	transform, err := function.NewByteFunction(this.blockLength, this.typeOfTransform)

	if err != nil {
		<-this.input
		this.output <- NewIOError(err.Error(), ERR_CREATE_CODEC)
		return
	}

	buffer := this.buf
	requiredSize := transform.MaxEncodedLen(int(this.blockLength))

	if requiredSize == -1 {
		// Max size unknown => guess
		requiredSize = int(this.blockLength*5) >> 2
	}

	if this.typeOfTransform == function.NULL_TRANSFORM_TYPE {
		buffer = this.data // share buffers if no transform
	} else if len(buffer) < requiredSize {
		buffer = make([]byte, requiredSize)
	}

	mode := byte(0)
	dataSize := uint(0)
	postTransformLength := this.blockLength
	checksum := uint32(0)
	iIdx := uint(0)
	oIdx := uint(0)

	// Compute block checksum
	if this.hasher != nil {
		checksum = this.hasher.Hash(this.data[0:this.blockLength])
	}

	if len(this.listeners) > 0 {
		// Notify before transform
		evt := &BlockEvent{eventType: EVT_BEFORE_TRANSFORM, blockId: this.currentBlockId,
			blockSize: int(this.blockLength), hash: checksum, time_: time.Now(),
			hashing: this.hasher != nil}
		notifyListeners(this.listeners, evt)
	}

	if this.blockLength <= SMALL_BLOCK_SIZE {
		// Just copy
		if !kanzi.SameByteSlices(buffer, this.data, false) {
			copy(buffer, this.data[0:this.blockLength])
		}

		iIdx += this.blockLength
		oIdx += this.blockLength
		mode = byte(SMALL_BLOCK_SIZE | (this.blockLength & COPY_LENGTH_MASK))
	} else {

		// Forward transform
		iIdx, oIdx, err = transform.Forward(this.data, buffer)

		if err != nil {
			// Transform failed (probably due to lack of space in output buffer)
			if !kanzi.SameByteSlices(buffer, this.data, false) {
				copy(buffer, this.data)
			}

			iIdx = this.blockLength
			oIdx = this.blockLength
			mode |= SKIP_FUNCTION_MASK
		}

		postTransformLength = oIdx

		for i := uint64(0xFF); i < uint64(postTransformLength); i <<= 8 {
			dataSize++
		}

		if dataSize > 3 {
			<-this.input
			this.output <- NewIOError("Invalid block data length", ERR_WRITE_FILE)
			return
		}

		// Record size of 'block size' - 1 in bytes
		mode |= byte(dataSize & 0x03)
		dataSize++
	}

	if len(this.listeners) > 0 {
		// Notify after transform
		evt := &BlockEvent{eventType: EVT_AFTER_TRANSFORM, blockId: this.currentBlockId,
			blockSize: int(postTransformLength), hash: checksum, time_: time.Now(),
			hashing: this.hasher != nil}
		notifyListeners(this.listeners, evt)
	}

	// Wait for the concurrent task processing the previous block to complete
	// entropy encoding. Entropy encoding must happen sequentially (and
	// in the correct block order) in the bitstream.
	err2 := <-this.input

	if err2 != nil {
		this.output <- err2
		return
	}

	// Each block is encoded separately
	// Rebuild the entropy encoder to reset block statistics
	ee, err := entropy.NewEntropyEncoder(this.obs, this.typeOfEntropy)

	if err != nil {
		this.output <- NewIOError(err.Error(), ERR_CREATE_CODEC)
		return
	}

	// Write block 'header' (mode + compressed length)
	written := this.obs.Written()
	this.obs.WriteBits(uint64(mode), 8)

	if dataSize > 0 {
		this.obs.WriteBits(uint64(postTransformLength), 8*dataSize)
	}

	// Write checksum
	if this.hasher != nil {
		this.obs.WriteBits(uint64(checksum), 32)
	}

	if len(this.listeners) > 0 {
		// Notify before entropy
		evt := &BlockEvent{eventType: EVT_BEFORE_ENTROPY, blockId: this.currentBlockId,
			blockSize: int(postTransformLength), time_: time.Now(),
			hash: checksum, hashing: this.hasher != nil}
		notifyListeners(this.listeners, evt)
	}

	// Entropy encode block
	_, err = ee.Encode(buffer[0:postTransformLength])

	if err != nil {
		this.output <- NewIOError(err.Error(), ERR_PROCESS_BLOCK)
		return
	}

	// Dispose before displaying statistics. Dispose may write to the bitstream
	ee.Dispose()

	if len(this.listeners) > 0 {
		// Notify after entropy
		evt := &BlockEvent{eventType: EVT_AFTER_ENTROPY, blockId: this.currentBlockId,
			blockSize: int((this.obs.Written() - written) / 8), time_: time.Now(),
			hash: checksum, hashing: this.hasher != nil}
		notifyListeners(this.listeners, evt)
	}

	// Notify of completion of the task
	this.output <- error(nil)
}
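Collecting the WriteBits calls above, each block reaching the bitstream is laid out as follows (a summary of this function only, not an independent format specification):

// Per-block layout written by encode():
//
//   mode                  8 bits           small-block / skip flags; for regular blocks the
//                                          low 2 bits hold (number of length bytes - 1)
//   post-transform length 8*dataSize bits  omitted when dataSize == 0 (small blocks embed
//                                          their length in the mode byte instead)
//   checksum              32 bits          only when a hasher is configured
//   entropy payload       variable         entropy-coded buffer[0:postTransformLength]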