TUN-528: Move cloudflared into a separate repo

Author: Areg Harutyunyan
Date: 2018-05-01 18:45:06 -05:00
Parent: e8c621a648
Commit: d06fc520c7
4726 changed files with 1763680 additions and 0 deletions

BUILD.bazel (zombiezen.com/go/capnproto2/internal/packed)

@@ -0,0 +1,19 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
go_library(
name = "go_default_library",
srcs = [
"discard.go",
"discard_go14.go",
"packed.go",
],
importpath = "zombiezen.com/go/capnproto2/internal/packed",
visibility = ["//:__subpackages__"],
)
go_test(
name = "go_default_test",
srcs = ["packed_test.go"],
data = glob(["testdata/**"]),
embed = [":go_default_library"],
)

discard.go

@@ -0,0 +1,11 @@
// +build go1.5

package packed

import (
	"bufio"
)

func discard(r *bufio.Reader, n int) {
	r.Discard(n)
}

discard_go14.go

@@ -0,0 +1,13 @@
// +build !go1.5

package packed

import (
	"bufio"
	"io"
	"io/ioutil"
)

func discard(r *bufio.Reader, n int) {
	io.CopyN(ioutil.Discard, r, int64(n))
}

fuzz.go (go-fuzz harness)

@@ -0,0 +1,65 @@
// +build gofuzz

// Fuzz test harness. To run:
// go-fuzz-build zombiezen.com/go/capnproto2/internal/packed
// go-fuzz -bin=packed-fuzz.zip -workdir=internal/packed/testdata

package packed

import (
	"bufio"
	"bytes"
	"io"
	"io/ioutil"
)

func Fuzz(data []byte) int {
	result := 0

	// Unpacked
	if unpacked, err := Unpack(nil, data); err == nil {
		checkRepack(unpacked)
		result = 1
	}

	// Read
	{
		r := NewReader(bufio.NewReader(bytes.NewReader(data)))
		if unpacked, err := ioutil.ReadAll(r); err == nil {
			checkRepack(unpacked)
			result = 1
		}
	}

	// ReadWord
	{
		r := NewReader(bufio.NewReader(bytes.NewReader(data)))
		var unpacked []byte
		var err error
		for {
			n := len(unpacked)
			unpacked = append(unpacked, 0, 0, 0, 0, 0, 0, 0, 0)
			if err = r.ReadWord(unpacked[n:]); err != nil {
				unpacked = unpacked[:n]
				break
			}
		}
		if err == io.EOF {
			checkRepack(unpacked)
			result = 1
		}
	}

	return result
}

func checkRepack(unpacked []byte) {
	packed := Pack(nil, unpacked)
	unpacked2, err := Unpack(nil, packed)
	if err != nil {
		panic("correctness: unpack, pack, unpack gives error: " + err.Error())
	}
	if !bytes.Equal(unpacked, unpacked2) {
		panic("correctness: unpack, pack, unpack gives different results")
	}
}
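A crasher or corpus entry that go-fuzz writes under testdata can be replayed straight through this entry point. The sketch below is a hypothetical helper, not part of this commit: the testdata/crashers layout and the test name are invented, and it only builds with -tags gofuzz so that Fuzz is compiled in.

// +build gofuzz

package packed

import (
	"io/ioutil"
	"path/filepath"
	"testing"
)

// TestReplayCrashers feeds each saved crasher back through Fuzz so that a
// failure found by go-fuzz can be reproduced with `go test -tags gofuzz`.
func TestReplayCrashers(t *testing.T) {
	files, _ := filepath.Glob("testdata/crashers/*") // hypothetical directory
	if len(files) == 0 {
		t.Skip("no crasher files to replay")
	}
	for _, name := range files {
		data, err := ioutil.ReadFile(name)
		if err != nil {
			t.Fatal(err)
		}
		Fuzz(data) // panics if the pack/unpack round trip is violated
	}
}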

packed.go

@@ -0,0 +1,334 @@
// Package packed provides functions to read and write the "packed"
// compression scheme described at https://capnproto.org/encoding.html#packing.
package packed

import (
	"bufio"
	"errors"
	"io"
)

const wordSize = 8

// Special case tags.
const (
	zeroTag     byte = 0x00
	unpackedTag byte = 0xff
)

// Pack appends the packed version of src to dst and returns the
// resulting slice. len(src) must be a multiple of 8 or Pack panics.
func Pack(dst, src []byte) []byte {
if len(src)%wordSize != 0 {
panic("packed.Pack len(src) must be a multiple of 8")
}
var buf [wordSize]byte
for len(src) > 0 {
var hdr byte
n := 0
for i := uint(0); i < wordSize; i++ {
if src[i] != 0 {
hdr |= 1 << i
buf[n] = src[i]
n++
}
}
dst = append(dst, hdr)
dst = append(dst, buf[:n]...)
src = src[wordSize:]
switch hdr {
case zeroTag:
z := min(numZeroWords(src), 0xff)
dst = append(dst, byte(z))
src = src[z*wordSize:]
case unpackedTag:
i := 0
end := min(len(src), 0xff*wordSize)
for i < end {
zeros := 0
for _, b := range src[i : i+wordSize] {
if b == 0 {
zeros++
}
}
if zeros > 1 {
break
}
i += wordSize
}
rawWords := byte(i / wordSize)
dst = append(dst, rawWords)
dst = append(dst, src[:i]...)
src = src[i:]
}
}
return dst
}
// numZeroWords returns the number of leading zero words in b.
func numZeroWords(b []byte) int {
for i, bb := range b {
if bb != 0 {
return i / wordSize
}
}
return len(b) / wordSize
}
// Unpack appends the unpacked version of src to dst and returns the
// resulting slice.
func Unpack(dst, src []byte) ([]byte, error) {
for len(src) > 0 {
tag := src[0]
src = src[1:]
pstart := len(dst)
dst = allocWords(dst, 1)
p := dst[pstart : pstart+wordSize]
if len(src) >= wordSize {
i := 0
nz := tag & 1
p[0] = src[i] & -nz
i += int(nz)
nz = tag >> 1 & 1
p[1] = src[i] & -nz
i += int(nz)
nz = tag >> 2 & 1
p[2] = src[i] & -nz
i += int(nz)
nz = tag >> 3 & 1
p[3] = src[i] & -nz
i += int(nz)
nz = tag >> 4 & 1
p[4] = src[i] & -nz
i += int(nz)
nz = tag >> 5 & 1
p[5] = src[i] & -nz
i += int(nz)
nz = tag >> 6 & 1
p[6] = src[i] & -nz
i += int(nz)
nz = tag >> 7 & 1
p[7] = src[i] & -nz
i += int(nz)
src = src[i:]
} else {
for i := uint(0); i < wordSize; i++ {
if tag&(1<<i) == 0 {
continue
}
if len(src) == 0 {
return dst, io.ErrUnexpectedEOF
}
p[i] = src[0]
src = src[1:]
}
}
switch tag {
case zeroTag:
if len(src) == 0 {
return dst, io.ErrUnexpectedEOF
}
dst = allocWords(dst, int(src[0]))
src = src[1:]
case unpackedTag:
if len(src) == 0 {
return dst, io.ErrUnexpectedEOF
}
start := len(dst)
dst = allocWords(dst, int(src[0]))
src = src[1:]
n := copy(dst[start:], src)
src = src[n:]
}
}
return dst, nil
}
func allocWords(p []byte, n int) []byte {
target := len(p) + n*wordSize
if cap(p) >= target {
pp := p[len(p):target]
for i := range pp {
pp[i] = 0
}
return p[:target]
}
newcap := cap(p)
doublecap := newcap + newcap
if target > doublecap {
newcap = target
} else {
if len(p) < 1024 {
newcap = doublecap
} else {
for newcap < target {
newcap += newcap / 4
}
}
}
pp := make([]byte, target, newcap)
copy(pp, p)
return pp
}
// A Reader decompresses a packed byte stream.
type Reader struct {
// ReadWord state
rd *bufio.Reader
err error
zeroes int
literal int
// Read state
word [wordSize]byte
wordIdx int
}
// NewReader returns a reader that decompresses a packed stream from r.
func NewReader(r *bufio.Reader) *Reader {
return &Reader{rd: r, wordIdx: wordSize}
}
func min(a, b int) int {
if b < a {
return b
}
return a
}
// ReadWord decompresses the next word from the underlying stream.
func (r *Reader) ReadWord(p []byte) error {
if len(p) < wordSize {
return errors.New("packed: read word buffer too small")
}
r.wordIdx = wordSize // if the caller tries to call ReadWord and Read, don't give them partial words.
if r.err != nil {
err := r.err
r.err = nil
return err
}
p = p[:wordSize]
switch {
case r.zeroes > 0:
r.zeroes--
for i := range p {
p[i] = 0
}
return nil
case r.literal > 0:
r.literal--
_, err := io.ReadFull(r.rd, p)
return err
}
var tag byte
if r.rd.Buffered() < wordSize+1 {
var err error
tag, err = r.rd.ReadByte()
if err != nil {
return err
}
for i := range p {
p[i] = 0
}
for i := uint(0); i < wordSize; i++ {
if tag&(1<<i) != 0 {
p[i], err = r.rd.ReadByte()
if err != nil {
if err == io.EOF {
err = io.ErrUnexpectedEOF
}
return err
}
} else {
p[i] = 0
}
}
} else {
b, _ := r.rd.Peek(wordSize + 1)
tag = b[0]
i := 1
nz := tag & 1
p[0] = b[i] & -nz
i += int(nz)
nz = tag >> 1 & 1
p[1] = b[i] & -nz
i += int(nz)
nz = tag >> 2 & 1
p[2] = b[i] & -nz
i += int(nz)
nz = tag >> 3 & 1
p[3] = b[i] & -nz
i += int(nz)
nz = tag >> 4 & 1
p[4] = b[i] & -nz
i += int(nz)
nz = tag >> 5 & 1
p[5] = b[i] & -nz
i += int(nz)
nz = tag >> 6 & 1
p[6] = b[i] & -nz
i += int(nz)
nz = tag >> 7 & 1
p[7] = b[i] & -nz
i += int(nz)
discard(r.rd, i)
}
switch tag {
case zeroTag:
z, err := r.rd.ReadByte()
if err == io.EOF {
r.err = io.ErrUnexpectedEOF
return nil
} else if err != nil {
r.err = err
return nil
}
r.zeroes = int(z)
case unpackedTag:
l, err := r.rd.ReadByte()
if err == io.EOF {
r.err = io.ErrUnexpectedEOF
return nil
} else if err != nil {
r.err = err
return nil
}
r.literal = int(l)
}
return nil
}
// Read reads up to len(p) bytes into p. This will decompress whole
// words at a time, so mixing calls to Read and ReadWord may lead to
// bytes missing.
func (r *Reader) Read(p []byte) (n int, err error) {
if r.wordIdx < wordSize {
n = copy(p, r.word[r.wordIdx:])
r.wordIdx += n
}
for n < len(p) {
if r.rd.Buffered() < wordSize+1 && n > 0 {
return n, nil
}
if len(p)-n >= wordSize {
err := r.ReadWord(p[n:])
if err != nil {
return n, err
}
n += wordSize
} else {
err := r.ReadWord(r.word[:])
if err != nil {
return n, err
}
r.wordIdx = copy(p[n:], r.word[:])
n += r.wordIdx
}
}
return n, nil
}
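For orientation, this is how the pieces above fit together: Pack and Unpack work on whole in-memory buffers, while Reader decompresses a stream word by word (and, as its doc comment warns, Read and ReadWord should not be mixed on the same Reader). The round-trip test below is a minimal sketch, not part of this commit; the test name is invented, and it only uses the functions shown in this file.

package packed

import (
	"bufio"
	"bytes"
	"io/ioutil"
	"testing"
)

func TestRoundTripSketch(t *testing.T) {
	// Two 8-byte words; len(src) must be a multiple of 8 or Pack panics.
	src := []byte{
		0, 0, 12, 0, 0, 34, 0, 0, // mixed zero bytes -> tag 0x24 plus the two nonzero bytes
		1, 3, 2, 4, 5, 7, 6, 8, // no zero bytes -> 0xff tag, the literal word, and a count byte
	}

	// In-memory round trip.
	compressed := Pack(nil, src)
	restored, err := Unpack(nil, compressed)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(src, restored) {
		t.Fatal("Pack/Unpack round trip mismatch")
	}

	// Streaming decompression through Reader.
	r := NewReader(bufio.NewReader(bytes.NewReader(compressed)))
	streamed, err := ioutil.ReadAll(r)
	if err != nil {
		t.Fatal(err)
	}
	if !bytes.Equal(src, streamed) {
		t.Fatal("Reader round trip mismatch")
	}
}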

packed_test.go

@@ -0,0 +1,503 @@
package packed

import (
	"bufio"
	"bytes"
	"compress/gzip"
	"encoding/hex"
	"io"
	"io/ioutil"
	"math"
	"strings"
	"testing"
	"testing/iotest"
)

type testCase struct {
name string
original []byte
compressed []byte
}
var compressionTests = []testCase{
{
"empty",
[]byte{},
[]byte{},
},
{
"one zero word",
[]byte{0, 0, 0, 0, 0, 0, 0, 0},
[]byte{0, 0},
},
{
"one word with mixed zero bytes",
[]byte{0, 0, 12, 0, 0, 34, 0, 0},
[]byte{0x24, 12, 34},
},
{
"two words with mixed zero bytes",
[]byte{
0x08, 0x00, 0x00, 0x00, 0x03, 0x00, 0x02, 0x00,
0x19, 0x00, 0x00, 0x00, 0xaa, 0x01, 0x00, 0x00,
},
[]byte{0x51, 0x08, 0x03, 0x02, 0x31, 0x19, 0xaa, 0x01},
},
{
"two words with mixed zero bytes",
[]byte{0x8, 0, 0, 0, 0x3, 0, 0x2, 0, 0x19, 0, 0, 0, 0xaa, 0x1, 0, 0},
[]byte{0x51, 0x08, 0x03, 0x02, 0x31, 0x19, 0xaa, 0x01},
},
{
"four zero words",
[]byte{
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
},
[]byte{0x00, 0x03},
},
{
"four words without zero bytes",
[]byte{
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
},
[]byte{
0xff,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x03,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
},
},
{
"one word without zero bytes",
[]byte{1, 3, 2, 4, 5, 7, 6, 8},
[]byte{0xff, 1, 3, 2, 4, 5, 7, 6, 8, 0},
},
{
"one zero word followed by one word without zero bytes",
[]byte{0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 2, 4, 5, 7, 6, 8},
[]byte{0, 0, 0xff, 1, 3, 2, 4, 5, 7, 6, 8, 0},
},
{
"one word with mixed zero bytes followed by one word without zero bytes",
[]byte{0, 0, 12, 0, 0, 34, 0, 0, 1, 3, 2, 4, 5, 7, 6, 8},
[]byte{0x24, 12, 34, 0xff, 1, 3, 2, 4, 5, 7, 6, 8, 0},
},
{
"two words with no zero bytes",
[]byte{1, 3, 2, 4, 5, 7, 6, 8, 8, 6, 7, 4, 5, 2, 3, 1},
[]byte{0xff, 1, 3, 2, 4, 5, 7, 6, 8, 1, 8, 6, 7, 4, 5, 2, 3, 1},
},
{
"five words, with only the last containing zero bytes",
[]byte{
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
0, 2, 4, 0, 9, 0, 5, 1,
},
[]byte{
0xff, 1, 2, 3, 4, 5, 6, 7, 8,
3,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
0xd6, 2, 4, 9, 5, 1,
},
},
{
"five words, with the middle and last words containing zero bytes",
[]byte{
1, 2, 3, 4, 5, 6, 7, 8,
1, 2, 3, 4, 5, 6, 7, 8,
6, 2, 4, 3, 9, 0, 5, 1,
1, 2, 3, 4, 5, 6, 7, 8,
0, 2, 4, 0, 9, 0, 5, 1,
},
[]byte{
0xff, 1, 2, 3, 4, 5, 6, 7, 8,
3,
1, 2, 3, 4, 5, 6, 7, 8,
6, 2, 4, 3, 9, 0, 5, 1,
1, 2, 3, 4, 5, 6, 7, 8,
0xd6, 2, 4, 9, 5, 1,
},
},
{
"words with mixed zeroes sandwiching zero words",
[]byte{
8, 0, 100, 6, 0, 1, 1, 2,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 2, 0, 3, 1,
},
[]byte{
0xed, 8, 100, 6, 1, 1, 2,
0, 2,
0xd4, 1, 2, 3, 1,
},
},
{
"real-world Cap'n Proto data",
[]byte{
0x0, 0x0, 0x0, 0x0, 0x5, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x2, 0x0, 0x1, 0x0,
0x25, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x1, 0x0, 0x0, 0x0, 0xc, 0x0, 0x0, 0x0,
0xd4, 0x7, 0xc, 0x7, 0x0, 0x0, 0x0, 0x0,
},
[]byte{
0x10, 0x5,
0x50, 0x2, 0x1,
0x1, 0x25,
0x0, 0x0,
0x11, 0x1, 0xc,
0xf, 0xd4, 0x7, 0xc, 0x7,
},
},
{
"shortened benchmark data",
[]byte{
8, 100, 6, 0, 1, 1, 0, 2,
8, 100, 6, 0, 1, 1, 0, 2,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 2, 0, 3, 0, 0,
'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
},
[]byte{
0xb7, 8, 100, 6, 1, 1, 2,
0xb7, 8, 100, 6, 1, 1, 2,
0x00, 3,
0x2a, 1, 2, 3,
0xff, 'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
2,
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
},
},
}
var decompressionTests = []testCase{
{
"fuzz hang #1",
mustGunzip("\x1f\x8b\b\x00\x00\tn\x88\x00\xff\xec\xce!\x11\x000\f\x04\xc1G\xd5Q\xff\x02\x8b" +
"\xab!(\xc9\xcc.>p\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\xf5^" +
"\xf7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x000\xc8\xc9" +
"-\xf5?\x00\x00\xff\xff6\xe2l*\x90\xcc\x00\x00"),
[]byte("\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff@\xf6\x00\xff\x00\xf6" +
"\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6" +
"\x00\xff\x00\xf6\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x05\x06 \x00\x04"),
},
{
"fuzz hang #2",
mustGunzip("\x1f\x8b\b\x00\x00\tn\x88\x00\xff\xec\xceA\r\x00\x00\b\x04\xa0\xeb\x1fد\xc6p:H@" +
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ު\xa4\xb7\x0f\x00\x00\x00\x00\x00\x00\x00" +
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
"\\5\x01\x00\x00\xff\xff\r\xfb\xbac\xe0\xe8\x00\x00"),
[]byte("\x00\xf6\x00\xff\x00\u007f\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6" +
"\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\x005\x00\xf6\x00\xff\x00" +
"\xf6\x00\xff\x00\xf6\x00\xff\x00 \x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00" +
"\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6"),
},
}
var badDecompressionTests = []struct {
name string
input []byte
}{
{
"wrong tag",
[]byte{
0xa7, 8, 100, 6, 1, 1, 2,
0xa7, 8, 100, 6, 1, 1, 2,
},
},
{
"badly written decompression benchmark",
bytes.Repeat([]byte{
0xa7, 8, 100, 6, 1, 1, 2,
0xa7, 8, 100, 6, 1, 1, 2,
0x00, 3,
0x2a,
0xff, 'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
2,
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
}, 128),
},
}
func TestPack(t *testing.T) {
for _, test := range compressionTests {
orig := make([]byte, len(test.original))
copy(orig, test.original)
compressed := Pack(nil, orig)
if !bytes.Equal(compressed, test.compressed) {
t.Errorf("%s: Pack(nil,\n%s\n) =\n%s\n; want\n%s", test.name, hex.Dump(test.original), hex.Dump(compressed), hex.Dump(test.compressed))
}
}
}
func TestUnpack(t *testing.T) {
tests := make([]testCase, 0, len(compressionTests)+len(decompressionTests))
tests = append(tests, compressionTests...)
tests = append(tests, decompressionTests...)
for _, test := range tests {
compressed := make([]byte, len(test.compressed))
copy(compressed, test.compressed)
orig, err := Unpack(nil, compressed)
if err != nil {
t.Errorf("%s: Unpack(nil,\n%s\n) error: %v", test.name, hex.Dump(test.compressed), err)
} else if !bytes.Equal(orig, test.original) {
t.Errorf("%s: Unpack(nil,\n%s\n) =\n%s\n; want\n%s", test.name, hex.Dump(test.compressed), hex.Dump(orig), hex.Dump(test.original))
}
}
}
func TestUnpack_Fail(t *testing.T) {
for _, test := range badDecompressionTests {
compressed := make([]byte, len(test.input))
copy(compressed, test.input)
_, err := Unpack(nil, compressed)
if err == nil {
t.Errorf("%s: did not return error", test.name)
}
}
}
func TestReader(t *testing.T) {
tests := make([]testCase, 0, len(compressionTests)+len(decompressionTests))
tests = append(tests, compressionTests...)
tests = append(tests, decompressionTests...)
testing:
for _, test := range tests {
for readSize := 1; readSize <= 8+2*len(test.original); readSize = nextPrime(readSize) {
r := bytes.NewReader(test.compressed)
d := NewReader(bufio.NewReader(r))
buf := make([]byte, readSize)
var actual []byte
for {
n, err := d.Read(buf)
actual = append(actual, buf[:n]...)
if err != nil {
if err == io.EOF {
break
}
t.Errorf("%s: Read: %v", test.name, err)
continue testing
}
}
if len(test.original) != len(actual) {
t.Errorf("%s: readSize=%d: expected %d bytes, got %d", test.name, readSize, len(test.original), len(actual))
continue
}
if !bytes.Equal(test.original, actual) {
t.Errorf("%s: readSize=%d: bytes = %v; want %v", test.name, readSize, actual, test.original)
}
}
}
}
func TestReader_DataErr(t *testing.T) {
const readSize = 3
tests := make([]testCase, 0, len(compressionTests)+len(decompressionTests))
tests = append(tests, compressionTests...)
tests = append(tests, decompressionTests...)
testing:
for _, test := range tests {
r := iotest.DataErrReader(bytes.NewReader(test.compressed))
d := NewReader(bufio.NewReader(r))
buf := make([]byte, readSize)
var actual []byte
for {
n, err := d.Read(buf)
actual = append(actual, buf[:n]...)
if err != nil {
if err == io.EOF {
break
}
t.Errorf("%s: Read: %v", test.name, err)
continue testing
}
}
if len(test.original) != len(actual) {
t.Errorf("%s: expected %d bytes, got %d", test.name, len(test.original), len(actual))
continue
}
if !bytes.Equal(test.original, actual) {
t.Errorf("%s: bytes not equal", test.name)
}
}
}
func TestReader_Fail(t *testing.T) {
for _, test := range badDecompressionTests {
d := NewReader(bufio.NewReader(bytes.NewReader(test.input)))
_, err := ioutil.ReadAll(d)
if err == nil {
t.Errorf("%s: did not return error", test.name)
}
}
}
var result []byte
func BenchmarkPack(b *testing.B) {
src := bytes.Repeat([]byte{
8, 0, 100, 6, 0, 1, 1, 2,
8, 0, 100, 6, 0, 1, 1, 2,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 2, 0, 3, 0, 0,
'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
}, 128)
dst := make([]byte, 0, len(src))
b.SetBytes(int64(len(src)))
b.ResetTimer()
for i := 0; i < b.N; i++ {
dst = Pack(dst[:0], src)
}
result = dst
}
func benchUnpack(b *testing.B, src []byte) {
var unpackedSize int
{
tmp, err := Unpack(nil, src)
if err != nil {
b.Fatal(err)
}
unpackedSize = len(tmp)
}
b.SetBytes(int64(unpackedSize))
dst := make([]byte, 0, unpackedSize)
b.ResetTimer()
for i := 0; i < b.N; i++ {
var err error
dst, err = Unpack(dst[:0], src)
if err != nil {
b.Fatal(err)
}
}
result = dst
}
func BenchmarkUnpack(b *testing.B) {
benchUnpack(b, bytes.Repeat([]byte{
0xb7, 8, 100, 6, 1, 1, 2,
0xb7, 8, 100, 6, 1, 1, 2,
0x00, 3,
0x2a, 1, 2, 3,
0xff, 'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
2,
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
}, 128))
}
func BenchmarkUnpack_Large(b *testing.B) {
benchUnpack(b, []byte("\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff@\xf6\x00\xff\x00\xf6"+
"\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6"+
"\x00\xff\x00\xf6\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x05\x06 \x00\x04"))
}
func benchReader(b *testing.B, src []byte) {
var unpackedSize int
{
tmp, err := Unpack(nil, src)
if err != nil {
b.Fatal(err)
}
unpackedSize = len(tmp)
}
b.SetBytes(int64(unpackedSize))
r := bytes.NewReader(src)
br := bufio.NewReader(r)
dst := bytes.NewBuffer(make([]byte, 0, unpackedSize))
b.ResetTimer()
for i := 0; i < b.N; i++ {
dst.Reset()
r.Seek(0, 0)
br.Reset(r)
pr := NewReader(br)
_, err := dst.ReadFrom(pr)
if err != nil {
b.Fatal(err)
}
}
result = dst.Bytes()
}
func BenchmarkReader(b *testing.B) {
benchReader(b, bytes.Repeat([]byte{
0xb7, 8, 100, 6, 1, 1, 2,
0xb7, 8, 100, 6, 1, 1, 2,
0x00, 3,
0x2a, 1, 2, 3,
0xff, 'H', 'e', 'l', 'l', 'o', ',', ' ', 'W',
2,
'o', 'r', 'l', 'd', '!', ' ', ' ', 'P',
'a', 'd', ' ', 't', 'e', 'x', 't', '.',
}, 128))
}
func BenchmarkReader_Large(b *testing.B) {
benchReader(b, []byte("\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff@\xf6\x00\xff\x00\xf6"+
"\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6"+
"\x00\xff\x00\xf6\x00\xf6\x00\xff\x00\xf6\x00\xff\x00\xf6\x05\x06 \x00\x04"))
}
func nextPrime(n int) int {
inc:
for {
n++
root := int(math.Sqrt(float64(n)))
for f := 2; f <= root; f++ {
if n%f == 0 {
continue inc
}
}
return n
}
}
func mustGunzip(s string) []byte {
r, err := gzip.NewReader(strings.NewReader(s))
if err != nil {
panic(err)
}
data, err := ioutil.ReadAll(r)
if err != nil {
panic(err)
}
return data
}

The remaining files in this section are the go-fuzz corpus entries matched by the testdata/** glob in the BUILD file above. Their contents are raw fuzzer input, shown here only as garbled bytes or as "Binary file not shown", and are not reproduced.