mirror of https://github.com/tidwall/tile38.git
Packed fields option
This commit is contained in:
parent ffdf90eebf
commit 30d31d0926
@@ -123,6 +123,14 @@
 pruneopts = ""
 revision = "e8fc0692a7e26a05b06517348ed466349062eb47"
 
+[[projects]]
+branch = "master"
+digest = "1:de10194e2fb3787b2efdea394a7f710f49bfdcaae78f715b38a9057c602a4998"
+name = "github.com/h2so5/half"
+packages = ["."]
+pruneopts = ""
+revision = "c705bde19bd0aa32d718d167c961fe681ee91473"
+
 [[projects]]
 digest = "1:6f49eae0c1e5dab1dafafee34b207aeb7a42303105960944828c2079b92fc88e"
 name = "github.com/jmespath/go-jmespath"
@@ -224,11 +232,11 @@
 
 [[projects]]
 branch = "master"
-digest = "1:2fef6390e8d9118debd4937a699afad9e1629f2d5d3c965a58fc33afe04f9b46"
+digest = "1:4d2ec831fbaaf74fd75d2d9fe107e605c92489ec6cef6d36e1f23b678e9f2bd4"
 name = "github.com/tidwall/buntdb"
 packages = ["."]
 pruneopts = ""
-revision = "b67b1b8c1658cb01502801c14e33c61e6c4cbb95"
+revision = "6249481c29c2cd96f53b691b74ac1893f72774c2"
 
 [[projects]]
 digest = "1:91acf4d86b348c1f1832336836035373b047ffcb16a0fde066bd531bbe3452b2"
@@ -254,12 +262,12 @@
 version = "v1.1.1"
 
 [[projects]]
-digest = "1:3ddca2bd5496c6922a2a9e636530e178a43c2a534ea6634211acdc7d10222794"
+digest = "1:eade4ea6782f5eed4a6b3138a648f9a332900650804fd206e5daaf99cc5613ea"
 name = "github.com/tidwall/gjson"
 packages = ["."]
 pruneopts = ""
-revision = "1e3f6aeaa5bad08d777ea7807b279a07885dd8b2"
-version = "v1.1.3"
+revision = "eee0b6226f0d1db2675a176fdfaa8419bcad4ca8"
+version = "v1.2.1"
 
 [[projects]]
 branch = "master"
@@ -466,6 +474,7 @@
 "github.com/eclipse/paho.mqtt.golang",
 "github.com/golang/protobuf/proto",
 "github.com/gomodule/redigo/redis",
+"github.com/h2so5/half",
 "github.com/mmcloughlin/geohash",
 "github.com/nats-io/go-nats",
 "github.com/peterh/liner",
@@ -23,7 +23,8 @@
 required = [
 "github.com/tidwall/lotsa",
 "github.com/mmcloughlin/geohash",
-"github.com/tidwall/evio"
+"github.com/tidwall/evio",
+"github.com/h2so5/half"
 ]
 
 [[constraint]]
@@ -72,7 +73,7 @@ required = [
 
 [[constraint]]
 name = "github.com/tidwall/gjson"
-version = "1.0.1"
+version = "1.2.1"
 
 [[constraint]]
 branch = "master"
@@ -111,7 +111,7 @@ func TestDescend(t *testing.T) {
     var keys []string
     for i := 0; i < 1000; i += 10 {
         keys = append(keys, fmt.Sprintf("%03d", i))
-        tr.Set(item.New(keys[len(keys)-1], nil))
+        tr.Set(item.New(keys[len(keys)-1], nil, false))
     }
     var exp []string
     tr.Reverse(func(item *item.Item) bool {
@@ -162,7 +162,7 @@ func TestAscend(t *testing.T) {
     var keys []string
     for i := 0; i < 1000; i += 10 {
         keys = append(keys, fmt.Sprintf("%03d", i))
-        tr.Set(item.New(keys[len(keys)-1], nil))
+        tr.Set(item.New(keys[len(keys)-1], nil, false))
     }
     exp := keys
     for i := -1; i < 1000; i++ {
@@ -205,7 +205,7 @@ func TestBTree(t *testing.T) {
 
     // insert all items
     for _, key := range keys {
-        value, replaced := tr.Set(item.New(key, testString(key)))
+        value, replaced := tr.Set(item.New(key, testString(key), false))
         if replaced {
             t.Fatal("expected false")
         }
@@ -362,7 +362,7 @@ func TestBTree(t *testing.T) {
 
     // replace second half
     for _, key := range keys[len(keys)/2:] {
-        value, replaced := tr.Set(item.New(key, testString(key)))
+        value, replaced := tr.Set(item.New(key, testString(key), false))
         if !replaced {
             t.Fatal("expected true")
         }
@@ -420,7 +420,7 @@ func BenchmarkTidwallSequentialSet(b *testing.B) {
     sort.Strings(keys)
     b.ResetTimer()
     for i := 0; i < b.N; i++ {
-        tr.Set(item.New(keys[i], nil))
+        tr.Set(item.New(keys[i], nil, false))
     }
 }
 
@@ -429,7 +429,7 @@ func BenchmarkTidwallSequentialGet(b *testing.B) {
     keys := randKeys(b.N)
     sort.Strings(keys)
     for i := 0; i < b.N; i++ {
-        tr.Set(item.New(keys[i], nil))
+        tr.Set(item.New(keys[i], nil, false))
     }
     b.ResetTimer()
     for i := 0; i < b.N; i++ {
@@ -442,7 +442,7 @@ func BenchmarkTidwallRandomSet(b *testing.B) {
     keys := randKeys(b.N)
     b.ResetTimer()
     for i := 0; i < b.N; i++ {
-        tr.Set(item.New(keys[i], nil))
+        tr.Set(item.New(keys[i], nil, false))
     }
 }
 
@@ -450,7 +450,7 @@ func BenchmarkTidwallRandomGet(b *testing.B) {
     var tr BTree
     keys := randKeys(b.N)
     for i := 0; i < b.N; i++ {
-        tr.Set(item.New(keys[i], nil))
+        tr.Set(item.New(keys[i], nil, false))
     }
     b.ResetTimer()
     for i := 0; i < b.N; i++ {
@@ -528,11 +528,11 @@ func BenchmarkTidwallRandomGet(b *testing.B) {
 
 func TestBTreeOne(t *testing.T) {
     var tr BTree
-    tr.Set(item.New("1", testString("1")))
+    tr.Set(item.New("1", testString("1"), false))
     tr.Delete("1")
-    tr.Set(item.New("1", testString("1")))
+    tr.Set(item.New("1", testString("1"), false))
     tr.Delete("1")
-    tr.Set(item.New("1", testString("1")))
+    tr.Set(item.New("1", testString("1"), false))
     tr.Delete("1")
 }
 
@@ -541,7 +541,7 @@ func TestBTree256(t *testing.T) {
     var n int
     for j := 0; j < 2; j++ {
         for _, i := range rand.Perm(256) {
-            tr.Set(item.New(fmt.Sprintf("%d", i), testString(fmt.Sprintf("%d", i))))
+            tr.Set(item.New(fmt.Sprintf("%d", i), testString(fmt.Sprintf("%d", i)), false))
             n++
             if tr.Len() != n {
                 t.Fatalf("expected 256, got %d", n)
@@ -57,36 +57,28 @@ func (item *Item) setIsPacked(isPacked bool) {
     }
 }
 
-func (item *Item) fieldsLen() int {
+func (item *Item) fieldsDataSize() int {
     return int(item.head[0] & 0x3FFFFFFF)
 }
 
-func (item *Item) setFieldsLen(len int) {
+func (item *Item) setFieldsDataSize(len int) {
     item.head[0] = item.head[0]>>30<<30 | uint32(len)
 }
 
-func (item *Item) idLen() int {
+func (item *Item) idDataSize() int {
     return int(item.head[1])
 }
 
-func (item *Item) setIDLen(len int) {
+func (item *Item) setIDDataSize(len int) {
     item.head[1] = uint32(len)
 }
 
 // ID returns the items ID as a string
 func (item *Item) ID() string {
     return *(*string)((unsafe.Pointer)(&reflect.StringHeader{
-        Data: uintptr(unsafe.Pointer(item.data)) + uintptr(item.fieldsLen()),
-        Len: item.idLen(),
-    }))
-}
-
-// Fields returns the field values
-func (item *Item) fields() []float64 {
-    return *(*[]float64)((unsafe.Pointer)(&reflect.SliceHeader{
-        Data: uintptr(unsafe.Pointer(item.data)),
-        Len: item.fieldsLen() / 8,
-        Cap: item.fieldsLen() / 8,
+        Data: uintptr(unsafe.Pointer(item.data)) +
+            uintptr(item.fieldsDataSize()),
+        Len: item.idDataSize(),
     }))
 }
 
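The accessors in this hunk only touch two 32-bit head words: the low 30 bits of head[0] carry the fields data size and head[1] carries the id data size, while the two high bits of head[0] are preserved by setFieldsDataSize (presumably the isPoint/isPacked flags set elsewhere in item.go). A minimal sketch of that masking, not part of the commit:

```go
package main

import "fmt"

func main() {
    var head [2]uint32
    // keep the top two (flag) bits, store a 30-bit size in the rest
    setFieldsSize := func(n int) { head[0] = head[0]>>30<<30 | uint32(n) }
    fieldsSize := func() int { return int(head[0] & 0x3FFFFFFF) }

    head[0] = 3 << 30 // pretend both flag bits are already set
    setFieldsSize(24)
    fmt.Println(fieldsSize(), head[0]>>30) // 24 3; head[1] would hold the id size
}
```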
@@ -112,7 +104,7 @@ func New(id string, obj geojson.Object, packed bool) *Item {
         item = (*Item)(unsafe.Pointer(oitem))
     }
     item.setIsPacked(packed)
-    item.setIDLen(len(id))
+    item.setIDDataSize(len(id))
     item.data = unsafe.Pointer((*reflect.SliceHeader)(unsafe.Pointer(&id)).Data)
     return item
 }
@@ -126,7 +118,7 @@ func (item *Item) WeightAndPoints() (weight, points int) {
     } else if item.Obj() != nil {
         weight = len(item.Obj().String())
     }
-    weight += item.fieldsLen() + item.idLen()
+    weight += item.fieldsDataSize() + item.idDataSize()
     return weight, points
 }
 
@@ -144,21 +136,56 @@ func (item *Item) Less(other btree.Item, ctx interface{}) bool {
     return item.ID() < other.(*Item).ID()
 }
 
+// fieldBytes returns the raw fields data section
+func (item *Item) fieldsBytes() []byte {
+    return *(*[]byte)((unsafe.Pointer)(&reflect.SliceHeader{
+        Data: uintptr(unsafe.Pointer(item.data)),
+        Len: item.fieldsDataSize(),
+        Cap: item.fieldsDataSize(),
+    }))
+}
+
+// Packed returns true when the item's fields are packed
+func (item *Item) Packed() bool {
+    return item == nil || item.isPacked()
+}
+
 // CopyOverFields overwriting previous fields. Accepts an *Item or []float64
 func (item *Item) CopyOverFields(from interface{}) {
+    if item == nil {
+        return
+    }
     var values []float64
+    var fieldBytes []byte
+    var directCopy bool
     switch from := from.(type) {
     case *Item:
-        values = from.fields()
+        if item.Packed() == from.Packed() {
+            // direct copy the bytes
+            fieldBytes = from.fieldsBytes()
+            directCopy = true
+        } else {
+            // get the values through iteration
+            item.ForEachField(-1, func(value float64) bool {
+                values = append(values, value)
+                return true
+            })
+        }
     case []float64:
         values = from
     }
-    fieldBytes := floatsToBytes(values)
-    oldData := item.dataBytes()
-    newData := make([]byte, len(fieldBytes)+item.idLen())
+    if !directCopy {
+        if item.Packed() {
+            fieldBytes = item.packedGenerateFieldBytes(values)
+        } else {
+            fieldBytes = item.unpackedGenerateFieldBytes(values)
+        }
+    }
+    id := item.ID()
+    newData := make([]byte, len(fieldBytes)+len(id))
     copy(newData, fieldBytes)
-    copy(newData[len(fieldBytes):], oldData[item.fieldsLen():])
-    item.setFieldsLen(len(fieldBytes))
+    copy(newData[len(fieldBytes):], id)
+    item.setFieldsDataSize(len(fieldBytes))
     if len(newData) > 0 {
         item.data = unsafe.Pointer(&newData[0])
     } else {
@@ -166,54 +193,15 @@ func (item *Item) CopyOverFields(from interface{}) {
     }
 }
 
-func getFieldAt(data unsafe.Pointer, index int) float64 {
-    return *(*float64)(unsafe.Pointer(uintptr(data) + uintptr(index*8)))
-}
-
-func setFieldAt(data unsafe.Pointer, index int, value float64) {
-    *(*float64)(unsafe.Pointer(uintptr(data) + uintptr(index*8))) = value
-}
-
 // SetField set a field value at specified index.
 func (item *Item) SetField(index int, value float64) (updated bool) {
-    numFields := item.fieldsLen() / 8
-    if index < numFields {
-        // field exists
-        if getFieldAt(item.data, index) == value {
-            return false
-        }
-    } else {
-        // make room for new field
-        oldBytes := item.dataBytes()
-        newData := make([]byte, (index+1)*8+item.idLen())
-        // copy the existing fields
-        copy(newData, oldBytes[:item.fieldsLen()])
-        // copy the id
-        copy(newData[(index+1)*8:], oldBytes[item.fieldsLen():])
-        // update the fields length
-        item.setFieldsLen((index + 1) * 8)
-        // update the raw data
-        item.data = unsafe.Pointer(&newData[0])
+    if item == nil {
+        return false
     }
-    // set the new field
-    setFieldAt(item.data, index, value)
-    return true
-}
-
-func (item *Item) dataBytes() []byte {
-    return *(*[]byte)((unsafe.Pointer)(&reflect.SliceHeader{
-        Data: uintptr(unsafe.Pointer(item.data)),
-        Len: item.fieldsLen() + item.idLen(),
-        Cap: item.fieldsLen() + item.idLen(),
-    }))
-}
-
-func floatsToBytes(f []float64) []byte {
-    return *(*[]byte)((unsafe.Pointer)(&reflect.SliceHeader{
-        Data: ((*reflect.SliceHeader)(unsafe.Pointer(&f))).Data,
-        Len: len(f) * 8,
-        Cap: len(f) * 8,
-    }))
-}
+    if item.Packed() {
+        return item.packedSetField(index, value)
+    }
+    return item.unpackedSetField(index, value)
+}
 
 // ForEachField iterates over each field. The count param is the number of
@@ -222,27 +210,11 @@ func (item *Item) ForEachField(count int, iter func(value float64) bool) {
     if item == nil {
         return
     }
-    fields := item.fields()
-    var n int
-    if count < 0 {
-        n = len(fields)
+    if item.Packed() {
+        item.packedForEachField(count, iter)
     } else {
-        n = count
+        item.unpackedForEachField(count, iter)
     }
-    for i := 0; i < n; i++ {
-        var field float64
-        if i < len(fields) {
-            field = fields[i]
-        }
-        if !iter(field) {
-            return
-        }
-    }
-}
-
-// Packed returns true when the item's fields are packed
-func (item *Item) Packed() bool {
-    return item == nil || item.isPacked()
 }
 
 // GetField returns the value for a field at index.
@@ -254,26 +226,12 @@ func (item *Item) GetField(index int) float64 {
         return 0
     }
     if item.Packed() {
-        var fvalue float64
-        var idx int
-        item.ForEachField(-1, func(value float64) bool {
-            if idx == index {
-                fvalue = value
-                return false
-            }
-            idx++
-            return true
-        })
-        return fvalue
+        return item.packedGetField(index)
     }
-    numFields := item.fieldsLen() / 8
-    if index < numFields {
-        return getFieldAt(item.data, index)
-    }
-    return 0
+    return item.unpackedGetField(index)
 }
 
 // HasFields returns true when item has fields
 func (item *Item) HasFields() bool {
-    return item != nil && item.fieldsLen() > 0
+    return item != nil && item.fieldsDataSize() > 0
 }
 
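Taken together, the hunks above make every public field operation on Item a thin dispatch on Packed(): packed items defer to the packed* helpers and unpacked items to the unpacked* helpers. A hedged usage sketch, written as if it lived inside the item package itself (not part of the commit):

```go
package item

import "fmt"

// usageSketch is illustrative only; it exercises the public surface that
// SetField/GetField/ForEachField now route through Packed().
func usageSketch() {
    // packed=true stores fields in the variable-width packed encoding,
    // packed=false keeps one raw 8-byte float64 slot per field.
    it := New("truck1", nil, true)
    it.SetField(0, 28.5) // index 0
    it.SetField(3, 1)    // sparse set: indexes 1 and 2 stay 0
    it.ForEachField(-1, func(v float64) bool {
        fmt.Println(v) // 28.5, 0, 0, 1
        return true
    })
    fmt.Println(it.GetField(3), it.HasFields()) // 1 true
}
```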
@@ -10,23 +10,17 @@ import (
     "github.com/tidwall/geojson/geometry"
 )
 
-func init() {
-    seed := time.Now().UnixNano()
-    println(seed)
-    rand.Seed(seed)
-}
-
 func testRandItemHead(t *testing.T, idx int, item *Item) {
     t.Helper()
     if idx == 0 {
         if item.isPoint() {
             t.Fatalf("expected false")
         }
-        if item.fieldsLen() != 0 {
-            t.Fatalf("expected '%v', got '%v'", 0, item.fieldsLen())
+        if item.fieldsDataSize() != 0 {
+            t.Fatalf("expected '%v', got '%v'", 0, item.fieldsDataSize())
         }
-        if item.idLen() != 0 {
-            t.Fatalf("expected '%v', got '%v'", 0, item.idLen())
+        if item.idDataSize() != 0 {
+            t.Fatalf("expected '%v', got '%v'", 0, item.idDataSize())
         }
     }
     isPoint := rand.Int()%2 == 0
@@ -34,17 +28,17 @@ func testRandItemHead(t *testing.T, idx int, item *Item) {
     idLen := int(rand.Uint32())
 
     item.setIsPoint(isPoint)
-    item.setFieldsLen(fieldsLen)
-    item.setIDLen(idLen)
+    item.setFieldsDataSize(fieldsLen)
+    item.setIDDataSize(idLen)
 
     if item.isPoint() != isPoint {
         t.Fatalf("isPoint: expected '%v', got '%v'", isPoint, item.isPoint())
     }
-    if item.fieldsLen() != fieldsLen {
-        t.Fatalf("fieldsLen: expected '%v', got '%v'", fieldsLen, item.fieldsLen())
+    if item.fieldsDataSize() != fieldsLen {
+        t.Fatalf("fieldsLen: expected '%v', got '%v'", fieldsLen, item.fieldsDataSize())
     }
-    if item.idLen() != idLen {
-        t.Fatalf("idLen: expected '%v', got '%v'", idLen, item.idLen())
+    if item.idDataSize() != idLen {
+        t.Fatalf("idLen: expected '%v', got '%v'", idLen, item.idDataSize())
     }
 }
 
@@ -62,7 +56,6 @@ func testRandItem(t *testing.T) {
     keyb := make([]byte, rand.Int()%16)
     rand.Read(keyb)
     key := string(keyb)
-
     packed := rand.Int()%2 == 0
     values := make([]float64, rand.Int()%64)
     for i := range values {
@@ -111,10 +104,10 @@ func testRandItem(t *testing.T) {
     }
     for _, i := range setValues {
         if item.GetField(i) != values[i] {
-            t.Fatalf("expected '%v', got '%v'", values[i], item.GetField(i))
+            t.Fatalf("expected '%v', got '%v' for index %d", values[i], item.GetField(i), i)
         }
     }
-    fields := item.fields()
+    fields := itemFields(item)
     for i := 0; i < len(fields); i++ {
         for _, j := range setValues {
             if i == j {
@@ -126,8 +119,9 @@ func testRandItem(t *testing.T) {
         }
     }
     weight, points := item.WeightAndPoints()
-    if weight != len(fields)*8+len(key)+points*16 {
-        t.Fatalf("expected '%v', got '%v'", len(fields)*8+len(key)+points*16, weight)
+    if weight != item.fieldsDataSize()+len(key)+points*16 {
+        t.Fatalf("expected '%v', got '%v'",
+            item.fieldsDataSize()+len(key)+points*16, weight)
     }
     if points != 1 {
         t.Fatalf("expected '%v', got '%v'", 1, points)
@@ -167,8 +161,11 @@ func testRandItem(t *testing.T) {
         fvalues = append(fvalues, value)
         return true
     })
+    for len(fvalues) < len(values) {
+        fvalues = append(fvalues, 0)
+    }
     if !floatsEquals(values, fvalues) {
-        t.Fatalf("expected '%v', got '%v'", 1, len(fvalues))
+        t.Fatalf("expected true")
     }
 
     // should not fail, must allow nil receiver
@@ -185,25 +182,25 @@ func testRandItem(t *testing.T) {
     }
     item.CopyOverFields(values)
     weight, points := item.WeightAndPoints()
-    if weight != len(values)*8+len(key)+points*16 {
-        t.Fatalf("expected '%v', got '%v'", len(values)*8+len(key)+points*16, weight)
+    if weight != item.fieldsDataSize()+len(key)+points*16 {
+        t.Fatalf("expected '%v', got '%v'", item.fieldsDataSize()+len(key)+points*16, weight)
     }
     if points != 1 {
         t.Fatalf("expected '%v', got '%v'", 1, points)
     }
-    if !floatsEquals(item.fields(), values) {
-        t.Fatalf("expected '%v', got '%v'", values, item.fields())
+    if !floatsEquals(itemFields(item), values) {
+        t.Fatalf("expected '%v', got '%v'", values, itemFields(item))
     }
     item.CopyOverFields(item)
     weight, points = item.WeightAndPoints()
-    if weight != len(values)*8+len(key)+points*16 {
-        t.Fatalf("expected '%v', got '%v'", len(values)*8+len(key)+points*16, weight)
+    if weight != item.fieldsDataSize()+len(key)+points*16 {
+        t.Fatalf("expected '%v', got '%v'", item.fieldsDataSize()+len(key)+points*16, weight)
     }
     if points != 1 {
         t.Fatalf("expected '%v', got '%v'", 1, points)
     }
-    if !floatsEquals(item.fields(), values) {
-        t.Fatalf("expected '%v', got '%v'", values, item.fields())
+    if !floatsEquals(itemFields(item), values) {
+        t.Fatalf("expected '%v', got '%v'", values, itemFields(item))
     }
     if len(values) > 0 && !item.HasFields() {
         t.Fatal("expected true")
@@ -217,8 +214,8 @@ func testRandItem(t *testing.T) {
     if points != 1 {
         t.Fatalf("expected '%v', got '%v'", 1, points)
     }
-    if len(item.fields()) != 0 {
-        t.Fatalf("expected '%#v', got '%#v'", 0, len(item.fields()))
+    if len(itemFields(item)) != 0 {
+        t.Fatalf("expected '%#v', got '%#v'", 0, len(itemFields(item)))
     }
     if item.ID() != key {
         t.Fatalf("expected '%v', got '%v'", key, item.ID())
@@ -229,7 +226,20 @@ func testRandItem(t *testing.T) {
 
 }
 
+func itemFields(item *Item) []float64 {
+    var values []float64
+    item.ForEachField(-1, func(value float64) bool {
+        values = append(values, value)
+        return true
+    })
+    return values
+}
+
 func TestItem(t *testing.T) {
+    seed := time.Now().UnixNano()
+    seed = 1550371581595971000
+    println("TestItem seed", seed)
+    rand.Seed(seed)
     start := time.Now()
     for time.Since(start) < time.Second/2 {
         testRandItem(t)
@@ -0,0 +1,284 @@
+package item
+
+import (
+    "fmt"
+    "unsafe"
+
+    "github.com/h2so5/half"
+)
+
+// kind   bits   bytes   values      min          max
+// --------------------------------------------------------------------
+// 0      5      1       32          -16          15
+// 1      13     2       16384       -4095        4095
+// 2      21     3       2097152     -1048576     1048575
+// 3      29     4       536870912   -268435456   268435455
+// 4      16     3       -- standard 16-bit floating point --
+// 5      32     5       -- standard 32-bit floating point --
+// 6      64     9       -- standard 64-bit floating point --
+
+const maxFieldBytes = 9
+
+const (
+    maxInt5  = 15
+    maxInt13 = 4095
+    maxInt21 = 1048575
+    maxInt29 = 268435455
+)
+
+func appendPacked(dst []byte, f64 float64) []byte {
+    if f64 == 0 {
+        return append(dst, 0)
+    }
+    i64 := int64(f64)
+    if f64 == float64(i64) {
+        // whole number
+        var signed byte
+        if i64 < 0 {
+            i64 *= -1
+            signed = 16
+        }
+        if i64 <= maxInt5 {
+            return append(dst, 0<<5|signed|
+                byte(i64))
+        }
+        if i64 <= maxInt13 {
+            return append(dst, 1<<5|signed|
+                byte(i64>>8), byte(i64))
+        }
+        if i64 <= maxInt21 {
+            return append(dst, 2<<5|signed|
+                byte(i64>>16), byte(i64>>8), byte(i64))
+        }
+        if i64 <= maxInt29 {
+            return append(dst, 3<<5|signed|
+                byte(i64>>24), byte(i64>>16), byte(i64>>8), byte(i64))
+        }
+        // fallthrough
+    }
+    f32 := float32(f64)
+    if f64 == float64(f32) {
+        f16 := half.NewFloat16(f32)
+        if f32 == f16.Float32() {
+            dst = append(dst, 4<<5, 0, 0)
+            *(*half.Float16)(unsafe.Pointer(&dst[len(dst)-2])) = f16
+            return dst
+        }
+        dst = append(dst, 5<<5, 0, 0, 0, 0)
+        *(*float32)(unsafe.Pointer(&dst[len(dst)-4])) = f32
+        return dst
+    }
+    dst = append(dst, 6<<5, 0, 0, 0, 0, 0, 0, 0, 0)
+    *(*float64)(unsafe.Pointer(&dst[len(dst)-8])) = f64
+    return dst
+}
+
+func skipPacked(data []byte, count int) (out []byte, read int) {
+    var i int
+    for i < len(data) {
+        if read >= count {
+            return data[i:], read
+        }
+        kind := data[i] >> 5
+        if kind < 4 {
+            i += int(kind) + 1
+        } else if kind == 4 {
+            i += 3
+        } else if kind == 5 {
+            i += 5
+        } else {
+            i += 9
+        }
+        read++
+    }
+    return nil, read
+}
+
+func readPacked(data []byte) ([]byte, float64) {
+    if len(data) == 0 {
+        return nil, 0
+    }
+    if data[0] == 0 {
+        return data[1:], 0
+    }
+    kind := data[0] >> 5
+    switch kind {
+    case 0, 1, 2, 3:
+        // whole number
+        var value float64
+        if kind == 0 {
+            value = float64(
+                uint32(data[0] & 0xF),
+            )
+        } else if kind == 1 {
+            value = float64(
+                uint32(data[0]&0xF)<<8 | uint32(data[1]),
+            )
+        } else if kind == 2 {
+            value = float64(
+                uint32(data[0]&0xF)<<16 | uint32(data[1])<<8 |
+                    uint32(data[2]),
+            )
+        } else {
+            value = float64(
+                uint32(data[0]&0xF)<<24 | uint32(data[1])<<16 |
+                    uint32(data[2])<<8 | uint32(data[3]),
+            )
+        }
+        if data[0]&0x10 != 0 {
+            value *= -1
+        }
+        return data[kind+1:], value
+    case 4:
+        // 16-bit float
+        return data[3:],
+            float64((*half.Float16)(unsafe.Pointer(&data[1])).Float32())
+    case 5:
+        // 32-bit float
+        return data[5:],
+            float64(*(*float32)(unsafe.Pointer(&data[1])))
+    case 6:
+        // 64-bit float
+        return data[9:], *(*float64)(unsafe.Pointer(&data[1]))
+    }
+    panic("invalid data")
+}
+
+func (item *Item) packedGenerateFieldBytes(values []float64) []byte {
+    var dst []byte
+    for i := 0; i < len(values); i++ {
+        dst = appendPacked(dst, values[i])
+    }
+    return dst
+}
+
+func (item *Item) packedSetField(index int, value float64) (updated bool) {
+    if false {
+        func() {
+            data := item.fieldsBytes()
+            fmt.Printf("%v >> [%x]", value, data)
+            defer func() {
+                data := item.fieldsBytes()
+                fmt.Printf(" >> [%x]\n", data)
+            }()
+        }()
+    }
+    /////////////////////////////////////////////////////////////////
+
+    // original field bytes
+    headBytes := item.fieldsBytes()
+
+    // quickly skip over head fields.
+    // returns the start of the field at index, and the number of valid
+    // fields that were read.
+    fieldBytes, read := skipPacked(headBytes, index)
+
+    // number of empty/blank bytes that need to be added between the
+    // head bytes and the new field bytes.
+    var blankSpace int
+
+    // data that follows the new field bytes
+    var tailBytes []byte
+
+    if len(fieldBytes) == 0 {
+        // field at index was not found.
+        if value == 0 {
+            // zero value is the default, so we can assume that the field was
+            // not updated.
+            return false
+        }
+        // set the blank space
+        blankSpace = index - read
+        fieldBytes = nil
+    } else {
+        // field at index was found.
+
+        // truncate the head bytes to reflect only the bytes up to
+        // the current field.
+        headBytes = headBytes[:len(headBytes)-len(fieldBytes)]
+
+        // read the current value and get the tail data following the
+        // current field.
+        var cvalue float64
+        tailBytes, cvalue = readPacked(fieldBytes)
+        if cvalue == value {
+            // no change to value
+            return false
+        }
+
+        // truncate the field bytes to exactly match current field.
+        fieldBytes = fieldBytes[:len(fieldBytes)-len(tailBytes)]
+    }
+
+    // create the new field bytes
+    {
+        var buf [maxFieldBytes]byte
+        newFieldBytes := appendPacked(buf[:0], value)
+        if len(newFieldBytes) == len(fieldBytes) {
+            // no change in data size, update in place
+            copy(fieldBytes, newFieldBytes)
+            return true
+        }
+        // reassign the field bytes
+        fieldBytes = newFieldBytes
+    }
+
+    // hang on to the item id
+    id := item.ID()
+
+    // create a new byte slice
+    // head+blank+field+tail+id
+    nbytes := make([]byte,
+        len(headBytes)+blankSpace+len(fieldBytes)+len(tailBytes)+len(id))
+
+    // fill the data
+    copy(nbytes, headBytes)
+    copy(nbytes[len(headBytes)+blankSpace:], fieldBytes)
+    copy(nbytes[len(headBytes)+blankSpace+len(fieldBytes):], tailBytes)
+    copy(nbytes[len(headBytes)+blankSpace+len(fieldBytes)+len(tailBytes):], id)
+
+    // update the field size
+    item.setFieldsDataSize(len(nbytes) - len(id))
+
+    // update the data pointer
+    item.data = unsafe.Pointer(&nbytes[0])
+
+    return true
+}
+
+func (item *Item) packedForEachField(count int, iter func(value float64) bool) {
+    data := item.fieldsBytes()
+    if count < 0 {
+        // iterate over all of the known values
+        for len(data) > 0 {
+            var value float64
+            data, value = readPacked(data)
+            if !iter(value) {
+                return
+            }
+        }
+    } else {
+        for i := 0; i < count; i++ {
+            var value float64
+            data, value = readPacked(data)
+            if !iter(value) {
+                return
+            }
+        }
+    }
+}
+
+func (item *Item) packedGetField(index int) float64 {
+    var idx int
+    var fvalue float64
+    item.packedForEachField(-1, func(value float64) bool {
+        if idx == index {
+            fvalue = value
+            return false
+        }
+        idx++
+        return true
+    })
+    return fvalue
+}
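A small round-trip sketch of the encoder in the new file above (not part of the commit; it only calls appendPacked and readPacked as defined there, so it would live in the same item package):

```go
package item

import "fmt"

// packedRoundTripSketch is illustrative only. It shows the width that
// appendPacked picks per value and that readPacked recovers the same
// values in order.
func packedRoundTripSketch() {
    var buf []byte
    for _, v := range []float64{0, 15, -4095, 0.25, 3.14159} {
        buf = appendPacked(buf, v)
    }
    // 1 + 1 + 2 + 3 + 9 bytes: zero, kind 0, kind 1 (signed),
    // kind 4 (float16), kind 6 (float64)
    fmt.Println(len(buf)) // 16

    data := buf
    for len(data) > 0 {
        var v float64
        data, v = readPacked(data)
        fmt.Println(v) // 0, 15, -4095, 0.25, 3.14159
    }
}
```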
@@ -0,0 +1,117 @@
+package item
+
+import (
+    "math/rand"
+    "testing"
+    "time"
+)
+
+func TestPacked(t *testing.T) {
+    start := time.Now()
+    for time.Since(start) < time.Second/2 {
+        testPacked(t)
+    }
+}
+func testPacked(t *testing.T) {
+    n := rand.Int() % 1024
+    if n%2 == 1 {
+        n++
+    }
+    values := make([]float64, n)
+    for i := 0; i < len(values); i++ {
+        switch rand.Int() % 9 {
+        case 0:
+            values[i] = 0
+        case 1:
+            values[i] = float64((rand.Int() % 32) - 32/2)
+        case 2:
+            values[i] = float64((rand.Int() % 128))
+        case 3:
+            values[i] = float64((rand.Int() % 8191) - 8191/2)
+        case 4:
+            values[i] = float64((rand.Int() % 2097152) - 2097152/2)
+        case 5:
+            values[i] = float64((rand.Int() % 536870912) - 536870912/2)
+        case 6:
+            values[i] = float64(rand.Int() % 500)
+            switch rand.Int() % 4 {
+            case 1:
+                values[i] = 0.25
+            case 2:
+                values[i] = 0.50
+            case 3:
+                values[i] = 0.75
+            }
+        case 7:
+            values[i] = float64(rand.Float32())
+        case 8:
+            values[i] = rand.Float64()
+        }
+    }
+    var dst []byte
+    for i := 0; i < len(values); i++ {
+        dst = appendPacked(dst, values[i])
+    }
+    data := dst
+    var pvalues []float64
+    for {
+        var value float64
+        data, value = readPacked(data)
+        if data == nil {
+            break
+        }
+        pvalues = append(pvalues, value)
+    }
+    if !floatsEquals(values, pvalues) {
+        if len(values) != len(pvalues) {
+            t.Fatalf("sizes not equal")
+        }
+        for i := 0; i < len(values); i++ {
+            if values[i] != pvalues[i] {
+                t.Fatalf("expected '%v', got '%v'", values[i], pvalues[i])
+            }
+        }
+    }
+    data = dst
+    var read int
+
+    data, read = skipPacked(data, len(values)/2)
+    if read != len(values)/2 {
+        t.Fatalf("expected '%v', got '%v'", len(values)/2, read)
+    }
+    data, read = skipPacked(data, len(values)/2)
+    if read != len(values)/2 {
+        t.Fatalf("expected '%v', got '%v'", len(values)/2, read)
+    }
+    if len(data) != 0 {
+        t.Fatalf("expected '%v', got '%v'", 0, len(data))
+    }
+
+}
+
+// func TestPackedItem(t *testing.T) {
+// item := New("hello", nil, true)
+// values := []float64{0, 1, 1, 0, 0, 1, 1, 0, 1} //, 1} //, 1, 0, 1, 0, 0, 1}
+// fmt.Println(values)
+// for i := 0; i < len(values); i++ {
+// item.SetField(i, values[i])
+// }
+// fmt.Print("[")
+// for j := 0; j < len(values); j++ {
+// if j > 0 {
+// print(" ")
+// }
+// fmt.Print(item.GetField(j))
+// }
+// print("]")
+// println(item.ID())
+
+// // for i := 0; i < len(values); i++ {
+
+// // fmt.Println(values[i], item.GetField(i))
+// // }
+
+// // fmt.Println(item.GetField(0))
+// // println(">>", item.ID())
+
+// }
@@ -0,0 +1,95 @@
+package item
+
+import (
+    "reflect"
+    "unsafe"
+)
+
+func getFieldAt(data unsafe.Pointer, index int) float64 {
+    return *(*float64)(unsafe.Pointer(uintptr(data) + uintptr(index*8)))
+}
+
+func setFieldAt(data unsafe.Pointer, index int, value float64) {
+    *(*float64)(unsafe.Pointer(uintptr(data) + uintptr(index*8))) = value
+}
+
+func (item *Item) dataBytes() []byte {
+    return *(*[]byte)((unsafe.Pointer)(&reflect.SliceHeader{
+        Data: uintptr(unsafe.Pointer(item.data)),
+        Len: item.fieldsDataSize() + item.idDataSize(),
+        Cap: item.fieldsDataSize() + item.idDataSize(),
+    }))
+}
+
+func bytesToFloats(f []byte) []float64 {
+    return *(*[]float64)((unsafe.Pointer)(&reflect.SliceHeader{
+        Data: ((*reflect.SliceHeader)(unsafe.Pointer(&f))).Data,
+        Len: len(f) / 8,
+        Cap: len(f) / 8,
+    }))
+}
+
+func (item *Item) unpackedGenerateFieldBytes(values []float64) []byte {
+    return *(*[]byte)((unsafe.Pointer)(&reflect.SliceHeader{
+        Data: ((*reflect.SliceHeader)(unsafe.Pointer(&values))).Data,
+        Len: len(values) * 8,
+        Cap: len(values) * 8,
+    }))
+}
+
+func (item *Item) unpackedSetField(index int, value float64) (updated bool) {
+    numFields := item.fieldsDataSize() / 8
+    if index < numFields {
+        // field exists
+        if getFieldAt(item.data, index) == value {
+            return false
+        }
+    } else if value == 0 {
+        return false
+    } else {
+        // make room for new field
+        oldBytes := item.dataBytes()
+        newData := make([]byte, (index+1)*8+item.idDataSize())
+        // copy the existing fields
+        copy(newData, oldBytes[:item.fieldsDataSize()])
+        // copy the id
+        copy(newData[(index+1)*8:], oldBytes[item.fieldsDataSize():])
+        // update the fields length
+        item.setFieldsDataSize((index + 1) * 8)
+        // update the raw data
+        item.data = unsafe.Pointer(&newData[0])
+    }
+    // set the new field
+    setFieldAt(item.data, index, value)
+    return true
+}
+
+func (item *Item) unpackedForEachField(
+    count int, iter func(value float64) bool,
+) {
+    fields := bytesToFloats(item.fieldsBytes())
+    var n int
+    if count < 0 {
+        n = len(fields)
+    } else {
+        n = count
+    }
+    for i := 0; i < n; i++ {
+        var field float64
+        if i < len(fields) {
+            field = fields[i]
+        }
+        if !iter(field) {
+            return
+        }
+    }
+}
+
+func (item *Item) unpackedGetField(index int) float64 {
+    numFields := item.fieldsDataSize() / 8
+    if index < numFields {
+        return getFieldAt(item.data, index)
+    }
+    return 0
+}
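For contrast with the packed encoding, the unpacked layout above is a fixed eight bytes per field, and unpackedGenerateFieldBytes/bytesToFloats only reinterpret the backing array through reflect.SliceHeader rather than copying. A copy-based sketch of the same layout, with little-endian byte order assumed purely for illustration (the real code keeps the machine's native order and avoids the copy):

```go
package item

import (
    "encoding/binary"
    "math"
)

// copyFloatsToBytes is a copying stand-in for unpackedGenerateFieldBytes:
// every field occupies one fixed 8-byte slot.
func copyFloatsToBytes(values []float64) []byte {
    out := make([]byte, len(values)*8)
    for i, v := range values {
        binary.LittleEndian.PutUint64(out[i*8:], math.Float64bits(v))
    }
    return out
}

// copyBytesToFloats mirrors bytesToFloats.
func copyBytesToFloats(b []byte) []float64 {
    out := make([]float64, len(b)/8)
    for i := range out {
        out[i] = math.Float64frombits(binary.LittleEndian.Uint64(b[i*8:]))
    }
    return out
}
```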
@@ -46,7 +46,7 @@ func randPoints(N int) []*item.Item {
             box.min[j] = rand.Float64()
         }
         box.max = box.min
-        boxes[i] = item.New(fmt.Sprintf("%d", i), box)
+        boxes[i] = item.New(fmt.Sprintf("%d", i), box, false)
     }
     return boxes
 }
@@ -68,7 +68,7 @@ func randBoxes(N int) []*item.Item {
         if box.max[0] > 180 || box.max[1] > 90 {
             i--
         }
-        boxes[i] = item.New(fmt.Sprintf("%d", i), box)
+        boxes[i] = item.New(fmt.Sprintf("%d", i), box, false)
     }
     return boxes
 }
@@ -264,7 +264,7 @@ func testBoxesVarious(t *testing.T, items []*item.Item, label string) {
                 nbox.max[j] = box.max[j] + (rand.Float64() - 0.5)
             }
         }
-        nboxes[i] = item.New(fmt.Sprintf("%d", i), nbox)
+        nboxes[i] = item.New(fmt.Sprintf("%d", i), nbox, false)
     }
     for i := 0; i < N; i++ {
         tr.Insert(boxMin(nboxes[i]), boxMax(nboxes[i]), nboxes[i])
@@ -0,0 +1,22 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
@@ -0,0 +1,121 @@
+Creative Commons Legal Code
+
+CC0 1.0 Universal
+
+    CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE
+    LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN
+    ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS
+    INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES
+    REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS
+    PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM
+    THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED
+    HEREUNDER.
+
+Statement of Purpose
+
+The laws of most jurisdictions throughout the world automatically confer
+exclusive Copyright and Related Rights (defined below) upon the creator
+and subsequent owner(s) (each and all, an "owner") of an original work of
+authorship and/or a database (each, a "Work").
+
+Certain owners wish to permanently relinquish those rights to a Work for
+the purpose of contributing to a commons of creative, cultural and
+scientific works ("Commons") that the public can reliably and without fear
+of later claims of infringement build upon, modify, incorporate in other
+works, reuse and redistribute as freely as possible in any form whatsoever
+and for any purposes, including without limitation commercial purposes.
+These owners may contribute to the Commons to promote the ideal of a free
+culture and the further production of creative, cultural and scientific
+works, or to gain reputation or greater distribution for their Work in
+part through the use and efforts of others.
+
+For these and/or other purposes and motivations, and without any
+expectation of additional consideration or compensation, the person
+associating CC0 with a Work (the "Affirmer"), to the extent that he or she
+is an owner of Copyright and Related Rights in the Work, voluntarily
+elects to apply CC0 to the Work and publicly distribute the Work under its
+terms, with knowledge of his or her Copyright and Related Rights in the
+Work and the meaning and intended legal effect of CC0 on those rights.
+
+1. Copyright and Related Rights. A Work made available under CC0 may be
+protected by copyright and related or neighboring rights ("Copyright and
+Related Rights"). Copyright and Related Rights include, but are not
+limited to, the following:
+
+i. the right to reproduce, adapt, distribute, perform, display,
+communicate, and translate a Work;
+ii. moral rights retained by the original author(s) and/or performer(s);
+iii. publicity and privacy rights pertaining to a person's image or
+likeness depicted in a Work;
+iv. rights protecting against unfair competition in regards to a Work,
+subject to the limitations in paragraph 4(a), below;
+v. rights protecting the extraction, dissemination, use and reuse of data
+in a Work;
+vi. database rights (such as those arising under Directive 96/9/EC of the
+European Parliament and of the Council of 11 March 1996 on the legal
+protection of databases, and under any national implementation
+thereof, including any amended or successor version of such
+directive); and
+vii. other similar, equivalent or corresponding rights throughout the
+world based on applicable law or treaty, and any national
+implementations thereof.
+
+2. Waiver. To the greatest extent permitted by, but not in contravention
+of, applicable law, Affirmer hereby overtly, fully, permanently,
+irrevocably and unconditionally waives, abandons, and surrenders all of
+Affirmer's Copyright and Related Rights and associated claims and causes
+of action, whether now known or unknown (including existing as well as
+future claims and causes of action), in the Work (i) in all territories
+worldwide, (ii) for the maximum duration provided by applicable law or
+treaty (including future time extensions), (iii) in any current or future
+medium and for any number of copies, and (iv) for any purpose whatsoever,
+including without limitation commercial, advertising or promotional
+purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
+member of the public at large and to the detriment of Affirmer's heirs and
+successors, fully intending that such Waiver shall not be subject to
+revocation, rescission, cancellation, termination, or any other legal or
+equitable action to disrupt the quiet enjoyment of the Work by the public
+as contemplated by Affirmer's express Statement of Purpose.
+
+3. Public License Fallback. Should any part of the Waiver for any reason
+be judged legally invalid or ineffective under applicable law, then the
+Waiver shall be preserved to the maximum extent permitted taking into
+account Affirmer's express Statement of Purpose. In addition, to the
+extent the Waiver is so judged Affirmer hereby grants to each affected
+person a royalty-free, non transferable, non sublicensable, non exclusive,
+irrevocable and unconditional license to exercise Affirmer's Copyright and
+Related Rights in the Work (i) in all territories worldwide, (ii) for the
+maximum duration provided by applicable law or treaty (including future
+time extensions), (iii) in any current or future medium and for any number
+of copies, and (iv) for any purpose whatsoever, including without
+limitation commercial, advertising or promotional purposes (the
+"License"). The License shall be deemed effective as of the date CC0 was
+applied by Affirmer to the Work. Should any part of the License for any
+reason be judged legally invalid or ineffective under applicable law, such
+partial invalidity or ineffectiveness shall not invalidate the remainder
+of the License, and in such case Affirmer hereby affirms that he or she
+will not (i) exercise any of his or her remaining Copyright and Related
+Rights in the Work or (ii) assert any associated claims and causes of
+action with respect to the Work, in either case contrary to Affirmer's
+express Statement of Purpose.
+
+4. Limitations and Disclaimers.
+
+a. No trademark or patent rights held by Affirmer are waived, abandoned,
+surrendered, licensed or otherwise affected by this document.
+b. Affirmer offers the Work as-is and makes no representations or
+warranties of any kind concerning the Work, express, implied,
+statutory or otherwise, including without limitation warranties of
+title, merchantability, fitness for a particular purpose, non
+infringement, or the absence of latent or other defects, accuracy, or
+the present or absence of errors, whether or not discoverable, all to
+the greatest extent permissible under applicable law.
+c. Affirmer disclaims responsibility for clearing rights of other persons
+that may apply to the Work or any use thereof, including without
+limitation any person's Copyright and Related Rights in the Work.
+Further, Affirmer disclaims responsibility for obtaining any necessary
+consents, permissions or other rights required for any use of the
+Work.
+d. Affirmer understands and acknowledges that Creative Commons is not a
+party to this document and has no duty or obligation with respect to
+this CC0 or use of the Work.
@@ -0,0 +1,6 @@
+half
+==========
+
+IEEE 754 binary16 half precision format for Go
+
+[![GoDoc](https://godoc.org/github.com/h2so5/half?status.svg)](https://godoc.org/github.com/h2so5/half)
@@ -0,0 +1,59 @@
+/*
+
+go-float16 - IEEE 754 binary16 half precision format
+Written in 2013 by h2so5 <mail@h2so5.net>
+
+To the extent possible under law, the author(s) have dedicated all copyright and
+related and neighboring rights to this software to the public domain worldwide.
+This software is distributed without any warranty.
+You should have received a copy of the CC0 Public Domain Dedication along with this software.
+If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
+
+*/
+
+// Package half is an IEEE 754 binary16 half precision format.
+package half
+
+import "math"
+
+// A Float16 represents a 16-bit floating point number.
+type Float16 uint16
+
+// NewFloat16 allocates and returns a new Float16 set to f.
+func NewFloat16(f float32) Float16 {
+    i := math.Float32bits(f)
+    sign := uint16((i >> 31) & 0x1)
+    exp := (i >> 23) & 0xff
+    exp16 := int16(exp) - 127 + 15
+    frac := uint16(i>>13) & 0x3ff
+    if exp == 0 {
+        exp16 = 0
+    } else if exp == 0xff {
+        exp16 = 0x1f
+    } else {
+        if exp16 > 0x1e {
+            exp16 = 0x1f
+            frac = 0
+        } else if exp16 < 0x01 {
+            exp16 = 0
+            frac = 0
+        }
+    }
+    f16 := (sign << 15) | uint16(exp16<<10) | frac
+    return Float16(f16)
+}
+
+// Float32 returns the float32 representation of f.
+func (f Float16) Float32() float32 {
+    sign := uint32((f >> 15) & 0x1)
+    exp := (f >> 10) & 0x1f
+    exp32 := uint32(exp) + 127 - 15
+    if exp == 0 {
+        exp32 = 0
+    } else if exp == 0x1f {
+        exp32 = 0xff
+    }
+    frac := uint32(f & 0x3ff)
+    i := (sign << 31) | (exp32 << 23) | (frac << 13)
+    return math.Float32frombits(i)
+}
@@ -0,0 +1,55 @@
+/*
+
+go-float16 - IEEE 754 binary16 half precision format
+Written in 2013 by h2so5 <mail@h2so5.net>
+
+To the extent possible under law, the author(s) have dedicated all copyright and
+related and neighboring rights to this software to the public domain worldwide.
+This software is distributed without any warranty.
+You should have received a copy of the CC0 Public Domain Dedication along with this software.
+If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
+
+*/
+
+package half
+
+import (
+    "math"
+    "testing"
+)
+
+func getFloatTable() map[Float16]float32 {
+    table := map[Float16]float32{
+        0x3c00: 1,
+        0x4000: 2,
+        0xc000: -2,
+        0x7bfe: 65472,
+        0x7bff: 65504,
+        0xfbff: -65504,
+        0x0000: 0,
+        0x8000: float32(math.Copysign(0, -1)),
+        0x7c00: float32(math.Inf(1)),
+        0xfc00: float32(math.Inf(-1)),
+        0x5b8f: 241.875,
+        0x48c8: 9.5625,
+    }
+    return table
+}
+
+func TestFloat32(t *testing.T) {
+    for k, v := range getFloatTable() {
+        f := k.Float32()
+        if f != v {
+            t.Errorf("ToFloat32(%d) = %f, want %f.", k, f, v)
+        }
+    }
+}
+
+func TestNewFloat16(t *testing.T) {
+    for k, v := range getFloatTable() {
+        i := NewFloat16(v)
+        if i != k {
+            t.Errorf("FromFloat32(%f) = %d, want %d.", v, i, k)
+        }
+    }
+}
@ -88,43 +88,14 @@ The dot and wildcard characters can be escaped with '\\'.
```

You can also query an array for the first match by using `#[...]`, or find all matches with `#[...]#`.
Queries support the `==`, `!=`, `<`, `<=`, `>`, `>=` comparison operators and the simple pattern matching `%` (like) and `!%` (not like) operators.

```
friends.#[last=="Murphy"].first >> "Dale"
friends.#[last=="Murphy"]#.first >> ["Dale","Jane"]
friends.#[age>45]#.last >> ["Craig","Murphy"]
friends.#[first%"D*"].last >> "Murphy"
friends.#[first!%"D*"].last >> "Craig"
```
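A hedged Go sketch of how these query paths are called (illustration only; the `friends` document mirrors the README example and `gjson.Get` is the only API assumed):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const json = `{"friends":[
		{"first":"Dale","last":"Murphy","age":44},
		{"first":"Roger","last":"Craig","age":68},
		{"first":"Jane","last":"Murphy","age":47}]}`

	// First match with #[...], pattern match with %, and the new !% operator.
	fmt.Println(gjson.Get(json, `friends.#[last=="Murphy"].first`)) // Dale
	fmt.Println(gjson.Get(json, `friends.#[first%"D*"].last`))      // Murphy
	fmt.Println(gjson.Get(json, `friends.#[first!%"D*"].last`))     // Craig
}
```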

## Result Type
@ -193,6 +164,114 @@ result.Int() int64 // -9223372036854775808 to 9223372036854775807
result.Uint() uint64 // 0 to 18446744073709551615
```

## Modifiers and path chaining

New in version 1.2 is support for modifier functions and path chaining.

A modifier is a path component that performs custom processing on the json.

Multiple paths can be "chained" together using the pipe character. This is useful for getting results from a modified query (a short usage sketch follows the list of built-in modifiers below).

For example, using the built-in `@reverse` modifier on the above json document, we'll get the `children` array and reverse the order:

```
"children|@reverse" >> ["Jack","Alex","Sara"]
"children|@reverse|0" >> "Jack"
```

There are currently three built-in modifiers:

- `@reverse`: Reverse an array or the members of an object.
- `@ugly`: Remove all whitespace from a json document.
- `@pretty`: Make the json document more human readable.
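A hedged sketch of the chaining syntax from Go, using only `gjson.Get` and the built-in `@reverse` modifier listed above (editor's illustration, not part of the commit):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const json = `{"children":["Sara","Alex","Jack"]}`

	// Pipe a normal path into a modifier, then index into the modified result.
	fmt.Println(gjson.Get(json, "children|@reverse"))   // ["Jack","Alex","Sara"]
	fmt.Println(gjson.Get(json, "children|@reverse|0")) // Jack
}
```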
### Modifier arguments

A modifier may accept an optional argument. The argument can be a valid JSON document or just characters.

For example, the `@pretty` modifier takes a json object as its argument.

```
@pretty:{"sortKeys":true}
```

This makes the json pretty and orders all of its keys.

```json
{
  "age":37,
  "children": ["Sara","Alex","Jack"],
  "fav.movie": "Deer Hunter",
  "friends": [
    {"age": 44, "first": "Dale", "last": "Murphy"},
    {"age": 68, "first": "Roger", "last": "Craig"},
    {"age": 47, "first": "Jane", "last": "Murphy"}
  ],
  "name": {"first": "Tom", "last": "Anderson"}
}
```

*The full list of `@pretty` options is `sortKeys`, `indent`, `prefix`, and `width`.
Please see [Pretty Options](https://github.com/tidwall/pretty#customized-output) for more information.*
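To make the argument form concrete, a hedged sketch of passing a JSON argument to `@pretty` through `gjson.Get` (the tiny input document is invented for the illustration):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const json = `{"b":2,"a":1}`

	// "@pretty" with a JSON argument: pretty-print and sort the keys.
	fmt.Println(gjson.Get(json, `@pretty:{"sortKeys":true}`))
}
```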
### Custom modifiers

You can also add custom modifiers.

For example, here we create a modifier that makes the entire json document upper or lower case.

```go
gjson.AddModifier("case", func(json, arg string) string {
	if arg == "upper" {
		return strings.ToUpper(json)
	}
	if arg == "lower" {
		return strings.ToLower(json)
	}
	return json
})
```

```
"children|@case:upper" >> ["SARA","ALEX","JACK"]
"children|@case:lower|@reverse" >> ["jack","alex","sara"]
```
## JSON Lines

There's support for [JSON Lines](http://jsonlines.org/) using the `..` prefix, which treats a multi-line document as an array.

For example:

```
{"name": "Gilbert", "age": 61}
{"name": "Alexa", "age": 34}
{"name": "May", "age": 57}
{"name": "Deloise", "age": 44}
```

```
..# >> 4
..1 >> {"name": "Alexa", "age": 34}
..3 >> {"name": "Deloise", "age": 44}
..#.name >> ["Gilbert","Alexa","May","Deloise"]
..#[name="May"].age >> 57
```

The `ForEachLine` function will iterate through JSON lines.

```go
gjson.ForEachLine(json, func(line gjson.Result) bool {
	println(line.String())
	return true
})
```
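A hedged sketch of querying the JSON Lines document above from Go with the `..` prefix (illustration only; `gjson.Get` is the only API assumed):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	const lines = `{"name": "Gilbert", "age": 61}
{"name": "Alexa", "age": 34}
{"name": "May", "age": 57}
{"name": "Deloise", "age": 44}`

	// The ".." prefix treats the multi-line document as an array.
	fmt.Println(gjson.Get(lines, "..#"))      // 4
	fmt.Println(gjson.Get(lines, "..#.name")) // ["Gilbert","Alexa","May","Deloise"]
}
```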
## Get nested array values

Suppose you want all the last names from the following json:
@ -15,6 +15,7 @@ import (
	"unicode/utf8"

	"github.com/tidwall/match"
	"github.com/tidwall/pretty"
)

// Type is Result type
@ -695,6 +696,8 @@ func parseLiteral(json string, i int) (int, string) {
type arrayPathResult struct {
	part   string
	path   string
	pipe   string
	piped  bool
	more   bool
	alogok bool
	arrch  bool
@ -710,6 +713,14 @@ type arrayPathResult struct {

func parseArrayPath(path string) (r arrayPathResult) {
	for i := 0; i < len(path); i++ {
		if !DisableChaining {
			if path[i] == '|' {
				r.part = path[:i]
				r.pipe = path[i+1:]
				r.piped = true
				return
			}
		}
		if path[i] == '.' {
			r.part = path[:i]
			r.path = path[i+1:]
@ -755,7 +766,7 @@ func parseArrayPath(path string) (r arrayPathResult) {
		if i < len(path) {
			s = i
			if path[i] == '!' {
				if i < len(path)-1 && (path[i+1] == '=' || path[i+1] == '%') {
					i++
				}
			} else if path[i] == '<' || path[i] == '>' {
@ -828,15 +839,28 @@ func parseArrayPath(path string) (r arrayPathResult) {
	return
}

// DisableChaining will disable the chaining (pipe) syntax
var DisableChaining = false

type objectPathResult struct {
	part  string
	path  string
	pipe  string
	piped bool
	wild  bool
	more  bool
}

func parseObjectPath(path string) (r objectPathResult) {
	for i := 0; i < len(path); i++ {
		if !DisableChaining {
			if path[i] == '|' {
				r.part = path[:i]
				r.pipe = path[i+1:]
				r.piped = true
				return
			}
		}
		if path[i] == '.' {
			r.part = path[:i]
			r.path = path[i+1:]
@ -934,6 +958,10 @@ func parseObject(c *parseContext, i int, path string) (int, bool) {
	var pmatch, kesc, vesc, ok, hit bool
	var key, val string
	rp := parseObjectPath(path)
	if !rp.more && rp.piped {
		c.pipe = rp.pipe
		c.piped = true
	}
	for i < len(c.json) {
		for ; i < len(c.json); i++ {
			if c.json[i] == '"' {
@ -1099,6 +1127,8 @@ func queryMatches(rp *arrayPathResult, value Result) bool {
			return value.Str >= rpv
		case "%":
			return match.Match(value.Str, rpv)
		case "!%":
			return !match.Match(value.Str, rpv)
		}
	case Number:
		rpvn, _ := strconv.ParseFloat(rpv, 64)
@ -1157,6 +1187,10 @@ func parseArray(c *parseContext, i int, path string) (int, bool) {
			partidx = int(n)
		}
	}
	if !rp.more && rp.piped {
		c.pipe = rp.pipe
		c.piped = true
	}
	for i < len(c.json)+1 {
		if !rp.arrch {
			pmatch = partidx == h
@ -1351,6 +1385,8 @@ func ForEachLine(json string, iterator func(line Result) bool) {
type parseContext struct {
	json  string
	value Result
	pipe  string
	piped bool
	calcd bool
	lines bool
}
@ -1388,6 +1424,22 @@ type parseContext struct {
// If you are consuming JSON from an unpredictable source then you may want to
// use the Valid function first.
func Get(json, path string) Result {
	if !DisableModifiers {
		if len(path) > 1 && path[0] == '@' {
			// possible modifier
			var ok bool
			var rjson string
			path, rjson, ok = execModifier(json, path)
			if ok {
				if len(path) > 0 && path[0] == '|' {
					res := Get(rjson, path[1:])
					res.Index = 0
					return res
				}
				return Parse(rjson)
			}
		}
	}
	var i int
	var c = &parseContext{json: json}
	if len(path) >= 2 && path[0] == '.' && path[1] == '.' {
@ -1407,6 +1459,11 @@ func Get(json, path string) Result {
			}
		}
	}
	if c.piped {
		res := c.value.Get(c.pipe)
		res.Index = 0
		return res
	}
	fillIndex(json, c)
	return c.value
}
@ -1626,7 +1683,11 @@ func GetMany(json string, path ...string) []Result {
// The return value is a Result array where the number of items
// will be equal to the number of input paths.
func GetManyBytes(json []byte, path ...string) []Result {
	res := make([]Result, len(path))
	for i, path := range path {
		res[i] = GetBytes(json, path)
	}
	return res
}

var fieldsmu sync.RWMutex
@ -2032,7 +2093,7 @@ func validnull(data []byte, i int) (outi int, ok bool) {
//  value := gjson.Get(json, "name.last")
//
func Valid(json string) bool {
	_, ok := validpayload(stringBytes(json), 0)
	return ok
}
@ -2108,3 +2169,146 @@ func floatToInt(f float64) (n int64, ok bool) {
	}
	return 0, false
}

// execModifier parses the path to find a matching modifier function.
// The input path is expected to already start with a '@'.
func execModifier(json, path string) (pathOut, res string, ok bool) {
	name := path[1:]
	var hasArgs bool
	for i := 1; i < len(path); i++ {
		if path[i] == ':' {
			pathOut = path[i+1:]
			name = path[1:i]
			hasArgs = len(pathOut) > 0
			break
		}
		if !DisableChaining {
			if path[i] == '|' {
				pathOut = path[i:]
				name = path[1:i]
				break
			}
		}
	}
	if fn, ok := modifiers[name]; ok {
		var args string
		if hasArgs {
			var parsedArgs bool
			switch pathOut[0] {
			case '{', '[', '"':
				res := Parse(pathOut)
				if res.Exists() {
					_, args = parseSquash(pathOut, 0)
					pathOut = pathOut[len(args):]
					parsedArgs = true
				}
			}
			if !parsedArgs {
				idx := -1
				if !DisableChaining {
					idx = strings.IndexByte(pathOut, '|')
				}
				if idx == -1 {
					args = pathOut
					pathOut = ""
				} else {
					args = pathOut[:idx]
					pathOut = pathOut[idx:]
				}
			}
		}
		return pathOut, fn(json, args), true
	}
	return pathOut, res, false
}

// DisableModifiers will disable the modifier syntax
var DisableModifiers = false

var modifiers = map[string]func(json, arg string) string{
	"pretty":  modPretty,
	"ugly":    modUgly,
	"reverse": modReverse,
}

// AddModifier binds a custom modifier command to the GJSON syntax.
// This operation is not thread safe and should be executed prior to
// using all other gjson functions.
func AddModifier(name string, fn func(json, arg string) string) {
	modifiers[name] = fn
}

// ModifierExists returns true when the specified modifier exists.
func ModifierExists(name string, fn func(json, arg string) string) bool {
	_, ok := modifiers[name]
	return ok
}

// @pretty modifier makes the json look nice.
func modPretty(json, arg string) string {
	if len(arg) > 0 {
		opts := *pretty.DefaultOptions
		Parse(arg).ForEach(func(key, value Result) bool {
			switch key.String() {
			case "sortKeys":
				opts.SortKeys = value.Bool()
			case "indent":
				opts.Indent = value.String()
			case "prefix":
				opts.Prefix = value.String()
			case "width":
				opts.Width = int(value.Int())
			}
			return true
		})
		return bytesString(pretty.PrettyOptions(stringBytes(json), &opts))
	}
	return bytesString(pretty.Pretty(stringBytes(json)))
}

// @ugly modifier removes all whitespace.
func modUgly(json, arg string) string {
	return bytesString(pretty.Ugly(stringBytes(json)))
}

// @reverse reverses array elements or root object members.
func modReverse(json, arg string) string {
	res := Parse(json)
	if res.IsArray() {
		var values []Result
		res.ForEach(func(_, value Result) bool {
			values = append(values, value)
			return true
		})
		out := make([]byte, 0, len(json))
		out = append(out, '[')
		for i, j := len(values)-1, 0; i >= 0; i, j = i-1, j+1 {
			if j > 0 {
				out = append(out, ',')
			}
			out = append(out, values[i].Raw...)
		}
		out = append(out, ']')
		return bytesString(out)
	}
	if res.IsObject() {
		var keyValues []Result
		res.ForEach(func(key, value Result) bool {
			keyValues = append(keyValues, key, value)
			return true
		})
		out := make([]byte, 0, len(json))
		out = append(out, '{')
		for i, j := len(keyValues)-2, 0; i >= 0; i, j = i-2, j+1 {
			if j > 0 {
				out = append(out, ',')
			}
			out = append(out, keyValues[i+0].Raw...)
			out = append(out, ':')
			out = append(out, keyValues[i+1].Raw...)
		}
		out = append(out, '}')
		return bytesString(out)
	}
	return json
}
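The two package-level switches introduced in this file, `DisableChaining` and `DisableModifiers`, can be flipped by callers that want the older path behavior. A minimal, hedged sketch (editor's illustration, not part of the diff):

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Opt out of the new syntax globally; '|' and a leading '@' are then
	// treated as ordinary path characters again.
	gjson.DisableChaining = true
	gjson.DisableModifiers = true

	// With chaining disabled, '|' no longer splits the path, so a key that
	// literally contains a pipe can still be addressed.
	fmt.Println(gjson.Get(`{"a|b":1}`, "a|b")) // 1
}
```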
@ -1,4 +1,4 @@
//+build appengine js

package gjson
@ -8,3 +8,11 @@ func getBytes(json []byte, path string) Result {
func fillIndex(json string, c *parseContext) {
	// noop. Use zero for the Index value.
}

func stringBytes(s string) []byte {
	return []byte(s)
}

func bytesString(b []byte) string {
	return string(b)
}
@ -1,4 +1,5 @@
//+build !appengine
//+build !js

package gjson
@ -15,12 +16,6 @@ func getBytes(json []byte, path string) Result {
	if json != nil {
		// unsafe cast to string
		result = Get(*(*string)(unsafe.Pointer(&json)), path)
		// safely get the string headers
		rawhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Raw))
		strhi := *(*reflect.StringHeader)(unsafe.Pointer(&result.Str))
@ -55,6 +50,7 @@ func fromBytesGet(result Result) Result {
			result.Raw = string(*(*[]byte)(unsafe.Pointer(&rawh)))
			result.Str = string(*(*[]byte)(unsafe.Pointer(&strh)))
		}
	}
	return result
}
@ -71,3 +67,15 @@ func fillIndex(json string, c *parseContext) {
		}
	}
}

func stringBytes(s string) []byte {
	return *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader{
		Data: (*reflect.StringHeader)(unsafe.Pointer(&s)).Data,
		Len:  len(s),
		Cap:  len(s),
	}))
}

func bytesString(b []byte) string {
	return *(*string)(unsafe.Pointer(&b))
}
@ -11,6 +11,8 @@ import (
	"strings"
	"testing"
	"time"

	"github.com/tidwall/pretty"
)

// TestRandomData is a fuzzing test that throws random data at the Parse
@ -403,6 +405,10 @@ func TestBasic2(t *testing.T) {
	if mtok.String() != "aaaa" {
		t.Fatalf("expected %v, got %v", "aaaa", mtok.String())
	}
	mtok = get(basicJSON, `loggy.programmers.#[firstName !% "Bre*"].email`)
	if mtok.String() != "bbbb" {
		t.Fatalf("expected %v, got %v", "bbbb", mtok.String())
	}
	mtok = get(basicJSON, `loggy.programmers.#[firstName == "Brett"].email`)
	if mtok.String() != "aaaa" {
		t.Fatalf("expected %v, got %v", "aaaa", mtok.String())
@ -1357,7 +1363,7 @@ null
}

func TestNumUint64String(t *testing.T) {
	var i int64 = 9007199254740993 //2^53 + 1
	j := fmt.Sprintf(`{"data": [ %d, "hello" ] }`, i)
	res := Get(j, "data.0")
	if res.String() != "9007199254740993" {
@ -1366,7 +1372,7 @@ func TestNumUint64String(t *testing.T) {
}

func TestNumInt64String(t *testing.T) {
	var i int64 = -9007199254740993
	j := fmt.Sprintf(`{"data":[ "hello", %d ]}`, i)
	res := Get(j, "data.1")
	if res.String() != "-9007199254740993" {
@ -1384,7 +1390,7 @@ func TestNumBigString(t *testing.T) {
}

func TestNumFloatString(t *testing.T) {
	var i int64 = -9007199254740993
	j := fmt.Sprintf(`{"data":[ "hello", %d ]}`, i) //No quotes around value!!
	res := Get(j, "data.1")
	if res.String() != "-9007199254740993" {
@ -1427,3 +1433,74 @@ func TestArrayValues(t *testing.T) {
	}

}

func BenchmarkValid(b *testing.B) {
	for i := 0; i < b.N; i++ {
		Valid(complicatedJSON)
	}
}

func BenchmarkValidBytes(b *testing.B) {
	complicatedJSON := []byte(complicatedJSON)
	for i := 0; i < b.N; i++ {
		ValidBytes(complicatedJSON)
	}
}

func BenchmarkGoStdlibValidBytes(b *testing.B) {
	complicatedJSON := []byte(complicatedJSON)
	for i := 0; i < b.N; i++ {
		json.Valid(complicatedJSON)
	}
}

func TestModifier(t *testing.T) {
	json := `{"other":{"hello":"world"},"arr":[1,2,3,4,5,6]}`
	opts := *pretty.DefaultOptions
	opts.SortKeys = true
	exp := string(pretty.PrettyOptions([]byte(json), &opts))
	res := Get(json, `@pretty:{"sortKeys":true}`).String()
	if res != exp {
		t.Fatalf("expected '%v', got '%v'", exp, res)
	}
	res = Get(res, "@pretty|@reverse|@ugly").String()
	if res != json {
		t.Fatalf("expected '%v', got '%v'", json, res)
	}
	res = Get(res, "@pretty|@reverse|arr|@reverse|2").String()
	if res != "4" {
		t.Fatalf("expected '%v', got '%v'", "4", res)
	}
	AddModifier("case", func(json, arg string) string {
		if arg == "upper" {
			return strings.ToUpper(json)
		}
		if arg == "lower" {
			return strings.ToLower(json)
		}
		return json
	})
	res = Get(json, "other|@case:upper").String()
	if res != `{"HELLO":"WORLD"}` {
		t.Fatalf("expected '%v', got '%v'", `{"HELLO":"WORLD"}`, res)
	}
}

func TestChaining(t *testing.T) {
	json := `{
		"friends": [
			{"first": "Dale", "last": "Murphy", "age": 44},
			{"first": "Roger", "last": "Craig", "age": 68},
			{"first": "Jane", "last": "Murphy", "age": 47}
		]
	}`
	res := Get(json, "friends|0|first").String()
	if res != "Dale" {
		t.Fatalf("expected '%v', got '%v'", "Dale", res)
	}
	res = Get(json, "friends|@reverse|0|age").String()
	if res != "47" {
		t.Fatalf("expected '%v', got '%v'", "47", res)
	}
}