forked from mirror/ledisdb

Merge pull request #373 from lunny/lunny/gomod

Use go mod instead of dep to manage the project

commit 9baea07f40
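In practice the dep-to-modules switch this commit describes comes down to a few commands run at the repository root; a minimal sketch, assuming Go 1.11+ and a kept vendor/ directory (the module path is the one declared in the new go.mod below):

```sh
GO111MODULE=on go mod init github.com/siddontang/ledisdb   # seeds go.mod, converting Gopkg.lock if present
GO111MODULE=on go mod tidy -v                              # resolves and prunes requirements
GO111MODULE=on go mod vendor                               # regenerates vendor/ from go.mod
```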
@@ -1,7 +1,7 @@
 language: go
 go:
-- 1.10.x
 - 1.11.x
+- 1.12.x

 script:
 - make test
@@ -1,86 +0,0 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.


[[projects]]
  branch = "master"
  name = "github.com/cupcake/rdb"
  packages = [".","crc64","nopdecoder"]
  revision = "43ba34106c765f2111c0dc7b74cdf8ee437411e0"

[[projects]]
  branch = "master"
  name = "github.com/edsrzf/mmap-go"
  packages = ["."]
  revision = "0bce6a6887123b67a60366d2c9fe2dfb74289d2e"

[[projects]]
  name = "github.com/glendc/gopher-json"
  packages = ["."]
  revision = "dc4743023d0c166c1b844da8fc688e57ec65fe0b"
  version = "0.1.0"

[[projects]]
  branch = "master"
  name = "github.com/golang/snappy"
  packages = ["."]
  revision = "553a641470496b2327abcac10b36396bd98e45c9"

[[projects]]
  name = "github.com/pelletier/go-toml"
  packages = ["."]
  revision = "16398bac157da96aa88f98a2df640c7f32af1da2"
  version = "v1.0.1"

[[projects]]
  branch = "master"
  name = "github.com/peterh/liner"
  packages = ["."]
  revision = "3681c2a912330352991ecdd642f257efe5b85518"

[[projects]]
  branch = "master"
  name = "github.com/siddontang/go"
  packages = ["bson","filelock","hack","ioutil2","log","num","snappy","sync2"]
  revision = "cb568a3e5cc06256f91a2da5a87455f717eb33f4"

[[projects]]
  branch = "master"
  name = "github.com/siddontang/goredis"
  packages = ["."]
  revision = "760763f78400635ed7b9b115511b8ed06035e908"

[[projects]]
  branch = "master"
  name = "github.com/siddontang/rdb"
  packages = ["."]
  revision = "fc89ed2e418d27e3ea76e708e54276d2b44ae9cf"

[[projects]]
  name = "github.com/syndtr/goleveldb"
  packages = ["leveldb","leveldb/cache","leveldb/comparer","leveldb/errors","leveldb/filter","leveldb/iterator","leveldb/journal","leveldb/memdb","leveldb/opt","leveldb/storage","leveldb/table","leveldb/util"]
  revision = "cfa635847112c5dc4782e128fa7e0d05fdbfb394"

[[projects]]
  branch = "master"
  name = "github.com/ugorji/go"
  packages = ["codec"]
  revision = "84cb69a8af8316eed8cf4a3c9368a56977850062"

[[projects]]
  branch = "master"
  name = "github.com/yuin/gopher-lua"
  packages = [".","ast","parse","pm"]
  revision = "609c9cd2697344dec90fe0543c6493e3b8da3435"

[[projects]]
  branch = "master"
  name = "golang.org/x/net"
  packages = ["context"]
  revision = "fb018015d54fd2e3bfd5362a041991d350fde9d7"

[solve-meta]
  analyzer-name = "dep"
  analyzer-version = 1
  inputs-digest = "f11307c5e37b6809e5ced4d0ed85e2c52d7d6ee04c098c3644518596b62c7280"
  solver-name = "gps-cdcl"
  solver-version = 1
56 Gopkg.toml
@@ -1,56 +0,0 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
#   name = "github.com/user/project"
#   version = "1.0.0"
#
# [[constraint]]
#   name = "github.com/user/project2"
#   branch = "dev"
#   source = "github.com/myfork/project2"
#
# [[override]]
#   name = "github.com/x/y"
#   version = "2.4.0"


[[constraint]]
  name = "github.com/edsrzf/mmap-go"

[[constraint]]
  name = "github.com/glendc/gopher-json"

[[constraint]]
  name = "github.com/pelletier/go-toml"

[[constraint]]
  name = "github.com/peterh/liner"

[[constraint]]
  name = "github.com/siddontang/go"

[[constraint]]
  name = "github.com/siddontang/goredis"

[[constraint]]
  name = "github.com/siddontang/rdb"

[[constraint]]
  name = "github.com/syndtr/goleveldb"
  revision = "cfa635847112c5dc4782e128fa7e0d05fdbfb394"

[[constraint]]
  name = "github.com/ugorji/go"

[[constraint]]
  name = "github.com/yuin/gopher-lua"

[[constraint]]
  name = "golang.org/x/net"
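For comparison with the deleted dep files, a pinned constraint such as the goleveldb one above becomes a pseudo-versioned require directive under Go modules; a sketch of the correspondence (the require line below is the one actually added to go.mod later in this diff):

```
// dep pinned goleveldb to revision cfa635847112...; go.mod expresses the same pin as a pseudo-version:
require github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112
```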
23 Makefile
@@ -10,20 +10,20 @@ export CGO_LDFLAGS
 export LD_LIBRARY_PATH
 export DYLD_LIBRARY_PATH
 export GO_BUILD_TAGS
+export GO111MODULE=on

 all: build

 build:
-	go build -o bin/ledis-server -tags '$(GO_BUILD_TAGS)' cmd/ledis-server/*
+	go build -mod=vendor -o bin/ledis-server -tags '$(GO_BUILD_TAGS)' cmd/ledis-server/*
-	go build -o bin/ledis-cli -tags '$(GO_BUILD_TAGS)' cmd/ledis-cli/*
+	go build -mod=vendor -o bin/ledis-cli -tags '$(GO_BUILD_TAGS)' cmd/ledis-cli/*
-	go build -o bin/ledis-benchmark -tags '$(GO_BUILD_TAGS)' cmd/ledis-benchmark/*
+	go build -mod=vendor -o bin/ledis-benchmark -tags '$(GO_BUILD_TAGS)' cmd/ledis-benchmark/*
-	go build -o bin/ledis-dump -tags '$(GO_BUILD_TAGS)' cmd/ledis-dump/*
+	go build -mod=vendor -o bin/ledis-dump -tags '$(GO_BUILD_TAGS)' cmd/ledis-dump/*
-	go build -o bin/ledis-load -tags '$(GO_BUILD_TAGS)' cmd/ledis-load/*
+	go build -mod=vendor -o bin/ledis-load -tags '$(GO_BUILD_TAGS)' cmd/ledis-load/*
-	go build -o bin/ledis-repair -tags '$(GO_BUILD_TAGS)' cmd/ledis-repair/*
+	go build -mod=vendor -o bin/ledis-repair -tags '$(GO_BUILD_TAGS)' cmd/ledis-repair/*

 test:
-	go test --race -tags '$(GO_BUILD_TAGS)' -timeout 2m $$(go list ./... | grep -v -e /vendor/)
+	go test -mod=vendor --race -tags '$(GO_BUILD_TAGS)' -timeout 2m $$(go list ./... | grep -v -e /vendor/)

 clean:
 	go clean -i ./...

@@ -32,9 +32,6 @@ fmt:
 	gofmt -w -s . 2>&1 | grep -vE 'vendor' | awk '{print} END{if(NR>0) {exit 1}}'

 sync_vendor:
-	@which dep >/dev/null || go get -u github.com/golang/dep/cmd/dep
-	dep ensure
+	go mod tidy -v && go mod vendor

-update_vendor:
-	@which dep >/dev/null || go get -u github.com/golang/dep/cmd/dep
-	dep ensure -update
+update_vendor: sync_vendor
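With GO111MODULE=on exported and -mod=vendor passed to every go build and go test, the toolchain resolves all imports from the checked-in vendor/ tree rather than the module cache. A hedged sketch of the equivalent invocation outside the Makefile (package paths are illustrative):

```sh
export GO111MODULE=on
go build -mod=vendor -tags "$GO_BUILD_TAGS" -o bin/ledis-server ./cmd/ledis-server
go test -mod=vendor -race -timeout 2m ./...
```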
@@ -0,0 +1,29 @@
module github.com/siddontang/ledisdb

go 1.12

require (
	github.com/BurntSushi/toml v0.3.1 // indirect
	github.com/alicebob/gopher-json v0.0.0-20180125190556-5a6b3ba71ee6 // indirect
	github.com/alicebob/miniredis v2.5.0+incompatible // indirect
	github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712
	github.com/glendc/gopher-json v0.0.0-20170414221815-dc4743023d0c
	github.com/golang/snappy v0.0.0-20170215233205-553a64147049 // indirect
	github.com/gomodule/redigo v2.0.0+incompatible // indirect
	github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
	github.com/onsi/ginkgo v1.12.0 // indirect
	github.com/pelletier/go-toml v1.0.1
	github.com/peterh/liner v1.0.1-0.20171122030339-3681c2a91233
	github.com/siddontang/go v0.0.0-20170517070808-cb568a3e5cc0
	github.com/siddontang/goredis v0.0.0-20150324035039-760763f78400
	github.com/siddontang/rdb v0.0.0-20150307021120-fc89ed2e418d
	github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112
	github.com/ugorji/go v0.0.0-20171122102828-84cb69a8af83
	github.com/yuin/gopher-lua v0.0.0-20171031051903-609c9cd26973
	golang.org/x/net v0.0.0-20180906233101-161cd47e91fd
	gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
	gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 // indirect
	gopkg.in/yaml.v2 v2.2.8 // indirect
)
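Most of these versions are pseudo-versions, since the pinned dependencies carry no semver tags; a breakdown of one of them, using the same commit that Gopkg.lock pinned for github.com/siddontang/go:

```
v0.0.0-20170517070808-cb568a3e5cc0
|      |              +-- first 12 hex digits of the pinned commit (the revision Gopkg.lock used)
|      +----------------- commit timestamp in UTC, yyyymmddhhmmss
+------------------------ base semantic version (no earlier tag, hence v0.0.0)
```

Lines marked // indirect are requirements that only dependencies (or their tests) import, not the ledisdb packages themselves; go mod tidy records and refreshes those markers automatically.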
@@ -0,0 +1,71 @@
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/alicebob/gopher-json v0.0.0-20180125190556-5a6b3ba71ee6 h1:45bxf7AZMwWcqkLzDAQugVEwedisr5nRJ1r+7LYnv0U=
github.com/alicebob/gopher-json v0.0.0-20180125190556-5a6b3ba71ee6/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
github.com/alicebob/miniredis v2.5.0+incompatible h1:yBHoLpsyjupjz3NL3MhKMVkR41j82Yjf3KFv7ApYzUI=
github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk=
github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76 h1:Lgdd/Qp96Qj8jqLpq2cI1I1X7BJnu06efS+XkhRoLUQ=
github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76/go.mod h1:vYwsqCOLxGiisLwp9rITslkFNpZD5rz43tf41QFkTWY=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712 h1:aaQcKT9WumO6JEJcRyTqFVq4XUZiUcKR2/GI31TOcz8=
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/glendc/gopher-json v0.0.0-20170414221815-dc4743023d0c h1:iRTj5SRYwbvsygdwVp+y9kZT145Y1s6xOPpeOEIeGc4=
github.com/glendc/gopher-json v0.0.0-20170414221815-dc4743023d0c/go.mod h1:Gja1A+xZ9BoviGJNA2E9vFkPjjsl+CoJxSXiQM1UXtw=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/snappy v0.0.0-20170215233205-553a64147049 h1:K9KHZbXKpGydfDN0aZrsoHpLJlZsBrGMFWbgLDGnPZk=
github.com/golang/snappy v0.0.0-20170215233205-553a64147049/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/gomodule/redigo v2.0.0+incompatible h1:K/R+8tc58AaqLkqG2Ol3Qk+DR/TlNuhuh457pBFPtt0=
github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.0 h1:Iw5WCbBcaAAd0fpRb1c9r5YCylv4XDoCSigm1zLevwU=
github.com/onsi/ginkgo v1.12.0/go.mod h1:oUhWkIvk5aDxtKvDDuw8gItl8pKl42LzjC9KZE0HfGg=
github.com/onsi/gomega v1.7.1 h1:K0jcRCwNQM3vFGh1ppMtDh/+7ApJrjldlX8fA0jDTLQ=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/pelletier/go-toml v1.0.1 h1:0nx4vKBl23+hEaCOV1mFhKS9vhhBtFYWC7rQY0vJAyE=
github.com/pelletier/go-toml v1.0.1/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/peterh/liner v1.0.1-0.20171122030339-3681c2a91233 h1:jmJndGFBPjNWW+MAYarU/Nl8QrQVzbw4B/AYE0LzETo=
github.com/peterh/liner v1.0.1-0.20171122030339-3681c2a91233/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc=
github.com/siddontang/go v0.0.0-20170517070808-cb568a3e5cc0 h1:QIF48X1cihydXibm+4wfAc0r/qyPyuFiPFRNphdMpEE=
github.com/siddontang/go v0.0.0-20170517070808-cb568a3e5cc0/go.mod h1:3yhqj7WBBfRhbBlzyOC3gUxftwsU0u8gqevxwIHQpMw=
github.com/siddontang/goredis v0.0.0-20150324035039-760763f78400 h1:091wFNQB3PXcL5+me0joH7EiyqQaI0wGMpEjVCkK04U=
github.com/siddontang/goredis v0.0.0-20150324035039-760763f78400/go.mod h1:DDcKzU3qCuvj/tPnimWSsZZzvk9qvkvrIL5naVBPh5s=
github.com/siddontang/rdb v0.0.0-20150307021120-fc89ed2e418d h1:NVwnfyR3rENtlz62bcrkXME3INVUa4lcdGt+opvxExs=
github.com/siddontang/rdb v0.0.0-20150307021120-fc89ed2e418d/go.mod h1:AMEsy7v5z92TR1JKMkLLoaOQk++LVnOKL3ScbJ8GNGA=
github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112 h1:NBrpnvz0pDPf3+HXZ1C9GcJd1DTpWDLcLWZhNq6uP7o=
github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112/go.mod h1:Z4AUp2Km+PwemOoO/VB5AOx9XSsIItzFjoJlOSiYmn0=
github.com/ugorji/go v0.0.0-20171122102828-84cb69a8af83 h1:9AUN7+NK4IV+A11igqjQM5i8obiOAQo4SXgjaxe+orI=
github.com/ugorji/go v0.0.0-20171122102828-84cb69a8af83/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ=
github.com/yuin/gopher-lua v0.0.0-20171031051903-609c9cd26973 h1:iCnkJ/qjKZGdZnlcj1N55AxPDan814kpc3s1cDpQKd8=
github.com/yuin/gopher-lua v0.0.0-20171031051903-609c9cd26973/go.mod h1:aEV29XrmTYFr3CiRxZeGHpkvbwq+prZduBqMaascyCU=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd h1:nTDtHvHSdCn1m6ITfMRqtOd/9+7a3s8RBNOZ3eYZzJA=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e h1:N7DeIrjYszNmSW409R3frPPwglRwMkXSBzwVbkOjLLA=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22 h1:VpOs+IwYnYBaFnrNAeB8UUWtL3vEUnzSCL1nVjPhqrw=
gopkg.in/mgo.v2 v2.0.0-20190816093944-a6b53ec6cb22/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
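Each module in go.sum gets up to two lines: an h1: hash of the extracted module tree and a separate hash of its go.mod alone, which is why go.mod-only entries (for example kr/pty above) can appear without a matching h1: line. The recorded checksums can be re-verified locally; a minimal sketch:

```sh
go mod download    # fetch anything missing into the module cache
go mod verify      # recompute module hashes and compare them against go.sum
```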
@@ -0,0 +1,25 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe

# Project-specific files
diff
@@ -0,0 +1,6 @@
language: go
go:
- 1.1
- tip
before_install:
- go get gopkg.in/check.v1
@@ -0,0 +1,21 @@
Copyright (c) 2012 Jonathan Rudenberg
Copyright (c) 2012 Sripathi Krishnan

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,17 @@
# rdb [![Build Status](https://travis-ci.org/cupcake/rdb.png?branch=master)](https://travis-ci.org/cupcake/rdb)

rdb is a Go package that implements parsing and encoding of the
[Redis](http://redis.io) [RDB file
format](https://github.com/sripathikrishnan/redis-rdb-tools/blob/master/docs/RDB_File_Format.textile).

This package was heavily inspired by
[redis-rdb-tools](https://github.com/sripathikrishnan/redis-rdb-tools) by
[Sripathi Krishnan](https://github.com/sripathikrishnan).

[**Documentation**](http://godoc.org/github.com/cupcake/rdb)

## Installation

```
go get github.com/cupcake/rdb
```
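The README above stops at installation; for orientation, here is a minimal decoding sketch based on the package's documented Decoder/NopDecoder pattern. The exact callback set and signatures are assumptions taken from the godoc, so treat this as illustrative rather than authoritative:

```go
package main

import (
	"fmt"
	"os"

	"github.com/cupcake/rdb"
	"github.com/cupcake/rdb/nopdecoder"
)

// keyPrinter embeds NopDecoder so only the callbacks we care about need overriding.
type keyPrinter struct {
	nopdecoder.NopDecoder
}

// Set is invoked for each plain string key in the dump (assumed signature).
func (p *keyPrinter) Set(key, value []byte, expiry int64) {
	fmt.Printf("%s = %s\n", key, value)
}

func main() {
	f, err := os.Open("dump.rdb") // illustrative path
	if err != nil {
		panic(err)
	}
	defer f.Close()
	if err := rdb.Decode(f, &keyPrinter{}); err != nil {
		panic(err)
	}
}
```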
@@ -0,0 +1,8 @@
*.out
*.5
*.6
*.8
*.swp
_obj
_test
testdata
@@ -0,0 +1,12 @@
mmap-go
=======

mmap-go is a portable mmap package for the [Go programming language](http://golang.org).
It has been tested on Linux (386, amd64), OS X, and Windows (386). It should also
work on other Unix-like platforms, but hasn't been tested with them. I'm interested
to hear about the results.

I haven't been able to add more features without adding significant complexity,
so mmap-go doesn't support mprotect, mincore, and maybe a few other things.
If you're running on a Unix-like platform and need some of these features,
I suggest Gustavo Niemeyer's [gommap](http://labix.org/gommap).
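The README gives no usage snippet; a minimal read-only mapping sketch, assuming the package's Map/Unmap API where the returned MMap behaves like a []byte view of the file:

```go
package main

import (
	"fmt"
	"os"

	mmap "github.com/edsrzf/mmap-go"
)

func main() {
	f, err := os.Open("some.file") // any existing file; illustrative path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Map the whole file read-only; the mapping shares memory with the file.
	m, err := mmap.Map(f, mmap.RDONLY, 0)
	if err != nil {
		panic(err)
	}
	defer m.Unmap()

	fmt.Println("mapped", len(m), "bytes")
}
```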
@@ -0,0 +1,7 @@
# gopher-json [![GoDoc](https://godoc.org/layeh.com/gopher-json?status.svg)](https://godoc.org/layeh.com/gopher-json)

Package json is a simple JSON encoder/decoder for [gopher-lua](https://github.com/yuin/gopher-lua).

## License

Public domain.
@@ -0,0 +1,16 @@
cmd/snappytool/snappytool
testdata/bench

# These explicitly listed benchmark data files are for an obsolete version of
# snappy_test.go.
testdata/alice29.txt
testdata/asyoulik.txt
testdata/fireworks.jpeg
testdata/geo.protodata
testdata/html
testdata/html_x_4
testdata/kppkn.gtb
testdata/lcet10.txt
testdata/paper-100k.pdf
testdata/plrabn12.txt
testdata/urls.10K
@@ -0,0 +1,15 @@
# This is the official list of Snappy-Go authors for copyright purposes.
# This file is distinct from the CONTRIBUTORS files.
# See the latter for an explanation.

# Names should be added to this file as
#	Name or Organization <email address>
# The email address is not required for organizations.

# Please keep the list sorted.

Damian Gryski <dgryski@gmail.com>
Google Inc.
Jan Mercl <0xjnml@gmail.com>
Rodolfo Carvalho <rhcarvalho@gmail.com>
Sebastien Binet <seb.binet@gmail.com>
@@ -0,0 +1,37 @@
# This is the official list of people who can contribute
# (and typically have contributed) code to the Snappy-Go repository.
# The AUTHORS file lists the copyright holders; this file
# lists people. For example, Google employees are listed here
# but not in AUTHORS, because Google holds the copyright.
#
# The submission process automatically checks to make sure
# that people submitting code are listed in this file (by email address).
#
# Names should be added to this file only after verifying that
# the individual or the individual's organization has agreed to
# the appropriate Contributor License Agreement, found here:
#
#     http://code.google.com/legal/individual-cla-v1.0.html
#     http://code.google.com/legal/corporate-cla-v1.0.html
#
# The agreement for individuals can be filled out on the web.
#
# When adding J Random Contributor's name to this file,
# either J's name or J's organization's name should be
# added to the AUTHORS file, depending on whether the
# individual or corporate CLA was used.

# Names should be added to this file like so:
#     Name <email address>

# Please keep the list sorted.

Damian Gryski <dgryski@gmail.com>
Jan Mercl <0xjnml@gmail.com>
Kai Backman <kaib@golang.org>
Marc-Antoine Ruel <maruel@chromium.org>
Nigel Tao <nigeltao@golang.org>
Rob Pike <r@golang.org>
Rodolfo Carvalho <rhcarvalho@gmail.com>
Russ Cox <rsc@golang.org>
Sebastien Binet <seb.binet@gmail.com>
@@ -0,0 +1,107 @@
The Snappy compression format in the Go programming language.

To download and install from source:
$ go get github.com/golang/snappy

Unless otherwise noted, the Snappy-Go source files are distributed
under the BSD-style license found in the LICENSE file.



Benchmarks.

The golang/snappy benchmarks include compressing (Z) and decompressing (U) ten
or so files, the same set used by the C++ Snappy code (github.com/google/snappy
and note the "google", not "golang"). On an "Intel(R) Core(TM) i7-3770 CPU @
3.40GHz", Go's GOARCH=amd64 numbers as of 2016-05-29:

"go test -test.bench=."

_UFlat0-8   2.19GB/s ± 0%  html
_UFlat1-8   1.41GB/s ± 0%  urls
_UFlat2-8   23.5GB/s ± 2%  jpg
_UFlat3-8   1.91GB/s ± 0%  jpg_200
_UFlat4-8   14.0GB/s ± 1%  pdf
_UFlat5-8   1.97GB/s ± 0%  html4
_UFlat6-8    814MB/s ± 0%  txt1
_UFlat7-8    785MB/s ± 0%  txt2
_UFlat8-8    857MB/s ± 0%  txt3
_UFlat9-8    719MB/s ± 1%  txt4
_UFlat10-8  2.84GB/s ± 0%  pb
_UFlat11-8  1.05GB/s ± 0%  gaviota

_ZFlat0-8   1.04GB/s ± 0%  html
_ZFlat1-8    534MB/s ± 0%  urls
_ZFlat2-8   15.7GB/s ± 1%  jpg
_ZFlat3-8    740MB/s ± 3%  jpg_200
_ZFlat4-8   9.20GB/s ± 1%  pdf
_ZFlat5-8    991MB/s ± 0%  html4
_ZFlat6-8    379MB/s ± 0%  txt1
_ZFlat7-8    352MB/s ± 0%  txt2
_ZFlat8-8    396MB/s ± 1%  txt3
_ZFlat9-8    327MB/s ± 1%  txt4
_ZFlat10-8  1.33GB/s ± 1%  pb
_ZFlat11-8   605MB/s ± 1%  gaviota



"go test -test.bench=. -tags=noasm"

_UFlat0-8    621MB/s ± 2%  html
_UFlat1-8    494MB/s ± 1%  urls
_UFlat2-8   23.2GB/s ± 1%  jpg
_UFlat3-8   1.12GB/s ± 1%  jpg_200
_UFlat4-8   4.35GB/s ± 1%  pdf
_UFlat5-8    609MB/s ± 0%  html4
_UFlat6-8    296MB/s ± 0%  txt1
_UFlat7-8    288MB/s ± 0%  txt2
_UFlat8-8    309MB/s ± 1%  txt3
_UFlat9-8    280MB/s ± 1%  txt4
_UFlat10-8   753MB/s ± 0%  pb
_UFlat11-8   400MB/s ± 0%  gaviota

_ZFlat0-8    409MB/s ± 1%  html
_ZFlat1-8    250MB/s ± 1%  urls
_ZFlat2-8   12.3GB/s ± 1%  jpg
_ZFlat3-8    132MB/s ± 0%  jpg_200
_ZFlat4-8   2.92GB/s ± 0%  pdf
_ZFlat5-8    405MB/s ± 1%  html4
_ZFlat6-8    179MB/s ± 1%  txt1
_ZFlat7-8    170MB/s ± 1%  txt2
_ZFlat8-8    189MB/s ± 1%  txt3
_ZFlat9-8    164MB/s ± 1%  txt4
_ZFlat10-8   479MB/s ± 1%  pb
_ZFlat11-8   270MB/s ± 1%  gaviota



For comparison (Go's encoded output is byte-for-byte identical to C++'s), here
are the numbers from C++ Snappy's

make CXXFLAGS="-O2 -DNDEBUG -g" clean snappy_unittest.log && cat snappy_unittest.log

BM_UFlat/0   2.4GB/s    html
BM_UFlat/1   1.4GB/s    urls
BM_UFlat/2   21.8GB/s   jpg
BM_UFlat/3   1.5GB/s    jpg_200
BM_UFlat/4   13.3GB/s   pdf
BM_UFlat/5   2.1GB/s    html4
BM_UFlat/6   1.0GB/s    txt1
BM_UFlat/7   959.4MB/s  txt2
BM_UFlat/8   1.0GB/s    txt3
BM_UFlat/9   864.5MB/s  txt4
BM_UFlat/10  2.9GB/s    pb
BM_UFlat/11  1.2GB/s    gaviota

BM_ZFlat/0   944.3MB/s  html (22.31 %)
BM_ZFlat/1   501.6MB/s  urls (47.78 %)
BM_ZFlat/2   14.3GB/s   jpg (99.95 %)
BM_ZFlat/3   538.3MB/s  jpg_200 (73.00 %)
BM_ZFlat/4   8.3GB/s    pdf (83.30 %)
BM_ZFlat/5   903.5MB/s  html4 (22.52 %)
BM_ZFlat/6   336.0MB/s  txt1 (57.88 %)
BM_ZFlat/7   312.3MB/s  txt2 (61.91 %)
BM_ZFlat/8   353.1MB/s  txt3 (54.99 %)
BM_ZFlat/9   289.9MB/s  txt4 (66.26 %)
BM_ZFlat/10  1.2GB/s    pb (19.68 %)
BM_ZFlat/11  527.4MB/s  gaviota (37.72 %)
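The README above focuses on benchmarks; for completeness, the package's block-format API is essentially two calls. A short sketch (Encode and Decode append to the destination slice, so passing nil lets the package allocate):

```go
package main

import (
	"fmt"

	"github.com/golang/snappy"
)

func main() {
	src := []byte("hello hello hello hello") // repetitive data compresses well
	compressed := snappy.Encode(nil, src)    // nil dst: snappy allocates the output
	decoded, err := snappy.Decode(nil, compressed)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d -> %d bytes, roundtrip ok: %v\n",
		len(src), len(compressed), string(decoded) == string(src))
}
```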
@@ -0,0 +1 @@
test_program/test_program_bin
@@ -0,0 +1,23 @@
sudo: false
language: go
go:
  - 1.7.6
  - 1.8.3
  - 1.9
  - tip
matrix:
  allow_failures:
    - go: tip
  fast_finish: true
script:
  - if [ -n "$(go fmt ./...)" ]; then exit 1; fi
  - ./test.sh
  - ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
before_install:
  - go get github.com/axw/gocov/gocov
  - go get github.com/mattn/goveralls
  - if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
branches:
  only: [master]
after_success:
  - $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN
@@ -0,0 +1,119 @@
# go-toml

Go library for the [TOML](https://github.com/mojombo/toml) format.

This library supports TOML version
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)

[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE)
[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml)

## Features

Go-toml provides the following features for using data parsed from TOML documents:

* Load TOML documents from files and string data
* Easily navigate TOML structure using Tree
* Marshaling and unmarshaling to and from data structures
* Line & column position data for all parsed elements
* [Query support similar to JSON-Path](query/)
* Syntax errors contain line and column numbers

## Import

```go
import "github.com/pelletier/go-toml"
```

## Usage example

Read a TOML document:

```go
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)
// retrieve data directly
user := config.Get("postgres.user").(string)

// or using an intermediate object
postgresConfig := config.Get("postgres").(*toml.Tree)
password := postgresConfig.Get("password").(string)
```

Or use Unmarshal:

```go
type Postgres struct {
	User     string
	Password string
}
type Config struct {
	Postgres Postgres
}

doc := []byte(`
[postgres]
user = "pelletier"
password = "mypassword"`)

config := Config{}
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
```

Or use a query:

```go
// use a query to gather elements without walking the tree
q, _ := query.Compile("$..[user,password]")
results := q.Execute(config)
for ii, item := range results.Values() {
	fmt.Printf("Query result %d: %v\n", ii, item)
}
```

## Documentation

The documentation and additional examples are available at
[godoc.org](http://godoc.org/github.com/pelletier/go-toml).

## Tools

Go-toml provides two handy command line tools:

* `tomll`: Reads TOML files and lints them.

```
go install github.com/pelletier/go-toml/cmd/tomll
tomll --help
```

* `tomljson`: Reads a TOML file and outputs its JSON representation.

```
go install github.com/pelletier/go-toml/cmd/tomljson
tomljson --help
```

## Contribute

Feel free to report bugs and send patches using GitHub's pull request system on
[pelletier/go-toml](https://github.com/pelletier/go-toml). Any feedback would be
much appreciated!

### Run tests

You have to make sure two kinds of tests run:

1. The Go unit tests
2. The TOML examples base

You can run both of them using `./test.sh`.

## License

The MIT License (MIT). Read [LICENSE](LICENSE).
@ -0,0 +1,164 @@
|
||||||
|
{
|
||||||
|
"array": {
|
||||||
|
"key1": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"key2": [
|
||||||
|
"red",
|
||||||
|
"yellow",
|
||||||
|
"green"
|
||||||
|
],
|
||||||
|
"key3": [
|
||||||
|
[
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
],
|
||||||
|
[
|
||||||
|
3,
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"key4": [
|
||||||
|
[
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"a",
|
||||||
|
"b",
|
||||||
|
"c"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"key5": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
"key6": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"boolean": {
|
||||||
|
"False": false,
|
||||||
|
"True": true
|
||||||
|
},
|
||||||
|
"datetime": {
|
||||||
|
"key1": "1979-05-27T07:32:00Z",
|
||||||
|
"key2": "1979-05-27T00:32:00-07:00",
|
||||||
|
"key3": "1979-05-27T00:32:00.999999-07:00"
|
||||||
|
},
|
||||||
|
"float": {
|
||||||
|
"both": {
|
||||||
|
"key": 6.626e-34
|
||||||
|
},
|
||||||
|
"exponent": {
|
||||||
|
"key1": 5e+22,
|
||||||
|
"key2": 1000000,
|
||||||
|
"key3": -0.02
|
||||||
|
},
|
||||||
|
"fractional": {
|
||||||
|
"key1": 1,
|
||||||
|
"key2": 3.1415,
|
||||||
|
"key3": -0.01
|
||||||
|
},
|
||||||
|
"underscores": {
|
||||||
|
"key1": 9224617.445991227,
|
||||||
|
"key2": 1e+100
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fruit": [{
|
||||||
|
"name": "apple",
|
||||||
|
"physical": {
|
||||||
|
"color": "red",
|
||||||
|
"shape": "round"
|
||||||
|
},
|
||||||
|
"variety": [{
|
||||||
|
"name": "red delicious"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "granny smith"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "banana",
|
||||||
|
"variety": [{
|
||||||
|
"name": "plantain"
|
||||||
|
}]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"integer": {
|
||||||
|
"key1": 99,
|
||||||
|
"key2": 42,
|
||||||
|
"key3": 0,
|
||||||
|
"key4": -17,
|
||||||
|
"underscores": {
|
||||||
|
"key1": 1000,
|
||||||
|
"key2": 5349221,
|
||||||
|
"key3": 12345
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"products": [{
|
||||||
|
"name": "Hammer",
|
||||||
|
"sku": 738594937
|
||||||
|
},
|
||||||
|
{},
|
||||||
|
{
|
||||||
|
"color": "gray",
|
||||||
|
"name": "Nail",
|
||||||
|
"sku": 284758393
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"string": {
|
||||||
|
"basic": {
|
||||||
|
"basic": "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
|
||||||
|
},
|
||||||
|
"literal": {
|
||||||
|
"multiline": {
|
||||||
|
"lines": "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
|
||||||
|
"regex2": "I [dw]on't need \\d{2} apples"
|
||||||
|
},
|
||||||
|
"quoted": "Tom \"Dubs\" Preston-Werner",
|
||||||
|
"regex": "\u003c\\i\\c*\\s*\u003e",
|
||||||
|
"winpath": "C:\\Users\\nodejs\\templates",
|
||||||
|
"winpath2": "\\\\ServerX\\admin$\\system32\\"
|
||||||
|
},
|
||||||
|
"multiline": {
|
||||||
|
"continued": {
|
||||||
|
"key1": "The quick brown fox jumps over the lazy dog.",
|
||||||
|
"key2": "The quick brown fox jumps over the lazy dog.",
|
||||||
|
"key3": "The quick brown fox jumps over the lazy dog."
|
||||||
|
},
|
||||||
|
"key1": "One\nTwo",
|
||||||
|
"key2": "One\nTwo",
|
||||||
|
"key3": "One\nTwo"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"table": {
|
||||||
|
"inline": {
|
||||||
|
"name": {
|
||||||
|
"first": "Tom",
|
||||||
|
"last": "Preston-Werner"
|
||||||
|
},
|
||||||
|
"point": {
|
||||||
|
"x": 1,
|
||||||
|
"y": 2
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"key": "value",
|
||||||
|
"subtable": {
|
||||||
|
"key": "another value"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"x": {
|
||||||
|
"y": {
|
||||||
|
"z": {
|
||||||
|
"w": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,32 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
reference_ref=${1:-master}
|
||||||
|
reference_git=${2:-.}
|
||||||
|
|
||||||
|
if ! `hash benchstat 2>/dev/null`; then
|
||||||
|
echo "Installing benchstat"
|
||||||
|
go get golang.org/x/perf/cmd/benchstat
|
||||||
|
go install golang.org/x/perf/cmd/benchstat
|
||||||
|
fi
|
||||||
|
|
||||||
|
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
|
||||||
|
ref_tempdir="${tempdir}/ref"
|
||||||
|
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
|
||||||
|
local_benchmark="`pwd`/benchmark-local.txt"
|
||||||
|
|
||||||
|
echo "=== ${reference_ref} (${ref_tempdir})"
|
||||||
|
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
|
||||||
|
pushd ${ref_tempdir} >/dev/null
|
||||||
|
git checkout ${reference_ref} >/dev/null 2>/dev/null
|
||||||
|
go test -bench=. -benchmem | tee ${ref_benchmark}
|
||||||
|
popd >/dev/null
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== local"
|
||||||
|
go test -bench=. -benchmem | tee ${local_benchmark}
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== diff"
|
||||||
|
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
|
|
@ -0,0 +1,244 @@
|
||||||
|
################################################################################
|
||||||
|
## Comment
|
||||||
|
|
||||||
|
# Speak your mind with the hash symbol. They go from the symbol to the end of
|
||||||
|
# the line.
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Table
|
||||||
|
|
||||||
|
# Tables (also known as hash tables or dictionaries) are collections of
|
||||||
|
# key/value pairs. They appear in square brackets on a line by themselves.
|
||||||
|
|
||||||
|
[table]
|
||||||
|
|
||||||
|
key = "value" # Yeah, you can do this.
|
||||||
|
|
||||||
|
# Nested tables are denoted by table names with dots in them. Name your tables
|
||||||
|
# whatever crap you please, just don't use #, ., [ or ].
|
||||||
|
|
||||||
|
[table.subtable]
|
||||||
|
|
||||||
|
key = "another value"
|
||||||
|
|
||||||
|
# You don't need to specify all the super-tables if you don't want to. TOML
|
||||||
|
# knows how to do it for you.
|
||||||
|
|
||||||
|
# [x] you
|
||||||
|
# [x.y] don't
|
||||||
|
# [x.y.z] need these
|
||||||
|
[x.y.z.w] # for this to work
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Inline Table
|
||||||
|
|
||||||
|
# Inline tables provide a more compact syntax for expressing tables. They are
|
||||||
|
# especially useful for grouped data that can otherwise quickly become verbose.
|
||||||
|
# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
|
||||||
|
# allowed between the curly braces unless they are valid within a value.
|
||||||
|
|
||||||
|
[table.inline]
|
||||||
|
|
||||||
|
name = { first = "Tom", last = "Preston-Werner" }
|
||||||
|
point = { x = 1, y = 2 }
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## String
|
||||||
|
|
||||||
|
# There are four ways to express strings: basic, multi-line basic, literal, and
|
||||||
|
# multi-line literal. All strings must contain only valid UTF-8 characters.
|
||||||
|
|
||||||
|
[string.basic]
|
||||||
|
|
||||||
|
basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
|
||||||
|
|
||||||
|
[string.multiline]
|
||||||
|
|
||||||
|
# The following strings are byte-for-byte equivalent:
|
||||||
|
key1 = "One\nTwo"
|
||||||
|
key2 = """One\nTwo"""
|
||||||
|
key3 = """
|
||||||
|
One
|
||||||
|
Two"""
|
||||||
|
|
||||||
|
[string.multiline.continued]
|
||||||
|
|
||||||
|
# The following strings are byte-for-byte equivalent:
|
||||||
|
key1 = "The quick brown fox jumps over the lazy dog."
|
||||||
|
|
||||||
|
key2 = """
|
||||||
|
The quick brown \
|
||||||
|
|
||||||
|
|
||||||
|
fox jumps over \
|
||||||
|
the lazy dog."""
|
||||||
|
|
||||||
|
key3 = """\
|
||||||
|
The quick brown \
|
||||||
|
fox jumps over \
|
||||||
|
the lazy dog.\
|
||||||
|
"""
|
||||||
|
|
||||||
|
[string.literal]
|
||||||
|
|
||||||
|
# What you see is what you get.
|
||||||
|
winpath = 'C:\Users\nodejs\templates'
|
||||||
|
winpath2 = '\\ServerX\admin$\system32\'
|
||||||
|
quoted = 'Tom "Dubs" Preston-Werner'
|
||||||
|
regex = '<\i\c*\s*>'
|
||||||
|
|
||||||
|
|
||||||
|
[string.literal.multiline]
|
||||||
|
|
||||||
|
regex2 = '''I [dw]on't need \d{2} apples'''
|
||||||
|
lines = '''
|
||||||
|
The first newline is
|
||||||
|
trimmed in raw strings.
|
||||||
|
All other whitespace
|
||||||
|
is preserved.
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Integer
|
||||||
|
|
||||||
|
# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
|
||||||
|
# Negative numbers are prefixed with a minus sign.
|
||||||
|
|
||||||
|
[integer]
|
||||||
|
|
||||||
|
key1 = +99
|
||||||
|
key2 = 42
|
||||||
|
key3 = 0
|
||||||
|
key4 = -17
|
||||||
|
|
||||||
|
[integer.underscores]
|
||||||
|
|
||||||
|
# For large numbers, you may use underscores to enhance readability. Each
|
||||||
|
# underscore must be surrounded by at least one digit.
|
||||||
|
key1 = 1_000
|
||||||
|
key2 = 5_349_221
|
||||||
|
key3 = 1_2_3_4_5 # valid but inadvisable
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Float
|
||||||
|
|
||||||
|
# A float consists of an integer part (which may be prefixed with a plus or
|
||||||
|
# minus sign) followed by a fractional part and/or an exponent part.
|
||||||
|
|
||||||
|
[float.fractional]
|
||||||
|
|
||||||
|
key1 = +1.0
|
||||||
|
key2 = 3.1415
|
||||||
|
key3 = -0.01
|
||||||
|
|
||||||
|
[float.exponent]
|
||||||
|
|
||||||
|
key1 = 5e+22
|
||||||
|
key2 = 1e6
|
||||||
|
key3 = -2E-2
|
||||||
|
|
||||||
|
[float.both]
|
||||||
|
|
||||||
|
key = 6.626e-34
|
||||||
|
|
||||||
|
[float.underscores]
|
||||||
|
|
||||||
|
key1 = 9_224_617.445_991_228_313
|
||||||
|
key2 = 1e1_00
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Boolean
|
||||||
|
|
||||||
|
# Booleans are just the tokens you're used to. Always lowercase.
|
||||||
|
|
||||||
|
[boolean]
|
||||||
|
|
||||||
|
True = true
|
||||||
|
False = false
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Datetime
|
||||||
|
|
||||||
|
# Datetimes are RFC 3339 dates.
|
||||||
|
|
||||||
|
[datetime]
|
||||||
|
|
||||||
|
key1 = 1979-05-27T07:32:00Z
|
||||||
|
key2 = 1979-05-27T00:32:00-07:00
|
||||||
|
key3 = 1979-05-27T00:32:00.999999-07:00
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Array
|
||||||
|
|
||||||
|
# Arrays are square brackets with other primitives inside. Whitespace is
|
||||||
|
# ignored. Elements are separated by commas. Data types may not be mixed.
|
||||||
|
|
||||||
|
[array]
|
||||||
|
|
||||||
|
key1 = [ 1, 2, 3 ]
|
||||||
|
key2 = [ "red", "yellow", "green" ]
|
||||||
|
key3 = [ [ 1, 2 ], [3, 4, 5] ]
|
||||||
|
#key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
|
||||||
|
|
||||||
|
# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
|
||||||
|
# also ignore newlines between the brackets. Terminating commas are ok before
|
||||||
|
# the closing bracket.
|
||||||
|
|
||||||
|
key5 = [
|
||||||
|
1, 2, 3
|
||||||
|
]
|
||||||
|
key6 = [
|
||||||
|
1,
|
||||||
|
2, # this is ok
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
################################################################################
|
||||||
|
## Array of Tables
|
||||||
|
|
||||||
|
# These can be expressed by using a table name in double brackets. Each table
|
||||||
|
# with the same double bracketed name will be an element in the array. The
|
||||||
|
# tables are inserted in the order encountered.
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
name = "Hammer"
|
||||||
|
sku = 738594937
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
[[products]]
|
||||||
|
|
||||||
|
name = "Nail"
|
||||||
|
sku = 284758393
|
||||||
|
color = "gray"
|
||||||
|
|
||||||
|
|
||||||
|
# You can create nested arrays of tables as well.
|
||||||
|
|
||||||
|
[[fruit]]
|
||||||
|
name = "apple"
|
||||||
|
|
||||||
|
[fruit.physical]
|
||||||
|
color = "red"
|
||||||
|
shape = "round"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "red delicious"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "granny smith"
|
||||||
|
|
||||||
|
[[fruit]]
|
||||||
|
name = "banana"
|
||||||
|
|
||||||
|
[[fruit.variety]]
|
||||||
|
name = "plantain"
|
|
@ -0,0 +1,121 @@
|
||||||
|
---
|
||||||
|
array:
|
||||||
|
key1:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
key2:
|
||||||
|
- red
|
||||||
|
- yellow
|
||||||
|
- green
|
||||||
|
key3:
|
||||||
|
- - 1
|
||||||
|
- 2
|
||||||
|
- - 3
|
||||||
|
- 4
|
||||||
|
- 5
|
||||||
|
key4:
|
||||||
|
- - 1
|
||||||
|
- 2
|
||||||
|
- - a
|
||||||
|
- b
|
||||||
|
- c
|
||||||
|
key5:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
key6:
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
boolean:
|
||||||
|
'False': false
|
||||||
|
'True': true
|
||||||
|
datetime:
|
||||||
|
key1: '1979-05-27T07:32:00Z'
|
||||||
|
key2: '1979-05-27T00:32:00-07:00'
|
||||||
|
key3: '1979-05-27T00:32:00.999999-07:00'
|
||||||
|
float:
|
||||||
|
both:
|
||||||
|
key: 6.626e-34
|
||||||
|
exponent:
|
||||||
|
key1: 5.0e+22
|
||||||
|
key2: 1000000
|
||||||
|
key3: -0.02
|
||||||
|
fractional:
|
||||||
|
key1: 1
|
||||||
|
key2: 3.1415
|
||||||
|
key3: -0.01
|
||||||
|
underscores:
|
||||||
|
key1: 9224617.445991227
|
||||||
|
key2: 1.0e+100
|
||||||
|
fruit:
|
||||||
|
- name: apple
|
||||||
|
physical:
|
||||||
|
color: red
|
||||||
|
shape: round
|
||||||
|
variety:
|
||||||
|
- name: red delicious
|
||||||
|
- name: granny smith
|
||||||
|
- name: banana
|
||||||
|
variety:
|
||||||
|
- name: plantain
|
||||||
|
integer:
|
||||||
|
key1: 99
|
||||||
|
key2: 42
|
||||||
|
key3: 0
|
||||||
|
key4: -17
|
||||||
|
underscores:
|
||||||
|
key1: 1000
|
||||||
|
key2: 5349221
|
||||||
|
key3: 12345
|
||||||
|
products:
|
||||||
|
- name: Hammer
|
||||||
|
sku: 738594937
|
||||||
|
- {}
|
||||||
|
- color: gray
|
||||||
|
name: Nail
|
||||||
|
sku: 284758393
|
||||||
|
string:
|
||||||
|
basic:
|
||||||
|
basic: "I'm a string. \"You can quote me\". Name\tJosé\nLocation\tSF."
|
||||||
|
literal:
|
||||||
|
multiline:
|
||||||
|
lines: |
|
||||||
|
The first newline is
|
||||||
|
trimmed in raw strings.
|
||||||
|
All other whitespace
|
||||||
|
is preserved.
|
||||||
|
regex2: I [dw]on't need \d{2} apples
|
||||||
|
quoted: Tom "Dubs" Preston-Werner
|
||||||
|
regex: "<\\i\\c*\\s*>"
|
||||||
|
winpath: C:\Users\nodejs\templates
|
||||||
|
winpath2: "\\\\ServerX\\admin$\\system32\\"
|
||||||
|
multiline:
|
||||||
|
continued:
|
||||||
|
key1: The quick brown fox jumps over the lazy dog.
|
||||||
|
key2: The quick brown fox jumps over the lazy dog.
|
||||||
|
key3: The quick brown fox jumps over the lazy dog.
|
||||||
|
key1: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
key2: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
key3: |-
|
||||||
|
One
|
||||||
|
Two
|
||||||
|
table:
|
||||||
|
inline:
|
||||||
|
name:
|
||||||
|
first: Tom
|
||||||
|
last: Preston-Werner
|
||||||
|
point:
|
||||||
|
x: 1
|
||||||
|
y: 2
|
||||||
|
key: value
|
||||||
|
subtable:
|
||||||
|
key: another value
|
||||||
|
x:
|
||||||
|
y:
|
||||||
|
z:
|
||||||
|
w: {}
|
|
@ -0,0 +1,29 @@
|
||||||
|
# This is a TOML document. Boom.
|
||||||
|
|
||||||
|
title = "TOML Example"
|
||||||
|
|
||||||
|
[owner]
|
||||||
|
name = "Tom Preston-Werner"
|
||||||
|
organization = "GitHub"
|
||||||
|
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||||
|
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||||
|
|
||||||
|
[database]
|
||||||
|
server = "192.168.1.1"
|
||||||
|
ports = [ 8001, 8001, 8002 ]
|
||||||
|
connection_max = 5000
|
||||||
|
enabled = true
|
||||||
|
|
||||||
|
[servers]
|
||||||
|
|
||||||
|
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||||
|
[servers.alpha]
|
||||||
|
ip = "10.0.0.1"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[servers.beta]
|
||||||
|
ip = "10.0.0.2"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[clients]
|
||||||
|
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
|
@ -0,0 +1,29 @@
|
||||||
|
# This is a TOML document. Boom.
|
||||||
|
|
||||||
|
title = "TOML Example"
|
||||||
|
|
||||||
|
[owner]
|
||||||
|
name = "Tom Preston-Werner"
|
||||||
|
organization = "GitHub"
|
||||||
|
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||||
|
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||||
|
|
||||||
|
[database]
|
||||||
|
server = "192.168.1.1"
|
||||||
|
ports = [ 8001, 8001, 8002 ]
|
||||||
|
connection_max = 5000
|
||||||
|
enabled = true
|
||||||
|
|
||||||
|
[servers]
|
||||||
|
|
||||||
|
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||||
|
[servers.alpha]
|
||||||
|
ip = "10.0.0.1"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[servers.beta]
|
||||||
|
ip = "10.0.0.2"
|
||||||
|
dc = "eqdc10"
|
||||||
|
|
||||||
|
[clients]
|
||||||
|
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
|
@ -0,0 +1,38 @@
|
||||||
|
title = "TOML Marshal Testing"
|
||||||
|
|
||||||
|
[basic]
|
||||||
|
bool = true
|
||||||
|
date = 1979-05-27T07:32:00Z
|
||||||
|
float = 123.4
|
||||||
|
int = 5000
|
||||||
|
string = "Bite me"
|
||||||
|
uint = 5001
|
||||||
|
|
||||||
|
[basic_lists]
|
||||||
|
bools = [true,false,true]
|
||||||
|
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||||
|
floats = [12.3,45.6,78.9]
|
||||||
|
ints = [8001,8001,8002]
|
||||||
|
strings = ["One","Two","Three"]
|
||||||
|
uints = [5002,5003]
|
||||||
|
|
||||||
|
[basic_map]
|
||||||
|
one = "one"
|
||||||
|
two = "two"
|
||||||
|
|
||||||
|
[subdoc]
|
||||||
|
|
||||||
|
[subdoc.first]
|
||||||
|
name = "First"
|
||||||
|
|
||||||
|
[subdoc.second]
|
||||||
|
name = "Second"
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.First"
|
||||||
|
|
||||||
|
[[subdoclist]]
|
||||||
|
name = "List.Second"
|
||||||
|
|
||||||
|
[[subdocptrs]]
|
||||||
|
name = "Second"
|
|
@ -0,0 +1,90 @@
|
||||||
|
#!/bin/bash
|
||||||
|
# fail out of the script if anything here fails
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# set the path to the present working directory
|
||||||
|
export GOPATH=`pwd`
|
||||||
|
|
||||||
|
function git_clone() {
|
||||||
|
path=$1
|
||||||
|
branch=$2
|
||||||
|
version=$3
|
||||||
|
if [ ! -d "src/$path" ]; then
|
||||||
|
mkdir -p src/$path
|
||||||
|
git clone https://$path.git src/$path
|
||||||
|
fi
|
||||||
|
pushd src/$path
|
||||||
|
git checkout "$branch"
|
||||||
|
git reset --hard "$version"
|
||||||
|
popd
|
||||||
|
}
|
||||||
|
|
||||||
|
# Remove potential previous runs
|
||||||
|
rm -rf src test_program_bin toml-test
|
||||||
|
|
||||||
|
# Run go vet
|
||||||
|
go vet ./...
|
||||||
|
|
||||||
|
go get github.com/pelletier/go-buffruneio
|
||||||
|
go get github.com/davecgh/go-spew/spew
|
||||||
|
go get gopkg.in/yaml.v2
|
||||||
|
go get github.com/BurntSushi/toml
|
||||||
|
|
||||||
|
# get code for BurntSushi TOML validation
|
||||||
|
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)
|
||||||
|
git_clone github.com/BurntSushi/toml master HEAD
|
||||||
|
git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD
|
||||||
|
|
||||||
|
# build the BurntSushi test application
|
||||||
|
go build -o toml-test github.com/BurntSushi/toml-test
|
||||||
|
|
||||||
|
# vendorize the current lib for testing
|
||||||
|
# NOTE: this basically mocks an install without having to go back out to github for code
|
||||||
|
mkdir -p src/github.com/pelletier/go-toml/cmd
|
||||||
|
mkdir -p src/github.com/pelletier/go-toml/query
|
||||||
|
cp *.go *.toml src/github.com/pelletier/go-toml
|
||||||
|
cp -R cmd/* src/github.com/pelletier/go-toml/cmd
|
||||||
|
cp -R query/* src/github.com/pelletier/go-toml/query
|
||||||
|
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
|
||||||
|
|
||||||
|
# Run basic unit tests
|
||||||
|
go test github.com/pelletier/go-toml -covermode=count -coverprofile=coverage.out
|
||||||
|
go test github.com/pelletier/go-toml/cmd/tomljson
|
||||||
|
go test github.com/pelletier/go-toml/query
|
||||||
|
|
||||||
|
# run the entire BurntSushi test suite
|
||||||
|
if [[ $# -eq 0 ]] ; then
|
||||||
|
echo "Running all BurntSushi tests"
|
||||||
|
./toml-test ./test_program_bin | tee test_out
|
||||||
|
else
|
||||||
|
# run a specific test
|
||||||
|
test=$1
|
||||||
|
test_path='src/github.com/BurntSushi/toml-test/tests'
|
||||||
|
valid_test="$test_path/valid/$test"
|
||||||
|
invalid_test="$test_path/invalid/$test"
|
||||||
|
|
||||||
|
if [ -e "$valid_test.toml" ]; then
|
||||||
|
echo "Valid Test TOML for $test:"
|
||||||
|
echo "===="
|
||||||
|
cat "$valid_test.toml"
|
||||||
|
|
||||||
|
echo "Valid Test JSON for $test:"
|
||||||
|
echo "===="
|
||||||
|
cat "$valid_test.json"
|
||||||
|
|
||||||
|
echo "Go-TOML Output for $test:"
|
||||||
|
echo "===="
|
||||||
|
cat "$valid_test.toml" | ./test_program_bin
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -e "$invalid_test.toml" ]; then
|
||||||
|
echo "Invalid Test TOML for $test:"
|
||||||
|
echo "===="
|
||||||
|
cat "$invalid_test.toml"
|
||||||
|
|
||||||
|
echo "Go-TOML Output for $test:"
|
||||||
|
echo "===="
|
||||||
|
echo "go-toml Output:"
|
||||||
|
cat "$invalid_test.toml" | ./test_program_bin
|
||||||
|
fi
|
||||||
|
fi
|
|
@ -0,0 +1,21 @@
Copyright © 2012 Peter Harris

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
@ -0,0 +1,100 @@
Liner
=====

Liner is a command line editor with history. It was inspired by linenoise;
everything Unix-like is a VT100 (or is trying very hard to be). If your
terminal is not pretending to be a VT100, change it. Liner also supports
Windows.

Liner is released under the X11 license (which is similar to the new BSD
license).

Line Editing
------------

The following line editing commands are supported on platforms and terminals
that Liner supports:

Keystroke | Action
--------- | ------
Ctrl-A, Home | Move cursor to beginning of line
Ctrl-E, End | Move cursor to end of line
Ctrl-B, Left | Move cursor one character left
Ctrl-F, Right| Move cursor one character right
Ctrl-Left, Alt-B | Move cursor to previous word
Ctrl-Right, Alt-F | Move cursor to next word
Ctrl-D, Del | (if line is *not* empty) Delete character under cursor
Ctrl-D | (if line *is* empty) End of File - usually quits application
Ctrl-C | Reset input (create new empty prompt)
Ctrl-L | Clear screen (line is unmodified)
Ctrl-T | Transpose previous character with current character
Ctrl-H, BackSpace | Delete character before cursor
Ctrl-W | Delete word leading up to cursor
Ctrl-K | Delete from cursor to end of line
Ctrl-U | Delete from start of line to cursor
Ctrl-P, Up | Previous match from history
Ctrl-N, Down | Next match from history
Ctrl-R | Reverse Search history (Ctrl-S forward, Ctrl-G cancel)
Ctrl-Y | Paste from Yank buffer (Alt-Y to paste next yank instead)
Tab | Next completion
Shift-Tab | (after Tab) Previous completion

Getting started
-----------------

```go
package main

import (
	"log"
	"os"
	"path/filepath"
	"strings"

	"github.com/peterh/liner"
)

var (
	history_fn = filepath.Join(os.TempDir(), ".liner_example_history")
	names      = []string{"john", "james", "mary", "nancy"}
)

func main() {
	line := liner.NewLiner()
	defer line.Close()

	line.SetCtrlCAborts(true)

	line.SetCompleter(func(line string) (c []string) {
		for _, n := range names {
			if strings.HasPrefix(n, strings.ToLower(line)) {
				c = append(c, n)
			}
		}
		return
	})

	if f, err := os.Open(history_fn); err == nil {
		line.ReadHistory(f)
		f.Close()
	}

	if name, err := line.Prompt("What is your name? "); err == nil {
		log.Print("Got: ", name)
		line.AppendHistory(name)
	} else if err == liner.ErrPromptAborted {
		log.Print("Aborted")
	} else {
		log.Print("Error reading line: ", err)
	}

	if f, err := os.Create(history_fn); err != nil {
		log.Print("Error writing history file: ", err)
	} else {
		line.WriteHistory(f)
		f.Close()
	}
}
```

For documentation, see http://godoc.org/github.com/peterh/liner
@ -0,0 +1,13 @@
// Copyright 2012 Gary Burd
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
@ -0,0 +1,3 @@
# rdb

Handling Redis RDB format.
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Wandoujia Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@ -0,0 +1,201 @@
# Codec

High Performance, Feature-Rich Idiomatic Go codec/encoding library for
binc, msgpack, cbor, json.

Supported Serialization formats are:

- msgpack: https://github.com/msgpack/msgpack
- binc: http://github.com/ugorji/binc
- cbor: http://cbor.io http://tools.ietf.org/html/rfc7049
- json: http://json.org http://tools.ietf.org/html/rfc7159
- simple:

To install:

    go get github.com/ugorji/go/codec

This package will carefully use 'unsafe' for performance reasons in specific places.
You can build without unsafe use by passing the safe or appengine tag
i.e. 'go install -tags=safe ...'. Note that unsafe is only supported for the last 3
go sdk versions e.g. current go release is go 1.9, so we support unsafe use only from
go 1.7+. This is because supporting unsafe requires knowledge of implementation details.

Online documentation: http://godoc.org/github.com/ugorji/go/codec
Detailed Usage/How-to Primer: http://ugorji.net/blog/go-codec-primer

The idiomatic Go support is as seen in other encoding packages in
the standard library (i.e. json, xml, gob, etc).

Rich Feature Set includes:

- Simple but extremely powerful and feature-rich API
- Support for go1.4 and above, while selectively using newer APIs for later releases
- Excellent code coverage ( > 90% )
- Very High Performance.
  Our extensive benchmarks show us outperforming Gob, Json, Bson, etc by 2-4X.
- Carefully selected use of 'unsafe' for targeted performance gains.
  100% mode exists where 'unsafe' is not used at all.
- Lock-free (sans mutex) concurrency for scaling to 100's of cores
- Multiple conversions:
  Package coerces types where appropriate
  e.g. decode an int in the stream into a float, etc.
- Corner Cases:
  Overflows, nil maps/slices, nil values in streams are handled correctly
- Standard field renaming via tags
- Support for omitting empty fields during an encoding
- Encoding from any value and decoding into pointer to any value
  (struct, slice, map, primitives, pointers, interface{}, etc)
- Extensions to support efficient encoding/decoding of any named types
- Support encoding.(Binary|Text)(M|Unm)arshaler interfaces
- Decoding without a schema (into an interface{}).
  Includes Options to configure what specific map or slice type to use
  when decoding an encoded list or map into a nil interface{}
- Encode a struct as an array, and decode struct from an array in the data stream
- Comprehensive support for anonymous fields
- Fast (no-reflection) encoding/decoding of common maps and slices
- Code-generation for faster performance.
- Support binary (e.g. messagepack, cbor) and text (e.g. json) formats
- Support indefinite-length formats to enable true streaming
  (for formats which support it e.g. json, cbor)
- Support canonical encoding, where a value is ALWAYS encoded as same sequence of bytes.
  This mostly applies to maps, where iteration order is non-deterministic.
- NIL in data stream decoded as zero value
- Never silently skip data when decoding.
  User decides whether to return an error or silently skip data when keys or indexes
  in the data stream do not map to fields in the struct.
- Encode/Decode from/to chan types (for iterative streaming support)
- Drop-in replacement for encoding/json. `json:` key in struct tag supported.
- Provides an RPC Server and Client Codec for net/rpc communication protocol.
- Handle unique idiosyncrasies of codecs e.g.
  - For messagepack, configure how ambiguities in handling raw bytes are resolved
  - For messagepack, provide rpc server/client codec to support
    msgpack-rpc protocol defined at:
    https://github.com/msgpack-rpc/msgpack-rpc/blob/master/spec.md

## Extension Support

Users can register a function to handle the encoding or decoding of
their custom types.

There are no restrictions on what the custom type can be. Some examples:

    type BisSet []int
    type BitSet64 uint64
    type UUID string
    type MyStructWithUnexportedFields struct { a int; b bool; c []int; }
    type GifImage struct { ... }

As an illustration, MyStructWithUnexportedFields would normally be
encoded as an empty map because it has no exported fields, while UUID
would be encoded as a string. However, with extension support, you can
encode any of these however you like.

## Custom Encoding and Decoding

This package maintains symmetry in the encoding and decoding halves.
We determine how to encode or decode by walking this decision tree:

- is type a codec.Selfer?
- is there an extension registered for the type?
- is format binary, and is type an encoding.BinaryMarshaler and BinaryUnmarshaler?
- is format specifically json, and is type an encoding/json.Marshaler and Unmarshaler?
- is format text-based, and type an encoding.TextMarshaler?
- else we use a pair of functions based on the "kind" of the type e.g. map, slice, int64, etc

This symmetry is important to reduce chances of issues happening because the
encoding and decoding sides are out of sync e.g. decoded via very specific
encoding.TextUnmarshaler but encoded via kind-specific generalized mode.

Consequently, if a type only defines one-half of the symmetry
(e.g. it implements UnmarshalJSON() but not MarshalJSON()),
then that type doesn't satisfy the check and we will continue walking down the
decision tree.
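
As a concrete illustration of the first branch of that tree, the sketch below implements
codec.Selfer for a small named type so that the encode and decode halves stay symmetrical.
This is only a minimal sketch: it assumes codec.Selfer is the CodecEncodeSelf(*Encoder) /
CodecDecodeSelf(*Decoder) method pair, and the UUID type and its plain-string wire form are
chosen purely for illustration.

    package main

    import (
        "fmt"

        "github.com/ugorji/go/codec"
    )

    // UUID round-trips as a plain string, so both halves of the symmetry agree.
    type UUID string

    func (u *UUID) CodecEncodeSelf(e *codec.Encoder) { e.MustEncode(string(*u)) }
    func (u *UUID) CodecDecodeSelf(d *codec.Decoder) {
        var s string
        d.MustDecode(&s)
        *u = UUID(s)
    }

    func main() {
        var jh codec.JsonHandle
        var buf []byte

        in := UUID("123e4567-e89b")
        codec.NewEncoderBytes(&buf, &jh).MustEncode(&in) // *UUID satisfies codec.Selfer

        var out UUID
        codec.NewDecoderBytes(buf, &jh).MustDecode(&out)
        fmt.Println(out) // 123e4567-e89b
    }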

## RPC

RPC Client and Server Codecs are implemented, so the codecs can be used
with the standard net/rpc package.

## Usage

Typical usage model:

    // create and configure Handle
    var (
        bh codec.BincHandle
        mh codec.MsgpackHandle
        ch codec.CborHandle
    )

    mh.MapType = reflect.TypeOf(map[string]interface{}(nil))

    // configure extensions
    // e.g. for msgpack, define functions and enable Time support for tag 1
    // mh.SetExt(reflect.TypeOf(time.Time{}), 1, myExt)

    // create and use decoder/encoder
    var (
        r io.Reader
        w io.Writer
        b []byte
        h = &bh // or mh to use msgpack
    )

    dec = codec.NewDecoder(r, h)
    dec = codec.NewDecoderBytes(b, h)
    err = dec.Decode(&v)

    enc = codec.NewEncoder(w, h)
    enc = codec.NewEncoderBytes(&b, h)
    err = enc.Encode(v)

    // RPC Server
    go func() {
        for {
            conn, err := listener.Accept()
            rpcCodec := codec.GoRpc.ServerCodec(conn, h)
            // OR rpcCodec := codec.MsgpackSpecRpc.ServerCodec(conn, h)
            rpc.ServeCodec(rpcCodec)
        }
    }()

    // RPC Communication (client side)
    conn, err = net.Dial("tcp", "localhost:5555")
    rpcCodec := codec.GoRpc.ClientCodec(conn, h)
    // OR rpcCodec := codec.MsgpackSpecRpc.ClientCodec(conn, h)
    client := rpc.NewClientWithCodec(rpcCodec)

## Running Tests

To run tests, use the following:

    go test

To run the full suite of tests, use the following:

    go test -tags alltests -run Suite

You can run the tag 'safe' to run tests or build in safe mode. e.g.

    go test -tags safe -run Json
    go test -tags "alltests safe" -run Suite

## Running Benchmarks

Please see http://github.com/ugorji/go-codec-bench .

## Caveats

Struct fields matching the following are ignored during encoding and decoding:

- struct tag value set to -
- func, complex numbers, unsafe pointers
- unexported and not embedded
- unexported embedded non-struct
- unexported embedded pointers (from go1.10)

Every other field in a struct will be encoded/decoded.

Embedded fields are encoded as if they exist in the top-level struct,
with some caveats. See Encode documentation.
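
To make those field rules concrete, here is a small hedged sketch (the Record type and its
field names are invented for illustration, and the output comment describes the expected
shape rather than guaranteed byte-for-byte output):

    package main

    import (
        "fmt"

        "github.com/ugorji/go/codec"
    )

    type Record struct {
        Name    string `codec:"name"`       // renamed via the standard struct tag
        Note    string `codec:",omitempty"` // omitted from the stream when empty
        Skipped string `codec:"-"`          // struct tag value set to -: always ignored
        hidden  int    // unexported and not embedded: ignored
        Fn      func() // func fields: ignored
    }

    func main() {
        var jh codec.JsonHandle
        var buf []byte
        codec.NewEncoderBytes(&buf, &jh).MustEncode(Record{Name: "a", Skipped: "x", hidden: 1})
        fmt.Println(string(buf)) // only the "name" key appears; every other field is skipped
    }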
@ -0,0 +1,490 @@
// +build !notfastpath

// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// Code generated from fast-path.go.tmpl - DO NOT EDIT.

package codec

// Fast path functions try to create a fast path encode or decode implementation
// for common maps and slices.
//
// We define the functions and register them in this single file
// so as not to pollute the encode.go and decode.go, and create a dependency in there.
// This file can be omitted without causing a build failure.
//
// The advantage of fast paths is:
//    - Many calls bypass reflection altogether
//
// Currently support
//    - slice of all builtin types,
//    - map of all builtin types to string or interface value
//    - symmetrical maps of all builtin types (e.g. str-str, uint8-uint8)
// This should provide adequate "typical" implementations.
//
// Note that fast track decode functions must handle values for which an address cannot be obtained.
// For example:
//   m2 := map[string]int{}
//   p2 := []interface{}{m2}
//   // decoding into p2 will bomb if fast track functions do not treat like unaddressable.
//

import (
	"reflect"
	"sort"
)

const fastpathEnabled = true
|
||||||
|
|
||||||
|
type fastpathT struct {}
|
||||||
|
|
||||||
|
var fastpathTV fastpathT
|
||||||
|
|
||||||
|
type fastpathE struct {
|
||||||
|
rtid uintptr
|
||||||
|
rt reflect.Type
|
||||||
|
encfn func(*Encoder, *codecFnInfo, reflect.Value)
|
||||||
|
decfn func(*Decoder, *codecFnInfo, reflect.Value)
|
||||||
|
}
|
||||||
|
|
||||||
|
type fastpathA [{{ .FastpathLen }}]fastpathE
|
||||||
|
|
||||||
|
func (x *fastpathA) index(rtid uintptr) int {
|
||||||
|
// use binary search to grab the index (adapted from sort/search.go)
|
||||||
|
h, i, j := 0, 0, {{ .FastpathLen }} // len(x)
|
||||||
|
for i < j {
|
||||||
|
h = i + (j-i)/2
|
||||||
|
if x[h].rtid < rtid {
|
||||||
|
i = h + 1
|
||||||
|
} else {
|
||||||
|
j = h
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if i < {{ .FastpathLen }} && x[i].rtid == rtid {
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
|
||||||
|
type fastpathAslice []fastpathE
|
||||||
|
|
||||||
|
func (x fastpathAslice) Len() int { return len(x) }
|
||||||
|
func (x fastpathAslice) Less(i, j int) bool { return x[i].rtid < x[j].rtid }
|
||||||
|
func (x fastpathAslice) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
|
||||||
|
var fastpathAV fastpathA
|
||||||
|
|
||||||
|
// due to possible initialization loop error, make fastpath in an init()
|
||||||
|
func init() {
|
||||||
|
i := 0
|
||||||
|
fn := func(v interface{},
|
||||||
|
fe func(*Encoder, *codecFnInfo, reflect.Value),
|
||||||
|
fd func(*Decoder, *codecFnInfo, reflect.Value)) (f fastpathE) {
|
||||||
|
xrt := reflect.TypeOf(v)
|
||||||
|
xptr := rt2id(xrt)
|
||||||
|
fastpathAV[i] = fastpathE{xptr, xrt, fe, fd}
|
||||||
|
i++
|
||||||
|
return
|
||||||
|
}
|
||||||
|
{{/* do not register []uint8 in fast-path */}}
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8"}}
|
||||||
|
fn([]{{ .Elem }}(nil), (*Encoder).{{ .MethodNamePfx "fastpathEnc" false }}R, (*Decoder).{{ .MethodNamePfx "fastpathDec" false }}R){{end}}{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
fn(map[{{ .MapKey }}]{{ .Elem }}(nil), (*Encoder).{{ .MethodNamePfx "fastpathEnc" false }}R, (*Decoder).{{ .MethodNamePfx "fastpathDec" false }}R){{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
sort.Sort(fastpathAslice(fastpathAV[:]))
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- encode
|
||||||
|
|
||||||
|
// -- -- fast path type switch
|
||||||
|
func fastpathEncodeTypeSwitch(iv interface{}, e *Encoder) bool {
|
||||||
|
switch v := iv.(type) {
|
||||||
|
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8"}}
|
||||||
|
case []{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||||
|
case *[]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e){{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
case map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||||
|
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e){{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
default:
|
||||||
|
_ = v // TODO: workaround https://github.com/golang/go/issues/12927 (remove after go 1.6 release)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
{{/*
|
||||||
|
**** removing this block, as they are never called directly ****
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
**** removing this block, as they are never called directly ****
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
func fastpathEncodeTypeSwitchSlice(iv interface{}, e *Encoder) bool {
|
||||||
|
switch v := iv.(type) {
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}
|
||||||
|
case []{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||||
|
case *[]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e)
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
default:
|
||||||
|
_ = v // TODO: workaround https://github.com/golang/go/issues/12927 (remove after go 1.6 release)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func fastpathEncodeTypeSwitchMap(iv interface{}, e *Encoder) bool {
|
||||||
|
switch v := iv.(type) {
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
case map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(v, e)
|
||||||
|
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(*v, e)
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
default:
|
||||||
|
_ = v // TODO: workaround https://github.com/golang/go/issues/12927 (remove after go 1.6 release)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
**** removing this block, as they are never called directly ****
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
**** removing this block, as they are never called directly ****
|
||||||
|
*/}}
|
||||||
|
|
||||||
|
// -- -- fast path functions
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}
|
||||||
|
func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||||
|
if f.ti.mbs {
|
||||||
|
fastpathTV.{{ .MethodNamePfx "EncAsMap" false }}V(rv2i(rv).([]{{ .Elem }}), e)
|
||||||
|
} else {
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(rv2i(rv).([]{{ .Elem }}), e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func (_ fastpathT) {{ .MethodNamePfx "Enc" false }}V(v []{{ .Elem }}, e *Encoder) {
|
||||||
|
if v == nil { e.e.EncodeNil(); return }
|
||||||
|
ee, esep := e.e, e.hh.hasElemSeparators()
|
||||||
|
ee.WriteArrayStart(len(v))
|
||||||
|
for _, v2 := range v {
|
||||||
|
if esep { ee.WriteArrayElem() }
|
||||||
|
{{ encmd .Elem "v2"}}
|
||||||
|
}
|
||||||
|
ee.WriteArrayEnd()
|
||||||
|
}
|
||||||
|
func (_ fastpathT) {{ .MethodNamePfx "EncAsMap" false }}V(v []{{ .Elem }}, e *Encoder) {
|
||||||
|
ee, esep := e.e, e.hh.hasElemSeparators()
|
||||||
|
if len(v)%2 == 1 {
|
||||||
|
e.errorf("mapBySlice requires even slice length, but got %v", len(v))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
ee.WriteMapStart(len(v) / 2)
|
||||||
|
for j, v2 := range v {
|
||||||
|
if esep {
|
||||||
|
if j%2 == 0 {
|
||||||
|
ee.WriteMapElemKey()
|
||||||
|
} else {
|
||||||
|
ee.WriteMapElemValue()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
{{ encmd .Elem "v2"}}
|
||||||
|
}
|
||||||
|
ee.WriteMapEnd()
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
func (e *Encoder) {{ .MethodNamePfx "fastpathEnc" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Enc" false }}V(rv2i(rv).(map[{{ .MapKey }}]{{ .Elem }}), e)
|
||||||
|
}
|
||||||
|
func (_ fastpathT) {{ .MethodNamePfx "Enc" false }}V(v map[{{ .MapKey }}]{{ .Elem }}, e *Encoder) {
|
||||||
|
if v == nil { e.e.EncodeNil(); return }
|
||||||
|
ee, esep := e.e, e.hh.hasElemSeparators()
|
||||||
|
ee.WriteMapStart(len(v))
|
||||||
|
{{if eq .MapKey "string"}}asSymbols := e.h.AsSymbols&AsSymbolMapStringKeysFlag != 0
|
||||||
|
{{end}}if e.h.Canonical {
|
||||||
|
{{if eq .MapKey "interface{}"}}{{/* out of band
|
||||||
|
*/}}var mksv []byte = make([]byte, 0, len(v)*16) // temporary byte slice for the encoding
|
||||||
|
e2 := NewEncoderBytes(&mksv, e.hh)
|
||||||
|
v2 := make([]bytesI, len(v))
|
||||||
|
var i, l int
|
||||||
|
var vp *bytesI {{/* put loop variables outside. seems currently needed for better perf */}}
|
||||||
|
for k2, _ := range v {
|
||||||
|
l = len(mksv)
|
||||||
|
e2.MustEncode(k2)
|
||||||
|
vp = &v2[i]
|
||||||
|
vp.v = mksv[l:]
|
||||||
|
vp.i = k2
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
sort.Sort(bytesISlice(v2))
|
||||||
|
for j := range v2 {
|
||||||
|
if esep { ee.WriteMapElemKey() }
|
||||||
|
e.asis(v2[j].v)
|
||||||
|
if esep { ee.WriteMapElemValue() }
|
||||||
|
e.encode(v[v2[j].i])
|
||||||
|
} {{else}}{{ $x := sorttype .MapKey true}}v2 := make([]{{ $x }}, len(v))
|
||||||
|
var i int
|
||||||
|
for k, _ := range v {
|
||||||
|
v2[i] = {{ $x }}(k)
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
sort.Sort({{ sorttype .MapKey false}}(v2))
|
||||||
|
for _, k2 := range v2 {
|
||||||
|
if esep { ee.WriteMapElemKey() }
|
||||||
|
{{if eq .MapKey "string"}}if asSymbols {
|
||||||
|
ee.EncodeSymbol(k2)
|
||||||
|
} else {
|
||||||
|
ee.EncodeString(cUTF8, k2)
|
||||||
|
}{{else}}{{ $y := printf "%s(k2)" .MapKey }}{{ encmd .MapKey $y }}{{end}}
|
||||||
|
if esep { ee.WriteMapElemValue() }
|
||||||
|
{{ $y := printf "v[%s(k2)]" .MapKey }}{{ encmd .Elem $y }}
|
||||||
|
} {{end}}
|
||||||
|
} else {
|
||||||
|
for k2, v2 := range v {
|
||||||
|
if esep { ee.WriteMapElemKey() }
|
||||||
|
{{if eq .MapKey "string"}}if asSymbols {
|
||||||
|
ee.EncodeSymbol(k2)
|
||||||
|
} else {
|
||||||
|
ee.EncodeString(cUTF8, k2)
|
||||||
|
}{{else}}{{ encmd .MapKey "k2"}}{{end}}
|
||||||
|
if esep { ee.WriteMapElemValue() }
|
||||||
|
{{ encmd .Elem "v2"}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ee.WriteMapEnd()
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
// -- decode
|
||||||
|
|
||||||
|
// -- -- fast path type switch
|
||||||
|
func fastpathDecodeTypeSwitch(iv interface{}, d *Decoder) bool {
|
||||||
|
var changed bool
|
||||||
|
switch v := iv.(type) {
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}{{if ne .Elem "uint8"}}
|
||||||
|
case []{{ .Elem }}:
|
||||||
|
var v2 []{{ .Elem }}
|
||||||
|
v2, changed = fastpathTV.{{ .MethodNamePfx "Dec" false }}V(v, false, d)
|
||||||
|
if changed && len(v) > 0 && len(v2) > 0 && !(len(v2) == len(v) && &v2[0] == &v[0]) {
|
||||||
|
copy(v, v2)
|
||||||
|
}
|
||||||
|
case *[]{{ .Elem }}:
|
||||||
|
var v2 []{{ .Elem }}
|
||||||
|
v2, changed = fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*v, true, d)
|
||||||
|
if changed {
|
||||||
|
*v = v2
|
||||||
|
}{{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}{{end}}
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}{{/*
|
||||||
|
// maps only change if nil, and in that case, there's no point copying
|
||||||
|
*/}}
|
||||||
|
case map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Dec" false }}V(v, false, d)
|
||||||
|
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
var v2 map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
v2, changed = fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*v, true, d)
|
||||||
|
if changed {
|
||||||
|
*v = v2
|
||||||
|
}{{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}
|
||||||
|
default:
|
||||||
|
_ = v // TODO: workaround https://github.com/golang/go/issues/12927 (remove after go 1.6 release)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func fastpathDecodeSetZeroTypeSwitch(iv interface{}) bool {
|
||||||
|
switch v := iv.(type) {
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}
|
||||||
|
case *[]{{ .Elem }}:
|
||||||
|
*v = nil {{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
case *map[{{ .MapKey }}]{{ .Elem }}:
|
||||||
|
*v = nil {{/*
|
||||||
|
*/}}{{end}}{{end}}{{end}}
|
||||||
|
default:
|
||||||
|
_ = v // TODO: workaround https://github.com/golang/go/issues/12927 (remove after go 1.6 release)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// -- -- fast path functions
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if not .MapKey }}
|
||||||
|
{{/*
|
||||||
|
Slices can change if they
|
||||||
|
- did not come from an array
|
||||||
|
- are addressable (from a ptr)
|
||||||
|
- are settable (e.g. contained in an interface{})
|
||||||
|
*/}}
|
||||||
|
func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||||
|
if array := f.seq == seqTypeArray; !array && rv.Kind() == reflect.Ptr {
|
||||||
|
vp := rv2i(rv).(*[]{{ .Elem }})
|
||||||
|
v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, !array, d)
|
||||||
|
if changed { *vp = v }
|
||||||
|
} else {
|
||||||
|
v := rv2i(rv).([]{{ .Elem }})
|
||||||
|
v2, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(v, !array, d)
|
||||||
|
if changed && len(v) > 0 && len(v2) > 0 && !(len(v2) == len(v) && &v2[0] == &v[0]) {
|
||||||
|
copy(v, v2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *[]{{ .Elem }}, d *Decoder) {
|
||||||
|
v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d)
|
||||||
|
if changed { *vp = v }
|
||||||
|
}
|
||||||
|
func (_ fastpathT) {{ .MethodNamePfx "Dec" false }}V(v []{{ .Elem }}, canChange bool, d *Decoder) (_ []{{ .Elem }}, changed bool) {
|
||||||
|
dd := d.d{{/*
|
||||||
|
// if dd.isContainerType(valueTypeNil) { dd.TryDecodeAsNil()
|
||||||
|
*/}}
|
||||||
|
slh, containerLenS := d.decSliceHelperStart()
|
||||||
|
if containerLenS == 0 {
|
||||||
|
if canChange {
|
||||||
|
if v == nil { v = []{{ .Elem }}{} } else if len(v) != 0 { v = v[:0] }
|
||||||
|
changed = true
|
||||||
|
}
|
||||||
|
slh.End()
|
||||||
|
return v, changed
|
||||||
|
}
|
||||||
|
hasLen := containerLenS > 0
|
||||||
|
var xlen int
|
||||||
|
if hasLen && canChange {
|
||||||
|
if containerLenS > cap(v) {
|
||||||
|
xlen = decInferLen(containerLenS, d.h.MaxInitLen, {{ .Size }})
|
||||||
|
if xlen <= cap(v) {
|
||||||
|
v = v[:xlen]
|
||||||
|
} else {
|
||||||
|
v = make([]{{ .Elem }}, xlen)
|
||||||
|
}
|
||||||
|
changed = true
|
||||||
|
} else if containerLenS != len(v) {
|
||||||
|
v = v[:containerLenS]
|
||||||
|
changed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
j := 0
|
||||||
|
for ; (hasLen && j < containerLenS) || !(hasLen || dd.CheckBreak()); j++ {
|
||||||
|
if j == 0 && len(v) == 0 && canChange {
|
||||||
|
if hasLen {
|
||||||
|
xlen = decInferLen(containerLenS, d.h.MaxInitLen, {{ .Size }})
|
||||||
|
} else {
|
||||||
|
xlen = 8
|
||||||
|
}
|
||||||
|
v = make([]{{ .Elem }}, xlen)
|
||||||
|
changed = true
|
||||||
|
}
|
||||||
|
// if indefinite, etc, then expand the slice if necessary
|
||||||
|
var decodeIntoBlank bool
|
||||||
|
if j >= len(v) {
|
||||||
|
if canChange {
|
||||||
|
v = append(v, {{ zerocmd .Elem }})
|
||||||
|
changed = true
|
||||||
|
} else {
|
||||||
|
d.arrayCannotExpand(len(v), j+1)
|
||||||
|
decodeIntoBlank = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
slh.ElemContainerState(j)
|
||||||
|
if decodeIntoBlank {
|
||||||
|
d.swallow()
|
||||||
|
} else if dd.TryDecodeAsNil() {
|
||||||
|
v[j] = {{ zerocmd .Elem }}
|
||||||
|
} else {
|
||||||
|
{{ if eq .Elem "interface{}" }}d.decode(&v[j]){{ else }}v[j] = {{ decmd .Elem }}{{ end }}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if canChange {
|
||||||
|
if j < len(v) {
|
||||||
|
v = v[:j]
|
||||||
|
changed = true
|
||||||
|
} else if j == 0 && v == nil {
|
||||||
|
v = make([]{{ .Elem }}, 0)
|
||||||
|
changed = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
slh.End()
|
||||||
|
return v, changed
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values}}{{if not .Primitive}}{{if .MapKey }}
|
||||||
|
{{/*
|
||||||
|
Maps can change if they are
|
||||||
|
- addressable (from a ptr)
|
||||||
|
- settable (e.g. contained in an interface{})
|
||||||
|
*/}}
|
||||||
|
func (d *Decoder) {{ .MethodNamePfx "fastpathDec" false }}R(f *codecFnInfo, rv reflect.Value) {
|
||||||
|
if rv.Kind() == reflect.Ptr {
|
||||||
|
vp := rv2i(rv).(*map[{{ .MapKey }}]{{ .Elem }})
|
||||||
|
v, changed := fastpathTV.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d);
|
||||||
|
if changed { *vp = v }
|
||||||
|
} else {
|
||||||
|
fastpathTV.{{ .MethodNamePfx "Dec" false }}V(rv2i(rv).(map[{{ .MapKey }}]{{ .Elem }}), false, d)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
func (f fastpathT) {{ .MethodNamePfx "Dec" false }}X(vp *map[{{ .MapKey }}]{{ .Elem }}, d *Decoder) {
|
||||||
|
v, changed := f.{{ .MethodNamePfx "Dec" false }}V(*vp, true, d)
|
||||||
|
if changed { *vp = v }
|
||||||
|
}
|
||||||
|
func (_ fastpathT) {{ .MethodNamePfx "Dec" false }}V(v map[{{ .MapKey }}]{{ .Elem }}, canChange bool,
|
||||||
|
d *Decoder) (_ map[{{ .MapKey }}]{{ .Elem }}, changed bool) {
|
||||||
|
dd, esep := d.d, d.hh.hasElemSeparators(){{/*
|
||||||
|
// if dd.isContainerType(valueTypeNil) {dd.TryDecodeAsNil()
|
||||||
|
*/}}
|
||||||
|
containerLen := dd.ReadMapStart()
|
||||||
|
if canChange && v == nil {
|
||||||
|
xlen := decInferLen(containerLen, d.h.MaxInitLen, {{ .Size }})
|
||||||
|
v = make(map[{{ .MapKey }}]{{ .Elem }}, xlen)
|
||||||
|
changed = true
|
||||||
|
}
|
||||||
|
if containerLen == 0 {
|
||||||
|
dd.ReadMapEnd()
|
||||||
|
return v, changed
|
||||||
|
}
|
||||||
|
{{ if eq .Elem "interface{}" }}mapGet := v != nil && !d.h.MapValueReset && !d.h.InterfaceReset
|
||||||
|
{{end}}var mk {{ .MapKey }}
|
||||||
|
var mv {{ .Elem }}
|
||||||
|
hasLen := containerLen > 0
|
||||||
|
for j := 0; (hasLen && j < containerLen) || !(hasLen || dd.CheckBreak()); j++ {
|
||||||
|
if esep { dd.ReadMapElemKey() }
|
||||||
|
{{ if eq .MapKey "interface{}" }}mk = nil
|
||||||
|
d.decode(&mk)
|
||||||
|
if bv, bok := mk.([]byte); bok {
|
||||||
|
mk = d.string(bv) {{/* // maps cannot have []byte as key. switch to string. */}}
|
||||||
|
}{{ else }}mk = {{ decmd .MapKey }}{{ end }}
|
||||||
|
if esep { dd.ReadMapElemValue() }
|
||||||
|
if dd.TryDecodeAsNil() {
|
||||||
|
if v == nil {} else if d.h.DeleteOnNilMapValue { delete(v, mk) } else { v[mk] = {{ zerocmd .Elem }} }
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
{{ if eq .Elem "interface{}" }}if mapGet { mv = v[mk] } else { mv = nil }
|
||||||
|
d.decode(&mv){{ else }}mv = {{ decmd .Elem }}{{ end }}
|
||||||
|
if v != nil { v[mk] = mv }
|
||||||
|
}
|
||||||
|
dd.ReadMapEnd()
|
||||||
|
return v, changed
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
|
@ -0,0 +1,77 @@
|
||||||
|
{{var "v"}} := {{if not isArray}}*{{end}}{{ .Varname }}
|
||||||
|
{{var "h"}}, {{var "l"}} := z.DecSliceHelperStart() {{/* // helper, containerLenS */}}{{if not isArray}}
|
||||||
|
var {{var "c"}} bool {{/* // changed */}}
|
||||||
|
_ = {{var "c"}}{{end}}
|
||||||
|
if {{var "l"}} == 0 {
|
||||||
|
{{if isSlice }}if {{var "v"}} == nil {
|
||||||
|
{{var "v"}} = []{{ .Typ }}{}
|
||||||
|
{{var "c"}} = true
|
||||||
|
} else if len({{var "v"}}) != 0 {
|
||||||
|
{{var "v"}} = {{var "v"}}[:0]
|
||||||
|
{{var "c"}} = true
|
||||||
|
} {{end}} {{if isChan }}if {{var "v"}} == nil {
|
||||||
|
{{var "v"}} = make({{ .CTyp }}, 0)
|
||||||
|
{{var "c"}} = true
|
||||||
|
} {{end}}
|
||||||
|
} else {
|
||||||
|
{{var "hl"}} := {{var "l"}} > 0
|
||||||
|
var {{var "rl"}} int; _ = {{var "rl"}}
|
||||||
|
{{if isSlice }} if {{var "hl"}} {
|
||||||
|
if {{var "l"}} > cap({{var "v"}}) {
|
||||||
|
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||||
|
if {{var "rl"}} <= cap({{var "v"}}) {
|
||||||
|
{{var "v"}} = {{var "v"}}[:{{var "rl"}}]
|
||||||
|
} else {
|
||||||
|
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
|
||||||
|
}
|
||||||
|
{{var "c"}} = true
|
||||||
|
} else if {{var "l"}} != len({{var "v"}}) {
|
||||||
|
{{var "v"}} = {{var "v"}}[:{{var "l"}}]
|
||||||
|
{{var "c"}} = true
|
||||||
|
}
|
||||||
|
} {{end}}
|
||||||
|
var {{var "j"}} int
|
||||||
|
// var {{var "dn"}} bool
|
||||||
|
for ; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
|
||||||
|
{{if not isArray}} if {{var "j"}} == 0 && len({{var "v"}}) == 0 {
|
||||||
|
if {{var "hl"}} {
|
||||||
|
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
|
||||||
|
} else {
|
||||||
|
{{var "rl"}} = 8
|
||||||
|
}
|
||||||
|
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
|
||||||
|
{{var "c"}} = true
|
||||||
|
}{{end}}
|
||||||
|
{{var "h"}}.ElemContainerState({{var "j"}})
|
||||||
|
// {{var "dn"}} = r.TryDecodeAsNil()
|
||||||
|
{{if isChan}}{{ $x := printf "%[1]vv%[2]v" .TempVar .Rand }}var {{var $x}} {{ .Typ }}
|
||||||
|
{{ decLineVar $x }}
|
||||||
|
{{var "v"}} <- {{ $x }}
|
||||||
|
{{else}}
|
||||||
|
// if indefinite, etc, then expand the slice if necessary
|
||||||
|
var {{var "db"}} bool
|
||||||
|
if {{var "j"}} >= len({{var "v"}}) {
|
||||||
|
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }}); {{var "c"}} = true
|
||||||
|
{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
|
||||||
|
{{end}}
|
||||||
|
}
|
||||||
|
if {{var "db"}} {
|
||||||
|
z.DecSwallow()
|
||||||
|
} else {
|
||||||
|
{{ $x := printf "%[1]vv%[2]v[%[1]vj%[2]v]" .TempVar .Rand }}{{ decLineVar $x }}
|
||||||
|
}
|
||||||
|
{{end}}
|
||||||
|
}
|
||||||
|
{{if isSlice}} if {{var "j"}} < len({{var "v"}}) {
|
||||||
|
{{var "v"}} = {{var "v"}}[:{{var "j"}}]
|
||||||
|
{{var "c"}} = true
|
||||||
|
} else if {{var "j"}} == 0 && {{var "v"}} == nil {
|
||||||
|
{{var "v"}} = make([]{{ .Typ }}, 0)
|
||||||
|
{{var "c"}} = true
|
||||||
|
} {{end}}
|
||||||
|
}
|
||||||
|
{{var "h"}}.End()
|
||||||
|
{{if not isArray }}if {{var "c"}} {
|
||||||
|
*{{ .Varname }} = {{var "v"}}
|
||||||
|
}{{end}}
|
||||||
|
|
|
@ -0,0 +1,42 @@
|
||||||
|
{{var "v"}} := *{{ .Varname }}
|
||||||
|
{{var "l"}} := r.ReadMapStart()
|
||||||
|
{{var "bh"}} := z.DecBasicHandle()
|
||||||
|
if {{var "v"}} == nil {
|
||||||
|
{{var "rl"}} := z.DecInferLen({{var "l"}}, {{var "bh"}}.MaxInitLen, {{ .Size }})
|
||||||
|
{{var "v"}} = make(map[{{ .KTyp }}]{{ .Typ }}, {{var "rl"}})
|
||||||
|
*{{ .Varname }} = {{var "v"}}
|
||||||
|
}
|
||||||
|
var {{var "mk"}} {{ .KTyp }}
|
||||||
|
var {{var "mv"}} {{ .Typ }}
|
||||||
|
var {{var "mg"}}, {{var "mdn"}} {{if decElemKindPtr}}, {{var "ms"}}, {{var "mok"}}{{end}} bool
|
||||||
|
if {{var "bh"}}.MapValueReset {
|
||||||
|
{{if decElemKindPtr}}{{var "mg"}} = true
|
||||||
|
{{else if decElemKindIntf}}if !{{var "bh"}}.InterfaceReset { {{var "mg"}} = true }
|
||||||
|
{{else if not decElemKindImmutable}}{{var "mg"}} = true
|
||||||
|
{{end}} }
|
||||||
|
if {{var "l"}} != 0 {
|
||||||
|
{{var "hl"}} := {{var "l"}} > 0
|
||||||
|
for {{var "j"}} := 0; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
|
||||||
|
r.ReadMapElemKey() {{/* z.DecSendContainerState(codecSelfer_containerMapKey{{ .Sfx }}) */}}
|
||||||
|
{{ $x := printf "%vmk%v" .TempVar .Rand }}{{ decLineVarK $x }}
|
||||||
|
{{ if eq .KTyp "interface{}" }}{{/* // special case if a byte array. */}}if {{var "bv"}}, {{var "bok"}} := {{var "mk"}}.([]byte); {{var "bok"}} {
|
||||||
|
{{var "mk"}} = string({{var "bv"}})
|
||||||
|
}{{ end }}{{if decElemKindPtr}}
|
||||||
|
{{var "ms"}} = true{{end}}
|
||||||
|
if {{var "mg"}} {
|
||||||
|
{{if decElemKindPtr}}{{var "mv"}}, {{var "mok"}} = {{var "v"}}[{{var "mk"}}]
|
||||||
|
if {{var "mok"}} {
|
||||||
|
{{var "ms"}} = false
|
||||||
|
} {{else}}{{var "mv"}} = {{var "v"}}[{{var "mk"}}] {{end}}
|
||||||
|
} {{if not decElemKindImmutable}}else { {{var "mv"}} = {{decElemZero}} }{{end}}
|
||||||
|
r.ReadMapElemValue() {{/* z.DecSendContainerState(codecSelfer_containerMapValue{{ .Sfx }}) */}}
|
||||||
|
{{var "mdn"}} = false
|
||||||
|
{{ $x := printf "%vmv%v" .TempVar .Rand }}{{ $y := printf "%vmdn%v" .TempVar .Rand }}{{ decLineVar $x $y }}
|
||||||
|
if {{var "mdn"}} {
|
||||||
|
if {{ var "bh" }}.DeleteOnNilMapValue { delete({{var "v"}}, {{var "mk"}}) } else { {{var "v"}}[{{var "mk"}}] = {{decElemZero}} }
|
||||||
|
} else if {{if decElemKindPtr}} {{var "ms"}} && {{end}} {{var "v"}} != nil {
|
||||||
|
{{var "v"}}[{{var "mk"}}] = {{var "mv"}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} // else len==0: TODO: Should we clear map entries?
|
||||||
|
r.ReadMapEnd() {{/* z.DecSendContainerState(codecSelfer_containerMapEnd{{ .Sfx }}) */}}
|
|
@ -0,0 +1,244 @@
/* // +build ignore */

// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// Code generated from gen-helper.go.tmpl - DO NOT EDIT.

package codec

import (
	"encoding"
	"reflect"
)

// GenVersion is the current version of codecgen.
const GenVersion = {{ .Version }}

// This file is used to generate helper code for codecgen.
// The values here i.e. genHelper(En|De)coder are not to be used directly by
// library users. They WILL change continuously and without notice.
//
// To help enforce this, we create an unexported type with exported members.
// The only way to get the type is via the one exported type that we control (somewhat).
//
// When static codecs are created for types, they will use this value
// to perform encoding or decoding of primitives or known slice or map types.
|
||||||
|
|
||||||
|
// GenHelperEncoder is exported so that it can be used externally by codecgen.
|
||||||
|
//
|
||||||
|
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINOUSLY WITHOUT NOTICE.
|
||||||
|
func GenHelperEncoder(e *Encoder) (ge genHelperEncoder, ee encDriver) {
|
||||||
|
ge = genHelperEncoder{e:e}
|
||||||
|
ee = e.e
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// GenHelperDecoder is exported so that it can be used externally by codecgen.
|
||||||
|
//
|
||||||
|
// Library users: DO NOT USE IT DIRECTLY. IT WILL CHANGE CONTINOUSLY WITHOUT NOTICE.
|
||||||
|
func GenHelperDecoder(d *Decoder) (gd genHelperDecoder, dd decDriver) {
|
||||||
|
gd = genHelperDecoder{d:d}
|
||||||
|
dd = d.d
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
type genHelperEncoder struct {
|
||||||
|
e *Encoder
|
||||||
|
F fastpathT
|
||||||
|
}
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
type genHelperDecoder struct {
|
||||||
|
d *Decoder
|
||||||
|
F fastpathT
|
||||||
|
}
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncBasicHandle() *BasicHandle {
|
||||||
|
return f.e.h
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncBinary() bool {
|
||||||
|
return f.e.cf.be // f.e.hh.isBinaryEncoding()
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncFallback(iv interface{}) {
|
||||||
|
// println(">>>>>>>>> EncFallback")
|
||||||
|
// f.e.encodeI(iv, false, false)
|
||||||
|
f.e.encodeValue(reflect.ValueOf(iv), nil, false)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncTextMarshal(iv encoding.TextMarshaler) {
|
||||||
|
bs, fnerr := iv.MarshalText()
|
||||||
|
f.e.marshal(bs, fnerr, false, cUTF8)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncJSONMarshal(iv jsonMarshaler) {
|
||||||
|
bs, fnerr := iv.MarshalJSON()
|
||||||
|
f.e.marshal(bs, fnerr, true, cUTF8)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncBinaryMarshal(iv encoding.BinaryMarshaler) {
|
||||||
|
bs, fnerr := iv.MarshalBinary()
|
||||||
|
f.e.marshal(bs, fnerr, false, cRAW)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncRaw(iv Raw) { f.e.rawBytes(iv) }
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
//
|
||||||
|
// Deprecated: builtin no longer supported - so we make this method a no-op,
|
||||||
|
// but leave in-place so that old generated files continue to work without regeneration.
|
||||||
|
func (f genHelperEncoder) TimeRtidIfBinc() (v uintptr) { return }
|
||||||
|
// func (f genHelperEncoder) TimeRtidIfBinc() uintptr {
|
||||||
|
// if _, ok := f.e.hh.(*BincHandle); ok {
|
||||||
|
// return timeTypId
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) IsJSONHandle() bool {
|
||||||
|
return f.e.cf.js
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) I2Rtid(v interface{}) uintptr {
|
||||||
|
return i2rtid(v)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
|
||||||
|
return f.e.h.getExt(rtid)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncExtension(v interface{}, xfFn *extTypeTagFn) {
|
||||||
|
f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext, f.e)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) HasExtensions() bool {
|
||||||
|
return len(f.e.h.extHandle) != 0
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperEncoder) EncExt(v interface{}) (r bool) {
|
||||||
|
xfFn := f.e.h.getExt(i2rtid(v))
|
||||||
|
if xfFn != nil {
|
||||||
|
f.e.e.EncodeExt(v, xfFn.tag, xfFn.ext, f.e)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------- DECODER FOLLOWS -----------------
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecBasicHandle() *BasicHandle {
|
||||||
|
return f.d.h
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecBinary() bool {
|
||||||
|
return f.d.be // f.d.hh.isBinaryEncoding()
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecSwallow() { f.d.swallow() }
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecScratchBuffer() []byte {
|
||||||
|
return f.d.b[:]
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecFallback(iv interface{}, chkPtr bool) {
|
||||||
|
// println(">>>>>>>>> DecFallback")
|
||||||
|
rv := reflect.ValueOf(iv)
|
||||||
|
if chkPtr {
|
||||||
|
rv = f.d.ensureDecodeable(rv)
|
||||||
|
}
|
||||||
|
f.d.decodeValue(rv, nil, false)
|
||||||
|
// f.d.decodeValueFallback(rv)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecSliceHelperStart() (decSliceHelper, int) {
|
||||||
|
return f.d.decSliceHelperStart()
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecStructFieldNotFound(index int, name string) {
|
||||||
|
f.d.structFieldNotFound(index, name)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecArrayCannotExpand(sliceLen, streamLen int) {
|
||||||
|
f.d.arrayCannotExpand(sliceLen, streamLen)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecTextUnmarshal(tm encoding.TextUnmarshaler) {
|
||||||
|
fnerr := tm.UnmarshalText(f.d.d.DecodeStringAsBytes())
|
||||||
|
if fnerr != nil {
|
||||||
|
panic(fnerr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecJSONUnmarshal(tm jsonUnmarshaler) {
|
||||||
|
// bs := f.dd.DecodeStringAsBytes()
|
||||||
|
// grab the bytes to be read, as UnmarshalJSON needs the full JSON so as to unmarshal it itself.
|
||||||
|
fnerr := tm.UnmarshalJSON(f.d.nextValueBytes())
|
||||||
|
if fnerr != nil {
|
||||||
|
panic(fnerr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecBinaryUnmarshal(bm encoding.BinaryUnmarshaler) {
|
||||||
|
fnerr := bm.UnmarshalBinary(f.d.d.DecodeBytes(nil, true))
|
||||||
|
if fnerr != nil {
|
||||||
|
panic(fnerr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecRaw() []byte { return f.d.rawBytes() }
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
//
|
||||||
|
// Deprecated: builtin no longer supported - so we make this method a no-op,
|
||||||
|
// but leave in-place so that old generated files continue to work without regeneration.
|
||||||
|
func (f genHelperDecoder) TimeRtidIfBinc() (v uintptr) { return }
|
||||||
|
// func (f genHelperDecoder) TimeRtidIfBinc() uintptr {
|
||||||
|
// // Note: builtin is no longer supported - so make this a no-op
|
||||||
|
// if _, ok := f.d.hh.(*BincHandle); ok {
|
||||||
|
// return timeTypId
|
||||||
|
// }
|
||||||
|
// return 0
|
||||||
|
// }
|
||||||
|
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) IsJSONHandle() bool {
|
||||||
|
return f.d.js
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) I2Rtid(v interface{}) uintptr {
|
||||||
|
return i2rtid(v)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) Extension(rtid uintptr) (xfn *extTypeTagFn) {
|
||||||
|
return f.d.h.getExt(rtid)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecExtension(v interface{}, xfFn *extTypeTagFn) {
|
||||||
|
f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) HasExtensions() bool {
|
||||||
|
return len(f.d.h.extHandle) != 0
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecExt(v interface{}) (r bool) {
|
||||||
|
xfFn := f.d.h.getExt(i2rtid(v))
|
||||||
|
if xfFn != nil {
|
||||||
|
f.d.d.DecodeExt(v, xfFn.tag, xfFn.ext)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
func (f genHelperDecoder) DecInferLen(clen, maxlen, unit int) (rvlen int) {
|
||||||
|
return decInferLen(clen, maxlen, unit)
|
||||||
|
}
|
||||||
|
// FOR USE BY CODECGEN ONLY. IT *WILL* CHANGE WITHOUT NOTICE. *DO NOT USE*
|
||||||
|
//
|
||||||
|
// Deprecated: no longer used,
|
||||||
|
// but leave in-place so that old generated files continue to work without regeneration.
|
||||||
|
func (f genHelperDecoder) StringView(v []byte) string { return stringView(v) }
|
||||||
|
|
|
@ -0,0 +1,154 @@
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.

// Code generated from mammoth-test.go.tmpl - DO NOT EDIT.

package codec

import "testing"
import "fmt"
import "reflect"

// TestMammoth has all the different paths optimized in fast-path
// It has all the primitives, slices and maps.
//
// For each of those types, it has a pointer and a non-pointer field.

func init() { _ = fmt.Printf } // so we can include fmt as needed
|
||||||
|
|
||||||
|
type TestMammoth struct {
|
||||||
|
|
||||||
|
{{range .Values }}{{if .Primitive }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" true }} {{ .Primitive }}
|
||||||
|
{{ .MethodNamePfx "Fptr" true }} *{{ .Primitive }}
|
||||||
|
{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if not .MapKey }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" false }} []{{ .Elem }}
|
||||||
|
{{ .MethodNamePfx "Fptr" false }} *[]{{ .Elem }}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if .MapKey }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
{{ .MethodNamePfx "Fptr" false }} *map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if not .MapKey }}{{/*
|
||||||
|
*/}} type {{ .MethodNamePfx "typMbs" false }} []{{ .Elem }}
|
||||||
|
func (_ {{ .MethodNamePfx "typMbs" false }}) MapBySlice() { }
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if .MapKey }}{{/*
|
||||||
|
*/}} type {{ .MethodNamePfx "typMap" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
func doTestMammothSlices(t *testing.T, h Handle) {
|
||||||
|
{{range $i, $e := .Values }}{{if not .Primitive }}{{if not .MapKey }}{{/*
|
||||||
|
*/}}
|
||||||
|
var v{{$i}}va [8]{{ .Elem }}
|
||||||
|
for _, v := range [][]{{ .Elem }}{ nil, {}, { {{ nonzerocmd .Elem }}, {{ zerocmd .Elem }}, {{ zerocmd .Elem }}, {{ nonzerocmd .Elem }} } } { {{/*
|
||||||
|
// fmt.Printf(">>>> running mammoth slice v{{$i}}: %v\n", v)
|
||||||
|
// - encode value to some []byte
|
||||||
|
// - decode into a length-wise-equal []byte
|
||||||
|
// - check if equal to initial slice
|
||||||
|
// - encode ptr to the value
|
||||||
|
// - check if encode bytes are same
|
||||||
|
// - decode into ptrs to: nil, then 1-elem slice, equal-length, then large len slice
|
||||||
|
// - decode into non-addressable slice of equal length, then larger len
|
||||||
|
// - for each decode, compare elem-by-elem to the original slice
|
||||||
|
// -
|
||||||
|
// - rinse and repeat for a MapBySlice version
|
||||||
|
// -
|
||||||
|
*/}}
|
||||||
|
var v{{$i}}v1, v{{$i}}v2 []{{ .Elem }}
|
||||||
|
v{{$i}}v1 = v
|
||||||
|
bs{{$i}} := testMarshalErr(v{{$i}}v1, h, t, "enc-slice-v{{$i}}")
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||||
|
testUnmarshalErr(v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}")
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||||
|
testUnmarshalErr(reflect.ValueOf(v{{$i}}v2), bs{{$i}}, h, t, "dec-slice-v{{$i}}-noaddr") // non-addressable value
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-noaddr")
|
||||||
|
// ...
|
||||||
|
bs{{$i}} = testMarshalErr(&v{{$i}}v1, h, t, "enc-slice-v{{$i}}-p")
|
||||||
|
v{{$i}}v2 = nil
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p")
|
||||||
|
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||||
|
v{{$i}}v2 = v{{$i}}va[:1:1]
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-1")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-1")
|
||||||
|
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||||
|
v{{$i}}v2 = v{{$i}}va[:len(v{{$i}}v1):len(v{{$i}}v1)]
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-len")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-len")
|
||||||
|
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||||
|
v{{$i}}v2 = v{{$i}}va[:]
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-cap")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-slice-v{{$i}}-p-cap")
|
||||||
|
if len(v{{$i}}v1) > 1 {
|
||||||
|
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||||
|
testUnmarshalErr((&v{{$i}}va)[:len(v{{$i}}v1)], bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-len-noaddr")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}va[:len(v{{$i}}v1)], t, "equal-slice-v{{$i}}-p-len-noaddr")
|
||||||
|
v{{$i}}va = [8]{{ .Elem }}{} // clear the array
|
||||||
|
testUnmarshalErr((&v{{$i}}va)[:], bs{{$i}}, h, t, "dec-slice-v{{$i}}-p-cap-noaddr")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}va[:len(v{{$i}}v1)], t, "equal-slice-v{{$i}}-p-cap-noaddr")
|
||||||
|
}
|
||||||
|
// ...
|
||||||
|
var v{{$i}}v3, v{{$i}}v4 {{ .MethodNamePfx "typMbs" false }}
|
||||||
|
v{{$i}}v2 = nil
|
||||||
|
if v != nil { v{{$i}}v2 = make([]{{ .Elem }}, len(v)) }
|
||||||
|
v{{$i}}v3 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v1)
|
||||||
|
v{{$i}}v4 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v2)
|
||||||
|
bs{{$i}} = testMarshalErr(v{{$i}}v3, h, t, "enc-slice-v{{$i}}-custom")
|
||||||
|
testUnmarshalErr(v{{$i}}v4, bs{{$i}}, h, t, "dec-slice-v{{$i}}-custom")
|
||||||
|
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-slice-v{{$i}}-custom")
|
||||||
|
bs{{$i}} = testMarshalErr(&v{{$i}}v3, h, t, "enc-slice-v{{$i}}-custom-p")
|
||||||
|
v{{$i}}v2 = nil
|
||||||
|
v{{$i}}v4 = {{ .MethodNamePfx "typMbs" false }}(v{{$i}}v2)
|
||||||
|
testUnmarshalErr(&v{{$i}}v4, bs{{$i}}, h, t, "dec-slice-v{{$i}}-custom-p")
|
||||||
|
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-slice-v{{$i}}-custom-p")
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
}
|
||||||
|
|
||||||
|
func doTestMammothMaps(t *testing.T, h Handle) {
|
||||||
|
{{range $i, $e := .Values }}{{if not .Primitive }}{{if .MapKey }}{{/*
|
||||||
|
*/}}
|
||||||
|
for _, v := range []map[{{ .MapKey }}]{{ .Elem }}{ nil, {}, { {{ nonzerocmd .MapKey }}:{{ zerocmd .Elem }} {{if ne "bool" .MapKey}}, {{ nonzerocmd .MapKey }}:{{ nonzerocmd .Elem }} {{end}} } } {
|
||||||
|
// fmt.Printf(">>>> running mammoth map v{{$i}}: %v\n", v)
|
||||||
|
var v{{$i}}v1, v{{$i}}v2 map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
v{{$i}}v1 = v
|
||||||
|
bs{{$i}} := testMarshalErr(v{{$i}}v1, h, t, "enc-map-v{{$i}}")
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||||
|
testUnmarshalErr(v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}")
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||||
|
testUnmarshalErr(reflect.ValueOf(v{{$i}}v2), bs{{$i}}, h, t, "dec-map-v{{$i}}-noaddr") // decode into non-addressable map value
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-noaddr")
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-len")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-p-len")
|
||||||
|
bs{{$i}} = testMarshalErr(&v{{$i}}v1, h, t, "enc-map-v{{$i}}-p")
|
||||||
|
v{{$i}}v2 = nil
|
||||||
|
testUnmarshalErr(&v{{$i}}v2, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-nil")
|
||||||
|
testDeepEqualErr(v{{$i}}v1, v{{$i}}v2, t, "equal-map-v{{$i}}-p-nil")
|
||||||
|
// ...
|
||||||
|
if v == nil { v{{$i}}v2 = nil } else { v{{$i}}v2 = make(map[{{ .MapKey }}]{{ .Elem }}, len(v)) } // reset map
|
||||||
|
var v{{$i}}v3, v{{$i}}v4 {{ .MethodNamePfx "typMap" false }}
|
||||||
|
v{{$i}}v3 = {{ .MethodNamePfx "typMap" false }}(v{{$i}}v1)
|
||||||
|
v{{$i}}v4 = {{ .MethodNamePfx "typMap" false }}(v{{$i}}v2)
|
||||||
|
bs{{$i}} = testMarshalErr(v{{$i}}v3, h, t, "enc-map-v{{$i}}-custom")
|
||||||
|
testUnmarshalErr(v{{$i}}v4, bs{{$i}}, h, t, "dec-map-v{{$i}}-p-len")
|
||||||
|
testDeepEqualErr(v{{$i}}v3, v{{$i}}v4, t, "equal-map-v{{$i}}-p-len")
|
||||||
|
}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func doTestMammothMapsAndSlices(t *testing.T, h Handle) {
|
||||||
|
doTestMammothSlices(t, h)
|
||||||
|
doTestMammothMaps(t, h)
|
||||||
|
}
|
|
@ -0,0 +1,94 @@
|
||||||
|
// +build !notfastpath
|
||||||
|
|
||||||
|
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
|
||||||
|
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
||||||
|
|
||||||
|
// Code generated from mammoth2-test.go.tmpl - DO NOT EDIT.
|
||||||
|
|
||||||
|
package codec
|
||||||
|
|
||||||
|
// Increase code coverage by covering all the codecgen paths, in fast-path and gen-helper.go....
//
// Add:
// - test file for creating a mammoth generated file as _mammoth_generated.go
// - generate a second mammoth file in a different file: mammoth2_generated_test.go
// - mammoth-test.go.tmpl will do this
// - run codecgen on it, into mammoth2_codecgen_generated_test.go (no build tags)
// - run it also as part of TestMammoth
// - this will cover all the codecgen, gen-helper, etc in one full run
// - check the mammoth* files into github also
// - then
//
// Now, add some types:
// - some that implement BinaryMarshal, TextMarshal, JSONMarshal, and one that implements none of them
// - create a wrapper type that includes TestMammoth2, with it in slices, and maps, and the custom types
// - this wrapper object is what we encode/decode (so that the codecgen methods are called)
|
||||||
|
|
||||||
|
|
||||||
|
// import "encoding/binary"
|
||||||
|
import "fmt"
|
||||||
|
|
||||||
|
type TestMammoth2 struct {
|
||||||
|
|
||||||
|
{{range .Values }}{{if .Primitive }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" true }} {{ .Primitive }}
|
||||||
|
{{ .MethodNamePfx "Fptr" true }} *{{ .Primitive }}
|
||||||
|
{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if not .MapKey }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" false }} []{{ .Elem }}
|
||||||
|
{{ .MethodNamePfx "Fptr" false }} *[]{{ .Elem }}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
{{range .Values }}{{if not .Primitive }}{{if .MapKey }}{{/*
|
||||||
|
*/}}{{ .MethodNamePfx "F" false }} map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
{{ .MethodNamePfx "Fptr" false }} *map[{{ .MapKey }}]{{ .Elem }}
|
||||||
|
{{end}}{{end}}{{end}}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// -----------
|
||||||
|
|
||||||
|
type testMammoth2Binary uint64
|
||||||
|
func (x testMammoth2Binary) MarshalBinary() (data []byte, err error) {
|
||||||
|
data = make([]byte, 8)
|
||||||
|
bigen.PutUint64(data, uint64(x))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
func (x *testMammoth2Binary) UnmarshalBinary(data []byte) (err error) {
|
||||||
|
*x = testMammoth2Binary(bigen.Uint64(data))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
type testMammoth2Text uint64
|
||||||
|
func (x testMammoth2Text) MarshalText() (data []byte, err error) {
|
||||||
|
data = []byte(fmt.Sprintf("%b", uint64(x)))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
func (x *testMammoth2Text) UnmarshalText(data []byte) (err error) {
|
||||||
|
_, err = fmt.Sscanf(string(data), "%b", (*uint64)(x))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
type testMammoth2Json uint64
|
||||||
|
func (x testMammoth2Json) MarshalJSON() (data []byte, err error) {
|
||||||
|
data = []byte(fmt.Sprintf("%v", uint64(x)))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
func (x *testMammoth2Json) UnmarshalJSON(data []byte) (err error) {
|
||||||
|
_, err = fmt.Sscanf(string(data), "%v", (*uint64)(x))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
type testMammoth2Basic [4]uint64
|
||||||
|
|
||||||
|
type TestMammoth2Wrapper struct {
|
||||||
|
V TestMammoth2
|
||||||
|
T testMammoth2Text
|
||||||
|
B testMammoth2Binary
|
||||||
|
J testMammoth2Json
|
||||||
|
C testMammoth2Basic
|
||||||
|
M map[testMammoth2Basic]TestMammoth2
|
||||||
|
L []TestMammoth2
|
||||||
|
A [4]int64
|
||||||
|
}
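A minimal round-trip sketch of how this wrapper might be exercised — an illustration only, not part of this file: the handle is left abstract, and the function name, field choices and the reflect import are assumptions.

// exampleMammoth2Roundtrip encodes and decodes a TestMammoth2Wrapper with the given
// Handle and checks that the value survives the round trip (requires the reflect import).
func exampleMammoth2Roundtrip(h Handle) error {
	var in, out TestMammoth2Wrapper
	in.A = [4]int64{1, 2, 3, 4}
	in.B = testMammoth2Binary(42)
	in.J = testMammoth2Json(7)
	var bs []byte
	if err := NewEncoderBytes(&bs, h).Encode(&in); err != nil {
		return err
	}
	if err := NewDecoderBytes(bs, h).Decode(&out); err != nil {
		return err
	}
	if !reflect.DeepEqual(in, out) {
		return fmt.Errorf("TestMammoth2Wrapper roundtrip mismatch")
	}
	return nil
}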
|
|
@ -1,214 +0,0 @@
|
||||||
// Copyright (c) 2012-2015 Ugorji Nwoke. All rights reserved.
|
|
||||||
// Use of this source code is governed by a MIT license found in the LICENSE file.
|
|
||||||
|
|
||||||
// +build ignore
|
|
||||||
|
|
||||||
package codec
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math/rand"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
// NoopHandle returns a no-op handle. It basically does nothing.
|
|
||||||
// It is only useful for benchmarking, as it gives an idea of the
|
|
||||||
// overhead from the codec framework.
|
|
||||||
//
|
|
||||||
// LIBRARY USERS: *** DO NOT USE ***
|
|
||||||
func NoopHandle(slen int) *noopHandle {
|
|
||||||
h := noopHandle{}
|
|
||||||
h.rand = rand.New(rand.NewSource(time.Now().UnixNano()))
|
|
||||||
h.B = make([][]byte, slen)
|
|
||||||
h.S = make([]string, slen)
|
|
||||||
for i := 0; i < len(h.S); i++ {
|
|
||||||
b := make([]byte, i+1)
|
|
||||||
for j := 0; j < len(b); j++ {
|
|
||||||
b[j] = 'a' + byte(i)
|
|
||||||
}
|
|
||||||
h.B[i] = b
|
|
||||||
h.S[i] = string(b)
|
|
||||||
}
|
|
||||||
return &h
|
|
||||||
}
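// Illustrative sketch only (this file is removed in this change): the doc comment
// above says NoopHandle exists purely to estimate framework overhead in benchmarks.
// A benchmark using it might look like the following; the benchmark name and the
// value being encoded are assumptions, not code from this repository.
//
//	func BenchmarkNoopEncode(b *testing.B) {
//		h := NoopHandle(8)
//		v := map[string]int{"a": 1, "b": 2}
//		var bs []byte
//		for i := 0; i < b.N; i++ {
//			if err := NewEncoderBytes(&bs, h).Encode(v); err != nil {
//				b.Fatal(err)
//			}
//		}
//	}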
|
|
||||||
|
|
||||||
// noopHandle does nothing.
|
|
||||||
// It is used to simulate the overhead of the codec framework.
|
|
||||||
type noopHandle struct {
|
|
||||||
BasicHandle
|
|
||||||
binaryEncodingType
|
|
||||||
noopDrv // noopDrv is unexported here, so we can get a copy of it when needed.
|
|
||||||
}
|
|
||||||
|
|
||||||
type noopDrv struct {
|
|
||||||
d *Decoder
|
|
||||||
e *Encoder
|
|
||||||
i int
|
|
||||||
S []string
|
|
||||||
B [][]byte
|
|
||||||
mks []bool // stack. if map (true), else if array (false)
|
|
||||||
mk bool // top of stack. what container are we on? map or array?
|
|
||||||
ct valueType // last response for IsContainerType.
|
|
||||||
cb int // counter for ContainerType
|
|
||||||
rand *rand.Rand
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *noopDrv) r(v int) int { return h.rand.Intn(v) }
|
|
||||||
func (h *noopDrv) m(v int) int { h.i++; return h.i % v }
|
|
||||||
|
|
||||||
func (h *noopDrv) newEncDriver(e *Encoder) encDriver { h.e = e; return h }
|
|
||||||
func (h *noopDrv) newDecDriver(d *Decoder) decDriver { h.d = d; return h }
|
|
||||||
|
|
||||||
func (h *noopDrv) reset() {}
|
|
||||||
func (h *noopDrv) uncacheRead() {}
|
|
||||||
|
|
||||||
// --- encDriver
|
|
||||||
|
|
||||||
// stack functions (for map and array)
|
|
||||||
func (h *noopDrv) start(b bool) {
|
|
||||||
// println("start", len(h.mks)+1)
|
|
||||||
h.mks = append(h.mks, b)
|
|
||||||
h.mk = b
|
|
||||||
}
|
|
||||||
func (h *noopDrv) end() {
|
|
||||||
// println("end: ", len(h.mks)-1)
|
|
||||||
h.mks = h.mks[:len(h.mks)-1]
|
|
||||||
if len(h.mks) > 0 {
|
|
||||||
h.mk = h.mks[len(h.mks)-1]
|
|
||||||
} else {
|
|
||||||
h.mk = false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *noopDrv) EncodeBuiltin(rt uintptr, v interface{}) {}
|
|
||||||
func (h *noopDrv) EncodeNil() {}
|
|
||||||
func (h *noopDrv) EncodeInt(i int64) {}
|
|
||||||
func (h *noopDrv) EncodeUint(i uint64) {}
|
|
||||||
func (h *noopDrv) EncodeBool(b bool) {}
|
|
||||||
func (h *noopDrv) EncodeFloat32(f float32) {}
|
|
||||||
func (h *noopDrv) EncodeFloat64(f float64) {}
|
|
||||||
func (h *noopDrv) EncodeRawExt(re *RawExt, e *Encoder) {}
|
|
||||||
func (h *noopDrv) EncodeArrayStart(length int) { h.start(true) }
|
|
||||||
func (h *noopDrv) EncodeMapStart(length int) { h.start(false) }
|
|
||||||
func (h *noopDrv) EncodeEnd() { h.end() }
|
|
||||||
|
|
||||||
func (h *noopDrv) EncodeString(c charEncoding, v string) {}
|
|
||||||
func (h *noopDrv) EncodeSymbol(v string) {}
|
|
||||||
func (h *noopDrv) EncodeStringBytes(c charEncoding, v []byte) {}
|
|
||||||
|
|
||||||
func (h *noopDrv) EncodeExt(rv interface{}, xtag uint64, ext Ext, e *Encoder) {}
|
|
||||||
|
|
||||||
// ---- decDriver
|
|
||||||
func (h *noopDrv) initReadNext() {}
|
|
||||||
func (h *noopDrv) CheckBreak() bool { return false }
|
|
||||||
func (h *noopDrv) IsBuiltinType(rt uintptr) bool { return false }
|
|
||||||
func (h *noopDrv) DecodeBuiltin(rt uintptr, v interface{}) {}
|
|
||||||
func (h *noopDrv) DecodeInt(bitsize uint8) (i int64) { return int64(h.m(15)) }
|
|
||||||
func (h *noopDrv) DecodeUint(bitsize uint8) (ui uint64) { return uint64(h.m(35)) }
|
|
||||||
func (h *noopDrv) DecodeFloat(chkOverflow32 bool) (f float64) { return float64(h.m(95)) }
|
|
||||||
func (h *noopDrv) DecodeBool() (b bool) { return h.m(2) == 0 }
|
|
||||||
func (h *noopDrv) DecodeString() (s string) { return h.S[h.m(8)] }
|
|
||||||
func (h *noopDrv) DecodeStringAsBytes() []byte { return h.DecodeBytes(nil, true) }
|
|
||||||
|
|
||||||
func (h *noopDrv) DecodeBytes(bs []byte, zerocopy bool) []byte { return h.B[h.m(len(h.B))] }
|
|
||||||
|
|
||||||
func (h *noopDrv) ReadEnd() { h.end() }
|
|
||||||
|
|
||||||
// toggle map/slice
|
|
||||||
func (h *noopDrv) ReadMapStart() int { h.start(true); return h.m(10) }
|
|
||||||
func (h *noopDrv) ReadArrayStart() int { h.start(false); return h.m(10) }
|
|
||||||
|
|
||||||
func (h *noopDrv) ContainerType() (vt valueType) {
|
|
||||||
// return h.m(2) == 0
|
|
||||||
// handle kStruct, which will bomb if it calls this and doesn't get back a map or array.
|
|
||||||
// consequently, if the return value is not map or array, reset it to one of them based on h.m(7) % 2
|
|
||||||
// for kstruct: at least one out of every 2 times, return one of valueTypeMap or Array (else kstruct bombs)
|
|
||||||
// however, every 10th time it is called, we just return something else.
|
|
||||||
var vals = [...]valueType{valueTypeArray, valueTypeMap}
|
|
||||||
// ------------ TAKE ------------
|
|
||||||
// if h.cb%2 == 0 {
|
|
||||||
// if h.ct == valueTypeMap || h.ct == valueTypeArray {
|
|
||||||
// } else {
|
|
||||||
// h.ct = vals[h.m(2)]
|
|
||||||
// }
|
|
||||||
// } else if h.cb%5 == 0 {
|
|
||||||
// h.ct = valueType(h.m(8))
|
|
||||||
// } else {
|
|
||||||
// h.ct = vals[h.m(2)]
|
|
||||||
// }
|
|
||||||
// ------------ TAKE ------------
|
|
||||||
// if h.cb%16 == 0 {
|
|
||||||
// h.ct = valueType(h.cb % 8)
|
|
||||||
// } else {
|
|
||||||
// h.ct = vals[h.cb%2]
|
|
||||||
// }
|
|
||||||
h.ct = vals[h.cb%2]
|
|
||||||
h.cb++
|
|
||||||
return h.ct
|
|
||||||
|
|
||||||
// if h.ct == valueTypeNil || h.ct == valueTypeString || h.ct == valueTypeBytes {
|
|
||||||
// return h.ct
|
|
||||||
// }
|
|
||||||
// return valueTypeUnset
|
|
||||||
// TODO: may need to tweak this so it works.
|
|
||||||
// if h.ct == valueTypeMap && vt == valueTypeArray || h.ct == valueTypeArray && vt == valueTypeMap {
|
|
||||||
// h.cb = !h.cb
|
|
||||||
// h.ct = vt
|
|
||||||
// return h.cb
|
|
||||||
// }
|
|
||||||
// // go in a loop and check it.
|
|
||||||
// h.ct = vt
|
|
||||||
// h.cb = h.m(7) == 0
|
|
||||||
// return h.cb
|
|
||||||
}
|
|
||||||
func (h *noopDrv) TryDecodeAsNil() bool {
|
|
||||||
if h.mk {
|
|
||||||
return false
|
|
||||||
} else {
|
|
||||||
return h.m(8) == 0
|
|
||||||
}
|
|
||||||
}
|
|
||||||
func (h *noopDrv) DecodeExt(rv interface{}, xtag uint64, ext Ext) uint64 {
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *noopDrv) DecodeNaked() {
|
|
||||||
// use h.r (random) not h.m() because h.m() could cause the same value to be given.
|
|
||||||
var sk int
|
|
||||||
if h.mk {
|
|
||||||
// if mapkey, do not support values of nil OR bytes, array, map or rawext
|
|
||||||
sk = h.r(7) + 1
|
|
||||||
} else {
|
|
||||||
sk = h.r(12)
|
|
||||||
}
|
|
||||||
n := &h.d.n
|
|
||||||
switch sk {
|
|
||||||
case 0:
|
|
||||||
n.v = valueTypeNil
|
|
||||||
case 1:
|
|
||||||
n.v, n.b = valueTypeBool, false
|
|
||||||
case 2:
|
|
||||||
n.v, n.b = valueTypeBool, true
|
|
||||||
case 3:
|
|
||||||
n.v, n.i = valueTypeInt, h.DecodeInt(64)
|
|
||||||
case 4:
|
|
||||||
n.v, n.u = valueTypeUint, h.DecodeUint(64)
|
|
||||||
case 5:
|
|
||||||
n.v, n.f = valueTypeFloat, h.DecodeFloat(true)
|
|
||||||
case 6:
|
|
||||||
n.v, n.f = valueTypeFloat, h.DecodeFloat(false)
|
|
||||||
case 7:
|
|
||||||
n.v, n.s = valueTypeString, h.DecodeString()
|
|
||||||
case 8:
|
|
||||||
n.v, n.l = valueTypeBytes, h.B[h.m(len(h.B))]
|
|
||||||
case 9:
|
|
||||||
n.v = valueTypeArray
|
|
||||||
case 10:
|
|
||||||
n.v = valueTypeMap
|
|
||||||
default:
|
|
||||||
n.v = valueTypeExt
|
|
||||||
n.u = h.DecodeUint(64)
|
|
||||||
n.l = h.B[h.m(len(h.B))]
|
|
||||||
}
|
|
||||||
h.ct = n.v
|
|
||||||
return
|
|
||||||
}
|
|
|
@ -0,0 +1,639 @@
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"cbor": "AA==",
|
||||||
|
"hex": "00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "AQ==",
|
||||||
|
"hex": "01",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "Cg==",
|
||||||
|
"hex": "0a",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 10
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "Fw==",
|
||||||
|
"hex": "17",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 23
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GBg=",
|
||||||
|
"hex": "1818",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 24
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GBk=",
|
||||||
|
"hex": "1819",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 25
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GGQ=",
|
||||||
|
"hex": "1864",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 100
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GQPo",
|
||||||
|
"hex": "1903e8",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1000
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GgAPQkA=",
|
||||||
|
"hex": "1a000f4240",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1000000
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "GwAAAOjUpRAA",
|
||||||
|
"hex": "1b000000e8d4a51000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1000000000000
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "G///////////",
|
||||||
|
"hex": "1bffffffffffffffff",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 18446744073709551615
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "wkkBAAAAAAAAAAA=",
|
||||||
|
"hex": "c249010000000000000000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 18446744073709551616
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "O///////////",
|
||||||
|
"hex": "3bffffffffffffffff",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -18446744073709551616,
|
||||||
|
"skip": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "w0kBAAAAAAAAAAA=",
|
||||||
|
"hex": "c349010000000000000000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -18446744073709551617
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "IA==",
|
||||||
|
"hex": "20",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "KQ==",
|
||||||
|
"hex": "29",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -10
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "OGM=",
|
||||||
|
"hex": "3863",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -100
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "OQPn",
|
||||||
|
"hex": "3903e7",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -1000
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+QAA",
|
||||||
|
"hex": "f90000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 0.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+YAA",
|
||||||
|
"hex": "f98000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -0.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+TwA",
|
||||||
|
"hex": "f93c00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+z/xmZmZmZma",
|
||||||
|
"hex": "fb3ff199999999999a",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1.1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+T4A",
|
||||||
|
"hex": "f93e00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1.5
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+Xv/",
|
||||||
|
"hex": "f97bff",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 65504.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+kfDUAA=",
|
||||||
|
"hex": "fa47c35000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 100000.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+n9///8=",
|
||||||
|
"hex": "fa7f7fffff",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 3.4028234663852886e+38
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+3435DyIAHWc",
|
||||||
|
"hex": "fb7e37e43c8800759c",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 1.0e+300
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+QAB",
|
||||||
|
"hex": "f90001",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 5.960464477539063e-08
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+QQA",
|
||||||
|
"hex": "f90400",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": 6.103515625e-05
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+cQA",
|
||||||
|
"hex": "f9c400",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -4.0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+8AQZmZmZmZm",
|
||||||
|
"hex": "fbc010666666666666",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": -4.1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+XwA",
|
||||||
|
"hex": "f97c00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+X4A",
|
||||||
|
"hex": "f97e00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "NaN"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+fwA",
|
||||||
|
"hex": "f9fc00",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "-Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+n+AAAA=",
|
||||||
|
"hex": "fa7f800000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+n/AAAA=",
|
||||||
|
"hex": "fa7fc00000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "NaN"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+v+AAAA=",
|
||||||
|
"hex": "faff800000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "-Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+3/wAAAAAAAA",
|
||||||
|
"hex": "fb7ff0000000000000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+3/4AAAAAAAA",
|
||||||
|
"hex": "fb7ff8000000000000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "NaN"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+//wAAAAAAAA",
|
||||||
|
"hex": "fbfff0000000000000",
|
||||||
|
"roundtrip": false,
|
||||||
|
"diagnostic": "-Infinity"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "9A==",
|
||||||
|
"hex": "f4",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "9Q==",
|
||||||
|
"hex": "f5",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "9g==",
|
||||||
|
"hex": "f6",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": null
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "9w==",
|
||||||
|
"hex": "f7",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "undefined"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "8A==",
|
||||||
|
"hex": "f0",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "simple(16)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+Bg=",
|
||||||
|
"hex": "f818",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "simple(24)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "+P8=",
|
||||||
|
"hex": "f8ff",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "simple(255)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "wHQyMDEzLTAzLTIxVDIwOjA0OjAwWg==",
|
||||||
|
"hex": "c074323031332d30332d32315432303a30343a30305a",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "0(\"2013-03-21T20:04:00Z\")"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "wRpRS2ew",
|
||||||
|
"hex": "c11a514b67b0",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "1(1363896240)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "wftB1FLZ7CAAAA==",
|
||||||
|
"hex": "c1fb41d452d9ec200000",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "1(1363896240.5)"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "10QBAgME",
|
||||||
|
"hex": "d74401020304",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "23(h'01020304')"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "2BhFZElFVEY=",
|
||||||
|
"hex": "d818456449455446",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "24(h'6449455446')"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "2CB2aHR0cDovL3d3dy5leGFtcGxlLmNvbQ==",
|
||||||
|
"hex": "d82076687474703a2f2f7777772e6578616d706c652e636f6d",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "32(\"http://www.example.com\")"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "QA==",
|
||||||
|
"hex": "40",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "h''"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "RAECAwQ=",
|
||||||
|
"hex": "4401020304",
|
||||||
|
"roundtrip": true,
|
||||||
|
"diagnostic": "h'01020304'"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "YA==",
|
||||||
|
"hex": "60",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": ""
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "YWE=",
|
||||||
|
"hex": "6161",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "a"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "ZElFVEY=",
|
||||||
|
"hex": "6449455446",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "IETF"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "YiJc",
|
||||||
|
"hex": "62225c",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "\"\\"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "YsO8",
|
||||||
|
"hex": "62c3bc",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "ü"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "Y+awtA==",
|
||||||
|
"hex": "63e6b0b4",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "水"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "ZPCQhZE=",
|
||||||
|
"hex": "64f0908591",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": "𐅑"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gA==",
|
||||||
|
"hex": "80",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": [
|
||||||
|
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gwECAw==",
|
||||||
|
"hex": "83010203",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gwGCAgOCBAU=",
|
||||||
|
"hex": "8301820203820405",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
[
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
[
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "mBkBAgMEBQYHCAkKCwwNDg8QERITFBUWFxgYGBk=",
|
||||||
|
"hex": "98190102030405060708090a0b0c0d0e0f101112131415161718181819",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3,
|
||||||
|
4,
|
||||||
|
5,
|
||||||
|
6,
|
||||||
|
7,
|
||||||
|
8,
|
||||||
|
9,
|
||||||
|
10,
|
||||||
|
11,
|
||||||
|
12,
|
||||||
|
13,
|
||||||
|
14,
|
||||||
|
15,
|
||||||
|
16,
|
||||||
|
17,
|
||||||
|
18,
|
||||||
|
19,
|
||||||
|
20,
|
||||||
|
21,
|
||||||
|
22,
|
||||||
|
23,
|
||||||
|
24,
|
||||||
|
25
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "oA==",
|
||||||
|
"hex": "a0",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": {
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "ogECAwQ=",
|
||||||
|
"hex": "a201020304",
|
||||||
|
"roundtrip": true,
|
||||||
|
"skip": true,
|
||||||
|
"diagnostic": "{1: 2, 3: 4}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "omFhAWFiggID",
|
||||||
|
"hex": "a26161016162820203",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": {
|
||||||
|
"a": 1,
|
||||||
|
"b": [
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gmFhoWFiYWM=",
|
||||||
|
"hex": "826161a161626163",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": [
|
||||||
|
"a",
|
||||||
|
{
|
||||||
|
"b": "c"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "pWFhYUFhYmFCYWNhQ2FkYURhZWFF",
|
||||||
|
"hex": "a56161614161626142616361436164614461656145",
|
||||||
|
"roundtrip": true,
|
||||||
|
"decoded": {
|
||||||
|
"a": "A",
|
||||||
|
"b": "B",
|
||||||
|
"c": "C",
|
||||||
|
"d": "D",
|
||||||
|
"e": "E"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "X0IBAkMDBAX/",
|
||||||
|
"hex": "5f42010243030405ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"skip": true,
|
||||||
|
"diagnostic": "(_ h'0102', h'030405')"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "f2VzdHJlYWRtaW5n/w==",
|
||||||
|
"hex": "7f657374726561646d696e67ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": "streaming"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "n/8=",
|
||||||
|
"hex": "9fff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "nwGCAgOfBAX//w==",
|
||||||
|
"hex": "9f018202039f0405ffff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
[
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
[
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "nwGCAgOCBAX/",
|
||||||
|
"hex": "9f01820203820405ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
[
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
[
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gwGCAgOfBAX/",
|
||||||
|
"hex": "83018202039f0405ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
[
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
[
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gwGfAgP/ggQF",
|
||||||
|
"hex": "83019f0203ff820405",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
[
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
],
|
||||||
|
[
|
||||||
|
4,
|
||||||
|
5
|
||||||
|
]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "nwECAwQFBgcICQoLDA0ODxAREhMUFRYXGBgYGf8=",
|
||||||
|
"hex": "9f0102030405060708090a0b0c0d0e0f101112131415161718181819ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
1,
|
||||||
|
2,
|
||||||
|
3,
|
||||||
|
4,
|
||||||
|
5,
|
||||||
|
6,
|
||||||
|
7,
|
||||||
|
8,
|
||||||
|
9,
|
||||||
|
10,
|
||||||
|
11,
|
||||||
|
12,
|
||||||
|
13,
|
||||||
|
14,
|
||||||
|
15,
|
||||||
|
16,
|
||||||
|
17,
|
||||||
|
18,
|
||||||
|
19,
|
||||||
|
20,
|
||||||
|
21,
|
||||||
|
22,
|
||||||
|
23,
|
||||||
|
24,
|
||||||
|
25
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "v2FhAWFinwID//8=",
|
||||||
|
"hex": "bf61610161629f0203ffff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": {
|
||||||
|
"a": 1,
|
||||||
|
"b": [
|
||||||
|
2,
|
||||||
|
3
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "gmFhv2FiYWP/",
|
||||||
|
"hex": "826161bf61626163ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": [
|
||||||
|
"a",
|
||||||
|
{
|
||||||
|
"b": "c"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cbor": "v2NGdW71Y0FtdCH/",
|
||||||
|
"hex": "bf6346756ef563416d7421ff",
|
||||||
|
"roundtrip": false,
|
||||||
|
"decoded": {
|
||||||
|
"Fun": true,
|
||||||
|
"Amt": -2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
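These entries follow the common CBOR test-vector layout: the CBOR bytes in base64 ("cbor") and hex ("hex"), plus either a JSON-representable "decoded" value or a "diagnostic" string, with "roundtrip" and "skip" flags. The following is a minimal sketch of consuming them — an assumption, not the package's actual test harness; it presumes it sits inside package codec and imports encoding/json, encoding/base64 and fmt.

// cborTestVector mirrors one entry of the JSON array above.
type cborTestVector struct {
	Cbor       string      `json:"cbor"`       // base64 of the CBOR encoding
	Hex        string      `json:"hex"`        // same bytes, hex encoded
	Roundtrip  bool        `json:"roundtrip"`  // whether re-encoding should reproduce the bytes
	Decoded    interface{} `json:"decoded"`    // expected decoded value, when representable in JSON
	Diagnostic string      `json:"diagnostic"` // CBOR diagnostic notation, when not representable
	Skip       bool        `json:"skip"`
}

// checkCborVectors decodes each vector with a CborHandle and compares against "decoded".
func checkCborVectors(data []byte) error {
	var vectors []cborTestVector
	if err := json.Unmarshal(data, &vectors); err != nil {
		return err
	}
	for _, tv := range vectors {
		if tv.Skip || tv.Diagnostic != "" {
			continue // tags, NaN/Infinity and simple(n) need handling beyond this sketch
		}
		bs, err := base64.StdEncoding.DecodeString(tv.Cbor)
		if err != nil {
			return err
		}
		var got interface{}
		if err = NewDecoderBytes(bs, new(CborHandle)).Decode(&got); err != nil {
			return err
		}
		// String comparison sidesteps int/uint/float representation differences;
		// a real harness would compare values type by type.
		if fmt.Sprintf("%v", got) != fmt.Sprintf("%v", tv.Decoded) {
			return fmt.Errorf("decode mismatch for 0x%s: got %v, want %v", tv.Hex, got, tv.Decoded)
		}
	}
	return nil
}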
|
|
@ -0,0 +1,126 @@
|
||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
# This will create golden files in a directory passed to it.
|
||||||
|
# A Test calls this internally to create the golden files,
# so it can process them (so we don't have to check in the files).
|
||||||
|
|
||||||
|
# Ensure msgpack-python and cbor are installed first, using:
|
||||||
|
# sudo apt-get install python-dev
|
||||||
|
# sudo apt-get install python-pip
|
||||||
|
# pip install --user msgpack-python msgpack-rpc-python cbor
|
||||||
|
|
||||||
|
# Ensure all "string" keys are utf strings (else encoded as bytes)
|
||||||
|
|
||||||
|
import cbor, msgpack, msgpackrpc, sys, os, threading
|
||||||
|
|
||||||
|
def get_test_data_list():
|
||||||
|
# get list with all primitive types, and a combo type
|
||||||
|
l0 = [
|
||||||
|
-8,
|
||||||
|
-1616,
|
||||||
|
-32323232,
|
||||||
|
-6464646464646464,
|
||||||
|
192,
|
||||||
|
1616,
|
||||||
|
32323232,
|
||||||
|
6464646464646464,
|
||||||
|
192,
|
||||||
|
-3232.0,
|
||||||
|
-6464646464.0,
|
||||||
|
3232.0,
|
||||||
|
6464.0,
|
||||||
|
6464646464.0,
|
||||||
|
False,
|
||||||
|
True,
|
||||||
|
u"null",
|
||||||
|
None,
|
||||||
|
u"some&day>some<day",
|
||||||
|
1328176922000002000,
|
||||||
|
u"",
|
||||||
|
-2206187877999998000,
|
||||||
|
u"bytestring",
|
||||||
|
270,
|
||||||
|
u"none",
|
||||||
|
-2013855847999995777,
|
||||||
|
#-6795364578871345152,
|
||||||
|
]
|
||||||
|
l1 = [
|
||||||
|
{ "true": True,
|
||||||
|
"false": False },
|
||||||
|
{ "true": u"True",
|
||||||
|
"false": False,
|
||||||
|
"uint16(1616)": 1616 },
|
||||||
|
{ "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
|
||||||
|
"int32":32323232, "bool": True,
|
||||||
|
"LONG STRING": u"123456789012345678901234567890123456789012345678901234567890",
|
||||||
|
"SHORT STRING": u"1234567890" },
|
||||||
|
{ True: "true", 138: False, "false": 200 }
|
||||||
|
]
|
||||||
|
|
||||||
|
l = []
|
||||||
|
l.extend(l0)
|
||||||
|
l.append(l0)
|
||||||
|
l.append(1)
|
||||||
|
l.extend(l1)
|
||||||
|
return l
|
||||||
|
|
||||||
|
def build_test_data(destdir):
|
||||||
|
l = get_test_data_list()
|
||||||
|
for i in range(len(l)):
|
||||||
|
# packer = msgpack.Packer()
|
||||||
|
serialized = msgpack.dumps(l[i])
|
||||||
|
f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
|
||||||
|
f.write(serialized)
|
||||||
|
f.close()
|
||||||
|
serialized = cbor.dumps(l[i])
|
||||||
|
f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
|
||||||
|
f.write(serialized)
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
def doRpcServer(port, stopTimeSec):
|
||||||
|
class EchoHandler(object):
|
||||||
|
def Echo123(self, msg1, msg2, msg3):
|
||||||
|
return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
|
||||||
|
def EchoStruct(self, msg):
|
||||||
|
return ("%s" % msg)
|
||||||
|
|
||||||
|
addr = msgpackrpc.Address('127.0.0.1', port)
|
||||||
|
server = msgpackrpc.Server(EchoHandler())
|
||||||
|
server.listen(addr)
|
||||||
|
# run thread to stop it after stopTimeSec seconds if > 0
|
||||||
|
if stopTimeSec > 0:
|
||||||
|
def myStopRpcServer():
|
||||||
|
server.stop()
|
||||||
|
t = threading.Timer(stopTimeSec, myStopRpcServer)
|
||||||
|
t.start()
|
||||||
|
server.start()
|
||||||
|
|
||||||
|
def doRpcClientToPythonSvc(port):
|
||||||
|
address = msgpackrpc.Address('127.0.0.1', port)
|
||||||
|
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
|
||||||
|
print client.call("Echo123", "A1", "B2", "C3")
|
||||||
|
print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
|
||||||
|
|
||||||
|
def doRpcClientToGoSvc(port):
|
||||||
|
# print ">>>> port: ", port, " <<<<<"
|
||||||
|
address = msgpackrpc.Address('127.0.0.1', port)
|
||||||
|
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
|
||||||
|
print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
|
||||||
|
print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
|
||||||
|
|
||||||
|
def doMain(args):
|
||||||
|
if len(args) == 2 and args[0] == "testdata":
|
||||||
|
build_test_data(args[1])
|
||||||
|
elif len(args) == 3 and args[0] == "rpc-server":
|
||||||
|
doRpcServer(int(args[1]), int(args[2]))
|
||||||
|
elif len(args) == 2 and args[0] == "rpc-client-python-service":
|
||||||
|
doRpcClientToPythonSvc(int(args[1]))
|
||||||
|
elif len(args) == 2 and args[0] == "rpc-client-go-service":
|
||||||
|
doRpcClientToGoSvc(int(args[1]))
|
||||||
|
else:
|
||||||
|
print("Usage: test.py " +
|
||||||
|
"[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
doMain(sys.argv[1:])
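On the Go side, a test could then read the golden files written by build_test_data above. The following is a minimal sketch under the assumption that it lives in package codec and imports io/ioutil, path/filepath and fmt; the repository's real test harness may differ.

// readGoldenPair loads the i-th msgpack and cbor golden files from dir and decodes
// each with the matching handle, returning the decoded values.
func readGoldenPair(dir string, i int) (mp, cb interface{}, err error) {
	bs, err := ioutil.ReadFile(filepath.Join(dir, fmt.Sprintf("%d.msgpack.golden", i)))
	if err != nil {
		return
	}
	if err = NewDecoderBytes(bs, new(MsgpackHandle)).Decode(&mp); err != nil {
		return
	}
	bs, err = ioutil.ReadFile(filepath.Join(dir, fmt.Sprintf("%d.cbor.golden", i)))
	if err != nil {
		return
	}
	err = NewDecoderBytes(bs, new(CborHandle)).Decode(&cb)
	return
}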
|
||||||
|
|
|
@ -1,432 +0,0 @@
|
||||||
// +build ignore
|
|
||||||
|
|
||||||
package codec
|
|
||||||
|
|
||||||
import "reflect"
|
|
||||||
|
|
||||||
/*
|
|
||||||
|
|
||||||
A strict Non-validating namespace-aware XML 1.0 parser and (en|de)coder.
|
|
||||||
|
|
||||||
We are attempting this due to perceived issues with encoding/xml:
|
|
||||||
- Complicated. It tried to do too much, and is not as simple to use as json.
|
|
||||||
- Due to over-engineering, reflection is over-used AND performance suffers:
|
|
||||||
java is 6X faster: http://fabsk.eu/blog/category/informatique/dev/golang/
|
|
||||||
even PYTHON performs better: http://outgoing.typepad.com/outgoing/2014/07/exploring-golang.html
|
|
||||||
|
|
||||||
codec framework will offer the following benefits
|
|
||||||
- VASTLY improved performance (when using reflection-mode or codecgen)
|
|
||||||
- simplicity and consistency: with the rest of the supported formats
|
|
||||||
- all other benefits of codec framework (streaming, codegeneration, etc)
|
|
||||||
|
|
||||||
codec is not a drop-in replacement for encoding/xml.
|
|
||||||
It is a replacement, based on the simplicity and performance of codec.
|
|
||||||
Look at it like JAXB for Go.
|
|
||||||
|
|
||||||
Challenges:
|
|
||||||
|
|
||||||
- Need to output XML preamble, with all namespaces at the right location in the output.
|
|
||||||
- Each "end" block is dynamic, so we need to maintain a context-aware stack
|
|
||||||
- How to decide when to use an attribute VS an element
|
|
||||||
- How to handle chardata, attr, comment EXPLICITLY.
|
|
||||||
- Should it output fragments?
|
|
||||||
e.g. encoding a bool should just output true OR false, which is not well-formed XML.
|
|
||||||
|
|
||||||
Extend the struct tag. See representative example:
|
|
||||||
type X struct {
|
|
||||||
ID uint8 codec:"xid|http://ugorji.net/x-namespace id,omitempty,toarray,attr,cdata"
|
|
||||||
}
|
|
||||||
|
|
||||||
Based on this, we encode
|
|
||||||
- fields as elements, BUT
|
|
||||||
encode as attributes if struct tag contains ",attr" and is a scalar (bool, number or string)
|
|
||||||
- text as entity-escaped text, BUT encode as CDATA if struct tag contains ",cdata".
|
|
||||||
|
|
||||||
In this mode, we only encode as attribute if ",attr" is found, and only encode as CDATA
|
|
||||||
if ",cdata" is found in the struct tag.
|
|
||||||
|
|
||||||
To handle namespaces:
|
|
||||||
- XMLHandle is denoted as being namespace-aware.
|
|
||||||
Consequently, we WILL use the ns:name pair to encode and decode if defined, else use the plain name.
|
|
||||||
- *Encoder and *Decoder know whether the Handle "prefers" namespaces.
|
|
||||||
- add *Encoder.getEncName(*structFieldInfo).
|
|
||||||
No one calls *structFieldInfo.indexForEncName directly anymore
|
|
||||||
- add *Decoder.getStructFieldInfo(encName string) // encName here is either like abc, or h1:nsabc
|
|
||||||
No one accesses .encName anymore except in
|
|
||||||
- let encode.go and decode.go use these (for consistency)
|
|
||||||
- only problem exists for gen.go, where we create a big switch on encName.
|
|
||||||
Now, we also have to add a switch on strings.endsWith(kName, encNsName)
|
|
||||||
- gen.go will need to have many more methods, and then double-on the 2 switch loops like:
|
|
||||||
switch k {
|
|
||||||
case "abc" : x.abc()
|
|
||||||
case "def" : x.def()
|
|
||||||
default {
|
|
||||||
switch {
|
|
||||||
case !nsAware: panic(...)
|
|
||||||
case strings.endsWith("nsabc"): x.abc()
|
|
||||||
case strings.endsWith("nsdef"): x.def()
|
|
||||||
default: panic(...)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
The structure below accommodates this:
|
|
||||||
|
|
||||||
type typeInfo struct {
|
|
||||||
sfi []*structFieldInfo // sorted by encName
|
|
||||||
sfins // sorted by namespace
|
|
||||||
sfia // sorted, to have those with attributes at the top. Needed to write XML appropriately.
|
|
||||||
sfip // unsorted
|
|
||||||
}
|
|
||||||
type structFieldInfo struct {
|
|
||||||
encName
|
|
||||||
nsEncName
|
|
||||||
ns string
|
|
||||||
attr bool
|
|
||||||
cdata bool
|
|
||||||
}
|
|
||||||
|
|
||||||
indexForEncName is now an internal helper function that takes a sorted array
|
|
||||||
(one of ti.sfins or ti.sfi). It is only used by *Encoder.getStructFieldInfo(...)
|
|
||||||
|
|
||||||
There will be a separate parser from the builder.
|
|
||||||
The parser will have a method: next() xmlToken method.
|
|
||||||
|
|
||||||
xmlToken has fields:
|
|
||||||
- type uint8: 0 | ElementStart | ElementEnd | AttrKey | AttrVal | Text
|
|
||||||
- value string
|
|
||||||
- ns string
|
|
||||||
|
|
||||||
SEE: http://www.xml.com/pub/a/98/10/guide0.html?page=3#ENTDECL
|
|
||||||
|
|
||||||
The following are skipped when parsing:
|
|
||||||
- External Entities (from external file)
|
|
||||||
- Notation Declaration e.g. <!NOTATION GIF87A SYSTEM "GIF">
|
|
||||||
- Entity Declarations & References
|
|
||||||
- XML Declaration (assume UTF-8)
|
|
||||||
- XML Directive i.e. <! ... >
|
|
||||||
- Other Declarations: Notation, etc.
|
|
||||||
- Comment
|
|
||||||
- Processing Instruction
|
|
||||||
- schema / DTD for validation:
|
|
||||||
We are not a VALIDATING parser. Validation is done elsewhere.
|
|
||||||
However, some parts of the DTD internal subset are used (SEE BELOW).
|
|
||||||
For Attribute List Declarations e.g.
|
|
||||||
<!ATTLIST foo:oldjoke name ID #REQUIRED label CDATA #IMPLIED status ( funny | notfunny ) 'funny' >
|
|
||||||
We considered using the ATTLIST to get "default" value, but not to validate the contents. (VETOED)
|
|
||||||
|
|
||||||
The following XML features are supported
|
|
||||||
- Namespace
|
|
||||||
- Element
|
|
||||||
- Attribute
|
|
||||||
- cdata
|
|
||||||
- Unicode escape
|
|
||||||
|
|
||||||
The following DTD (when as an internal sub-set) features are supported:
|
|
||||||
- Internal Entities e.g.
|
|
||||||
<!ELEMENT burns "ugorji is cool" > AND entities for the set: [<>&"']
|
|
||||||
- Parameter entities e.g.
|
|
||||||
<!ENTITY % personcontent "ugorji is cool"> <!ELEMENT burns (%personcontent;)*>
|
|
||||||
|
|
||||||
At decode time, a structure containing the following is kept
|
|
||||||
- namespace mapping
|
|
||||||
- default attribute values
|
|
||||||
- all internal entities (<>&"' and others written in the document)
|
|
||||||
|
|
||||||
When decode starts, it parses XML namespace declarations and creates a map in the
|
|
||||||
xmlDecDriver. While parsing, that map continuously gets updated.
|
|
||||||
The only problem happens when a namespace declaration happens on the node that it defines.
|
|
||||||
e.g. <hn:name xmlns:hn="http://www.ugorji.net" >
|
|
||||||
To handle this, each Element must be fully parsed at a time,
|
|
||||||
even if it amounts to multiple tokens which are returned one at a time on request.
|
|
||||||
|
|
||||||
xmlns is a special attribute name.
|
|
||||||
- It is used to define namespaces, including the default
|
|
||||||
- It is never returned as an AttrKey or AttrVal.
|
|
||||||
*We may decide later to allow user to use it e.g. you want to parse the xmlns mappings into a field.*
|
|
||||||
|
|
||||||
Number, bool, null, mapKey, etc can all be decoded from any xmlToken.
|
|
||||||
This accommodates map[int]string for example.
|
|
||||||
|
|
||||||
It should be possible to create a schema from the types,
|
|
||||||
or vice versa (generate types from schema with appropriate tags).
|
|
||||||
This is however out-of-scope from this parsing project.
|
|
||||||
|
|
||||||
We should write all namespace information at the first point that it is referenced in the tree,
|
|
||||||
and use the mapping for all child nodes and attributes. This means that state is maintained
|
|
||||||
at a point in the tree. This also means that calls to Decode or MustDecode will reset some state.
|
|
||||||
|
|
||||||
When decoding, it is important to keep track of entity references and default attribute values.
|
|
||||||
It seems these can only be stored in the DTD components. We should honor them when decoding.
|
|
||||||
|
|
||||||
Configuration for XMLHandle will look like this:
|
|
||||||
|
|
||||||
XMLHandle
|
|
||||||
DefaultNS string
|
|
||||||
// Encoding:
|
|
||||||
NS map[string]string // ns URI to key, used for encoding
|
|
||||||
// Decoding: in case ENTITY declared in external schema or dtd, store info needed here
|
|
||||||
Entities map[string]string // map of entity rep to character
|
|
||||||
|
|
||||||
|
|
||||||
During encode, if a namespace mapping is not defined for a namespace found on a struct,
|
|
||||||
then we create a mapping for it using nsN (where N is 1..1000000, and doesn't conflict
|
|
||||||
with any other namespace mapping).
|
|
||||||
|
|
||||||
Note that different fields in a struct can have different namespaces.
|
|
||||||
However, all fields will default to the namespace on the _struct field (if defined).
|
|
||||||
|
|
||||||
An XML document is a name, a map of attributes and a list of children.
|
|
||||||
Consequently, we cannot "DecodeNaked" into a map[string]interface{} (for example).
|
|
||||||
We have to "DecodeNaked" into something that resembles XML data.
|
|
||||||
|
|
||||||
To support DecodeNaked (decode into nil interface{}), we have to define some "supporting" types:
|
|
||||||
type Name struct { // Preferred. Less allocations due to conversions.
|
|
||||||
Local string
|
|
||||||
Space string
|
|
||||||
}
|
|
||||||
type Element struct {
|
|
||||||
Name Name
|
|
||||||
Attrs map[Name]string
|
|
||||||
Children []interface{} // each child is either *Element or string
|
|
||||||
}
|
|
||||||
Only two "supporting" types are exposed for XML: Name and Element.
|
|
||||||
|
|
||||||
// ------------------
|
|
||||||
|
|
||||||
We considered 'type Name string' where Name is like "Space Local" (space-separated).
|
|
||||||
We decided against it, because each creation of a name would lead to
|
|
||||||
double allocation (first convert []byte to string, then concatenate them into a string).
|
|
||||||
The benefit is that it is faster to read Attrs from a map. But given that Element is a value
|
|
||||||
object, we want to eschew methods and have public exposed variables.
|
|
||||||
|
|
||||||
We also considered the following, where xml types were not value objects, and we used
|
|
||||||
intelligent accessor methods to extract information and for performance.
|
|
||||||
*** WE DECIDED AGAINST THIS. ***
|
|
||||||
type Attr struct {
|
|
||||||
Name Name
|
|
||||||
Value string
|
|
||||||
}
|
|
||||||
// Element is a ValueObject: There are no accessor methods.
|
|
||||||
// Make element self-contained.
|
|
||||||
type Element struct {
|
|
||||||
Name Name
|
|
||||||
attrsMap map[string]string // where key is "Space Local"
|
|
||||||
attrs []Attr
|
|
||||||
childrenT []string
|
|
||||||
childrenE []Element
|
|
||||||
childrenI []int // each child is a index into T or E.
|
|
||||||
}
|
|
||||||
func (x *Element) child(i) interface{} // returns string or *Element
|
|
||||||
|
|
||||||
// ------------------
|
|
||||||
|
|
||||||
Per XML spec and our default handling, white space is insignificant between elements,
|
|
||||||
specifically between parent-child or siblings. White space occurring alone between start
|
|
||||||
and end element IS significant. However, if xml:space='preserve', then we 'preserve'
|
|
||||||
all whitespace. This is more critical when doing a DecodeNaked, but MAY not be as critical
|
|
||||||
when decoding into a typed value.
|
|
||||||
|
|
||||||
**Note: there is no xml: namespace. The xml: attributes were defined before namespaces.**
|
|
||||||
**So treat them as just "directives" that should be interpreted to mean something**.
|
|
||||||
|
|
||||||
On encoding, we don't add any prettifying markup (indenting, etc).
|
|
||||||
|
|
||||||
A document or element can only be encoded/decoded from/to a struct. In this mode:
|
|
||||||
- struct name maps to element name (or tag-info from _struct field)
|
|
||||||
- fields are mapped to child elements or attributes
|
|
||||||
|
|
||||||
A map is either encoded as attributes on current element, or as a set of child elements.
|
|
||||||
Maps are encoded as attributes iff their keys and values are primitives (number, bool, string).
|
|
||||||
|
|
||||||
A list is encoded as a set of child elements.
|
|
||||||
|
|
||||||
Primitives (number, bool, string) are encoded as an element, attribute or text
|
|
||||||
depending on the context.
|
|
||||||
|
|
||||||
Extensions must encode themselves as a text string.
|
|
||||||
|
|
||||||
Encoding is tough, specifically when encoding mappings, because we need to encode
|
|
||||||
as either attribute or element. To do this, we need to default to encoding as attributes,
|
|
||||||
and then let Encoder inform the Handle when to start encoding as nodes.
|
|
||||||
i.e. Encoder does something like:
|
|
||||||
|
|
||||||
h.EncodeMapStart()
|
|
||||||
h.Encode(), h.Encode(), ...
|
|
||||||
h.EncodeMapNotAttrSignal() // this is not a bool, because it's a signal
|
|
||||||
h.Encode(), h.Encode(), ...
|
|
||||||
h.EncodeEnd()
|
|
||||||
|
|
||||||
Only XMLHandle understands this, and will set itself to start encoding as elements.
|
|
||||||
|
|
||||||
This support extends to maps. For example, if a struct field is a map, and it has
|
|
||||||
the struct tag signifying it should be attr, then all its fields are encoded as attributes.
|
|
||||||
e.g.
|
|
||||||
|
|
||||||
type X struct {
|
|
||||||
M map[string]int `codec:"m,attr"` // encode as attributes
|
|
||||||
}
|
|
||||||
|
|
||||||
Question:
|
|
||||||
- if encoding a map, what if map keys have spaces in them???
|
|
||||||
Then they cannot be attributes or child elements. Error.
|
|
||||||
|
|
||||||
Misc:
|
|
||||||
|
|
||||||
- For attribute values, normalize by trimming beginning and ending white space,
|
|
||||||
and converting every white space sequence to a single space.
|
|
||||||
- ATTLIST restrictions are enforced.
|
|
||||||
e.g. default value of xml:space, skipping xml:XYZ style attributes, etc.
|
|
||||||
- Consider supporting NON-STRICT mode (e.g. to handle HTML parsing).
|
|
||||||
Some elements e.g. br, hr, etc need not close and should be auto-closed
|
|
||||||
... (see http://www.w3.org/TR/html4/loose.dtd)
|
|
||||||
An expansive set of entities are pre-defined.
|
|
||||||
- Have easy way to create a HTML parser:
|
|
||||||
add a HTML() method to XMLHandle, that will set Strict=false, specify AutoClose,
|
|
||||||
and add HTML Entities to the list.
|
|
||||||
- Support validating element/attribute XMLName before writing it.
|
|
||||||
Keep this behind a flag, which is set to false by default (for performance).
|
|
||||||
type XMLHandle struct {
|
|
||||||
CheckName bool
|
|
||||||
}
|
|
||||||
|
|
||||||
ROADMAP (1 week):
|
|
||||||
- build encoder (1 day)
|
|
||||||
- build decoder (based off xmlParser) (1 day)
|
|
||||||
- implement xmlParser (2 days).
|
|
||||||
Look at encoding/xml for inspiration.
|
|
||||||
- integrate and TEST (1 day)
|
|
||||||
- write article and post it (1 day)
|
|
||||||
|
|
||||||
|
|
||||||
*/
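// Illustrative only, and an assumption drawn from the design notes above rather
// than a shipped API: a struct using the proposed namespace-aware tag layout
// (ns-qualified name, then the usual comma options) might look like this.
//
//	type OldJoke struct {
//		ID   uint8  `codec:"xid|http://ugorji.net/x-namespace id,omitempty,attr"`
//		Text string `codec:"text,cdata"`
//	}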
|
|
||||||
|
|
||||||
// ----------- PARSER -------------------
|
|
||||||
|
|
||||||
type xmlTokenType uint8
|
|
||||||
|
|
||||||
const (
|
|
||||||
_ xmlTokenType = iota << 1
|
|
||||||
xmlTokenElemStart
|
|
||||||
xmlTokenElemEnd
|
|
||||||
xmlTokenAttrKey
|
|
||||||
xmlTokenAttrVal
|
|
||||||
xmlTokenText
|
|
||||||
)
|
|
||||||
|
|
||||||
type xmlToken struct {
|
|
||||||
Type xmlTokenType
|
|
||||||
Value string
|
|
||||||
Namespace string // blank for AttrVal and Text
|
|
||||||
}
|
|
||||||
|
|
||||||
type xmlParser struct {
|
|
||||||
r decReader
|
|
||||||
toks []xmlToken // list of tokens.
|
|
||||||
ptr int // ptr into the toks slice
|
|
||||||
done bool // nothing else to parse. r now returns EOF.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (x *xmlParser) next() (t *xmlToken) {
|
|
||||||
// once x.done, or x.ptr == len(x.toks) == 0, then return nil (to signify finish)
|
|
||||||
if !x.done && len(x.toks) == 0 {
|
|
||||||
x.nextTag()
|
|
||||||
}
|
|
||||||
// parses one element at a time (into possible many tokens)
|
|
||||||
if x.ptr < len(x.toks) {
|
|
||||||
t = &(x.toks[x.ptr])
|
|
||||||
x.ptr++
|
|
||||||
if x.ptr == len(x.toks) {
|
|
||||||
x.ptr = 0
|
|
||||||
x.toks = x.toks[:0]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// nextTag parses the next element and fills up toks.
// It sets the done flag if/once EOF is reached.
|
|
||||||
func (x *xmlParser) nextTag() {
|
|
||||||
// TODO: implement.
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------- ENCODER -------------------
|
|
||||||
|
|
||||||
type xmlEncDriver struct {
|
|
||||||
e *Encoder
|
|
||||||
w encWriter
|
|
||||||
h *XMLHandle
|
|
||||||
b [64]byte // scratch
|
|
||||||
bs []byte // scratch
|
|
||||||
// s jsonStack
|
|
||||||
noBuiltInTypes
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------- DECODER -------------------
|
|
||||||
|
|
||||||
type xmlDecDriver struct {
|
|
||||||
d *Decoder
|
|
||||||
h *XMLHandle
|
|
||||||
r decReader // *bytesDecReader decReader
|
|
||||||
ct valueType // container type. one of unset, array or map.
|
|
||||||
bstr [8]byte // scratch used for string \UXXX parsing
|
|
||||||
b [64]byte // scratch
|
|
||||||
|
|
||||||
// wsSkipped bool // whitespace skipped
|
|
||||||
|
|
||||||
// s jsonStack
|
|
||||||
|
|
||||||
noBuiltInTypes
|
|
||||||
}
|
|
||||||
|
|
||||||
// DecodeNaked will decode into an XMLNode
|
|
||||||
|
|
||||||
// XMLName is a value object representing a namespace-aware NAME
|
|
||||||
type XMLName struct {
|
|
||||||
Local string
|
|
||||||
Space string
|
|
||||||
}
|
|
||||||
|
|
||||||
// XMLNode represents a "union" of the different types of XML Nodes.
|
|
||||||
// Only one of the fields (Text or *Element) is set.
|
|
||||||
type XMLNode struct {
|
|
||||||
Element *Element
|
|
||||||
Text string
|
|
||||||
}
|
|
||||||
|
|
||||||
// XMLElement is a value object representing a fully-parsed XML element.
|
|
||||||
type XMLElement struct {
|
|
||||||
Name Name
|
|
||||||
Attrs map[XMLName]string
|
|
||||||
// Children is a list of child nodes, each being a *XMLElement or string
|
|
||||||
Children []XMLNode
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------- HANDLE -------------------
|
|
||||||
|
|
||||||
type XMLHandle struct {
|
|
||||||
BasicHandle
|
|
||||||
textEncodingType
|
|
||||||
|
|
||||||
DefaultNS string
|
|
||||||
NS map[string]string // ns URI to key, for encoding
|
|
||||||
Entities map[string]string // entity representation to string, for encoding.
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *XMLHandle) newEncDriver(e *Encoder) encDriver {
|
|
||||||
return &xmlEncDriver{e: e, w: e.w, h: h}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *XMLHandle) newDecDriver(d *Decoder) decDriver {
|
|
||||||
// d := xmlDecDriver{r: r.(*bytesDecReader), h: h}
|
|
||||||
hd := xmlDecDriver{d: d, r: d.r, h: h}
|
|
||||||
hd.n.bytes = d.b[:]
|
|
||||||
return &hd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *XMLHandle) SetInterfaceExt(rt reflect.Type, tag uint64, ext InterfaceExt) (err error) {
|
|
||||||
return h.SetExt(rt, tag, &setExtWrapper{i: ext})
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ decDriver = (*xmlDecDriver)(nil)
|
|
||||||
var _ encDriver = (*xmlEncDriver)(nil)
|
|
|
@ -0,0 +1,15 @@
|
||||||
|
language: go
|
||||||
|
|
||||||
|
go:
|
||||||
|
- 1.7
|
||||||
|
- 1.8
|
||||||
|
- 1.9
|
||||||
|
|
||||||
|
before_install:
|
||||||
|
- go get github.com/axw/gocov/gocov
|
||||||
|
- go get github.com/mattn/goveralls
|
||||||
|
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
|
||||||
|
install:
|
||||||
|
- go get -u -v $(go list -f '{{join .Imports "\n"}}{{"\n"}}{{join .TestImports "\n"}}' ./... | sort | uniq | grep -v gopher-lua)
|
||||||
|
script:
|
||||||
|
- $HOME/gopath/bin/goveralls -service=travis-ci
|
|
@ -0,0 +1,10 @@
|
||||||
|
.PHONY: build test glua
|
||||||
|
|
||||||
|
build:
|
||||||
|
./_tools/go-inline *.go && go fmt . && go build
|
||||||
|
|
||||||
|
glua: *.go pm/*.go cmd/glua/glua.go
|
||||||
|
./_tools/go-inline *.go && go fmt . && go build cmd/glua/glua.go
|
||||||
|
|
||||||
|
test:
|
||||||
|
./_tools/go-inline *.go && go fmt . && go test
|
|
@ -0,0 +1,801 @@
|
||||||
|
===============================================================================
|
||||||
|
GopherLua: VM and compiler for Lua in Go.
|
||||||
|
===============================================================================
|
||||||
|
|
||||||
|
.. image:: https://godoc.org/github.com/yuin/gopher-lua?status.svg
|
||||||
|
:target: http://godoc.org/github.com/yuin/gopher-lua
|
||||||
|
|
||||||
|
.. image:: https://travis-ci.org/yuin/gopher-lua.svg
|
||||||
|
:target: https://travis-ci.org/yuin/gopher-lua
|
||||||
|
|
||||||
|
.. image:: https://coveralls.io/repos/yuin/gopher-lua/badge.svg
|
||||||
|
:target: https://coveralls.io/r/yuin/gopher-lua
|
||||||
|
|
||||||
|
.. image:: https://badges.gitter.im/Join%20Chat.svg
|
||||||
|
:alt: Join the chat at https://gitter.im/yuin/gopher-lua
|
||||||
|
:target: https://gitter.im/yuin/gopher-lua?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
|
||||||
|
|
||||||
|
|
|
||||||
|
|
||||||
|
|
||||||
|
GopherLua is a Lua5.1 VM and compiler written in Go. GopherLua has the same goal
|
||||||
|
as Lua: **Be a scripting language with extensible semantics** . It provides
|
||||||
|
Go APIs that allow you to easily embed a scripting language into your Go host
|
||||||
|
programs.
|
||||||
|
|
||||||
|
.. contents::
|
||||||
|
:depth: 1
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Design principle
|
||||||
|
----------------------------------------------------------------
|
||||||
|
|
||||||
|
- Be a scripting language with extensible semantics.
|
||||||
|
- User-friendly Go API
|
||||||
|
- A stack-based API like the one used in the original Lua
|
||||||
|
implementation would improve performance in GopherLua
|
||||||
|
(it would reduce memory allocations and concrete type <-> interface conversions).
|
||||||
|
However, the GopherLua API is **not** stack based;
|
||||||
|
GopherLua gives preference to user-friendliness over performance.
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
How about performance?
|
||||||
|
----------------------------------------------------------------
|
||||||
|
GopherLua is not fast, but it is not too slow either, I think.
|
||||||
|
|
||||||
|
GopherLua has roughly equivalent (or slightly better) performance to Python3 on micro benchmarks.
|
||||||
|
|
||||||
|
There are some benchmarks on the `wiki page <https://github.com/yuin/gopher-lua/wiki/Benchmarks>`_ .
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Installation
|
||||||
|
----------------------------------------------------------------
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
go get github.com/yuin/gopher-lua
|
||||||
|
|
||||||
|
GopherLua supports Go 1.7 and later.
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Usage
|
||||||
|
----------------------------------------------------------------
|
||||||
|
GopherLua APIs perform in much the same way as Lua, **but the stack is used only
|
||||||
|
for passing arguments and receiving returned values.**
|
||||||
|
|
||||||
|
GopherLua supports channel operations. See **"Goroutines"** section.
|
||||||
|
|
||||||
|
Import a package.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/yuin/gopher-lua"
|
||||||
|
)
|
||||||
|
|
||||||
|
Run scripts in the VM.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
if err := L.DoString(`print("hello")`); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
if err := L.DoFile("hello.lua"); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
Refer to `Lua Reference Manual <http://www.lua.org/manual/5.1/>`_ and `Go doc <http://godoc.org/github.com/yuin/gopher-lua>`_ for further information.
|
||||||
|
|
||||||
|
Note that elements that are not commented in the `Go doc <http://godoc.org/github.com/yuin/gopher-lua>`_ behave the same as described in the `Lua Reference Manual <http://www.lua.org/manual/5.1/>`_ , except that GopherLua uses objects instead of Lua stack indices.
|
||||||
|
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Data model
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
All data in a GopherLua program is an ``LValue`` . ``LValue`` is an interface
|
||||||
|
type that has the following methods (a short usage sketch follows this list).
|
||||||
|
|
||||||
|
- ``String() string``
|
||||||
|
- ``Type() LValueType``
|
||||||
|
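For illustration, a minimal sketch exercising both methods on a concrete value (``fmt`` is assumed to be imported, as in the other snippets; expected output is noted in the comments):

.. code-block:: go

    var lv lua.LValue = lua.LNumber(10)
    fmt.Println(lv.String())               // "10"
    fmt.Println(lv.Type() == lua.LTNumber) // "true"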
|
||||||
|
|
||||||
|
Objects that implement the ``LValue`` interface are:
|
||||||
|
|
||||||
|
================ ========================= ================== =======================
|
||||||
|
Type name Go type Type() value Constants
|
||||||
|
================ ========================= ================== =======================
|
||||||
|
``LNilType`` (constants) ``LTNil`` ``LNil``
|
||||||
|
``LBool`` (constants) ``LTBool`` ``LTrue``, ``LFalse``
|
||||||
|
``LNumber`` float64 ``LTNumber`` ``-``
|
||||||
|
``LString`` string ``LTString`` ``-``
|
||||||
|
``LFunction`` struct pointer ``LTFunction`` ``-``
|
||||||
|
``LUserData`` struct pointer ``LTUserData`` ``-``
|
||||||
|
``LState`` struct pointer ``LTThread`` ``-``
|
||||||
|
``LTable`` struct pointer ``LTTable`` ``-``
|
||||||
|
``LChannel`` chan LValue ``LTChannel`` ``-``
|
||||||
|
================ ========================= ================== =======================
|
||||||
|
|
||||||
|
You can test an object's type in the Go way (type assertion) or by using its ``Type()`` value.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
lv := L.Get(-1) // get the value at the top of the stack
|
||||||
|
if str, ok := lv.(lua.LString); ok {
|
||||||
|
// lv is LString
|
||||||
|
fmt.Println(string(str))
|
||||||
|
}
|
||||||
|
if lv.Type() != lua.LTString {
|
||||||
|
panic("string required.")
|
||||||
|
}
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
lv := L.Get(-1) // get the value at the top of the stack
|
||||||
|
if tbl, ok := lv.(*lua.LTable); ok {
|
||||||
|
// lv is LTable
|
||||||
|
fmt.Println(L.ObjLen(tbl))
|
||||||
|
}
|
||||||
|
|
||||||
|
Note that ``LBool`` , ``LNumber`` , and ``LString`` are not pointers.
|
||||||
|
|
||||||
|
To test for ``LNilType`` and ``LBool``, you **must** use the pre-defined constants.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
lv := L.Get(-1) // get the value at the top of the stack
|
||||||
|
|
||||||
|
if lv == lua.LTrue { // correct
|
||||||
|
}
|
||||||
|
|
||||||
|
if bl, ok := lv.(lua.LBool); ok && bool(bl) { // wrong
|
||||||
|
}
|
||||||
|
|
||||||
|
In Lua, both ``nil`` and ``false`` make a condition false. ``LVIsFalse`` and ``LVAsBool`` implement this specification.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
lv := L.Get(-1) // get the value at the top of the stack
|
||||||
|
if lua.LVIsFalse(lv) { // lv is nil or false
|
||||||
|
}
|
||||||
|
|
||||||
|
if lua.LVAsBool(lv) { // lv is neither nil nor false
|
||||||
|
}
|
||||||
|
|
||||||
|
Objects that are based on Go structs ( ``LFunction`` , ``LUserData`` , ``LTable`` )
|
||||||
|
have some public methods and fields. You can use these methods and fields for
|
||||||
|
performance and debugging, but there are some limitations.
|
||||||
|
|
||||||
|
- Metatables do not work.
|
||||||
|
- No error handling.
|
||||||
|
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Callstack & Registry size
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
The size of the callstack & registry is **fixed** , mainly for performance reasons.
|
||||||
|
You can change the default size of the callstack & registry.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
lua.RegistrySize = 1024 * 20
|
||||||
|
lua.CallStackSize = 1024
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
|
||||||
|
You can also create an LState object that has the callstack & registry size specified by ``Options`` .
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState(lua.Options{
|
||||||
|
CallStackSize: 120,
|
||||||
|
RegistrySize: 120*20,
|
||||||
|
})
|
||||||
|
|
||||||
|
An LState object that has been created by ``*LState#NewThread()`` inherits the callstack & registry size from the parent LState object.
|
||||||
|
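A brief sketch of that inheritance (the sizes are arbitrary; ``parent`` and ``child`` are illustrative names):

.. code-block:: go

    parent := lua.NewState(lua.Options{
        CallStackSize: 120,
        RegistrySize:  120 * 20,
    })
    defer parent.Close()

    // child uses the same callstack & registry sizes as parent.
    child, _ := parent.NewThread()
    _ = child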
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Miscellaneous lua.NewState options
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
- **Options.SkipOpenLibs bool (default false)**
|
||||||
|
- By default, GopherLua opens all built-in libraries when a new LState is created.
|
||||||
|
- You can skip this behaviour by setting this to ``true`` .
|
||||||
|
- Using the various `OpenXXX(L *LState) int` functions you can open only those libraries that you require; for an example, see below.
|
||||||
|
- **Options.IncludeGoStackTrace bool (default false)**
|
||||||
|
- By default, GopherLua does not show Go stack traces when panics occur.
|
||||||
|
- You can get Go stack traces by setting this to ``true`` (a short sketch combining both options follows this list).
|
||||||
|
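A minimal sketch combining both options when creating an LState (whether you want either flag set depends on your application):

.. code-block:: go

    L := lua.NewState(lua.Options{
        SkipOpenLibs:        true, // do not open the built-in libraries automatically
        IncludeGoStackTrace: true, // include Go stack traces when a panic occurs
    })
    defer L.Close()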
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
API
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Refer to `Lua Reference Manual <http://www.lua.org/manual/5.1/>`_ and `Go doc(LState methods) <http://godoc.org/github.com/yuin/gopher-lua>`_ for further information.
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Calling Go from Lua
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
func Double(L *lua.LState) int {
|
||||||
|
lv := L.ToInt(1) /* get argument */
|
||||||
|
L.Push(lua.LNumber(lv * 2)) /* push result */
|
||||||
|
return 1 /* number of results */
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
L.SetGlobal("double", L.NewFunction(Double)) /* Original lua_setglobal uses stack... */
|
||||||
|
}
|
||||||
|
|
||||||
|
.. code-block:: lua
|
||||||
|
|
||||||
|
print(double(20)) -- > "40"
|
||||||
|
|
||||||
|
Any function registered with GopherLua is a ``lua.LGFunction`` , defined in ``value.go`` .
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
type LGFunction func(*LState) int
|
||||||
|
|
||||||
|
Working with coroutines.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
co, _ := L.NewThread() /* create a new thread */
|
||||||
|
fn := L.GetGlobal("coro").(*lua.LFunction) /* get function from lua */
|
||||||
|
for {
|
||||||
|
st, err, values := L.Resume(co, fn)
|
||||||
|
if st == lua.ResumeError {
|
||||||
|
fmt.Println("yield break(error)")
|
||||||
|
fmt.Println(err.Error())
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, lv := range values {
|
||||||
|
fmt.Printf("%v : %v\n", i, lv)
|
||||||
|
}
|
||||||
|
|
||||||
|
if st == lua.ResumeOK {
|
||||||
|
fmt.Println("yield break(ok)")
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Opening a subset of builtin modules
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
|
||||||
|
The following demonstrates how to open a subset of the built-in modules in Lua, for example to avoid enabling modules with access to local files or system calls.
|
||||||
|
|
||||||
|
main.go
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
L := lua.NewState(lua.Options{SkipOpenLibs: true})
|
||||||
|
defer L.Close()
|
||||||
|
for _, pair := range []struct {
|
||||||
|
n string
|
||||||
|
f lua.LGFunction
|
||||||
|
}{
|
||||||
|
{lua.LoadLibName, lua.OpenPackage}, // Must be first
|
||||||
|
{lua.BaseLibName, lua.OpenBase},
|
||||||
|
{lua.TabLibName, lua.OpenTable},
|
||||||
|
} {
|
||||||
|
if err := L.CallByParam(lua.P{
|
||||||
|
Fn: L.NewFunction(pair.f),
|
||||||
|
NRet: 0,
|
||||||
|
Protect: true,
|
||||||
|
}, lua.LString(pair.n)); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := L.DoFile("main.lua"); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Creating a module in Go
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
|
||||||
|
mymodule.go
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
package mymodule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/yuin/gopher-lua"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Loader(L *lua.LState) int {
|
||||||
|
// register functions to the table
|
||||||
|
mod := L.SetFuncs(L.NewTable(), exports)
|
||||||
|
// register other stuff
|
||||||
|
L.SetField(mod, "name", lua.LString("value"))
|
||||||
|
|
||||||
|
// returns the module
|
||||||
|
L.Push(mod)
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
var exports = map[string]lua.LGFunction{
|
||||||
|
"myfunc": myfunc,
|
||||||
|
}
|
||||||
|
|
||||||
|
func myfunc(L *lua.LState) int {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
mymain.go
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"./mymodule"
|
||||||
|
"github.com/yuin/gopher-lua"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
L.PreloadModule("mymodule", mymodule.Loader)
|
||||||
|
if err := L.DoFile("main.lua"); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main.lua
|
||||||
|
|
||||||
|
.. code-block:: lua
|
||||||
|
|
||||||
|
local m = require("mymodule")
|
||||||
|
m.myfunc()
|
||||||
|
print(m.name)
|
||||||
|
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Calling Lua from Go
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
if err := L.DoFile("double.lua"); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
if err := L.CallByParam(lua.P{
|
||||||
|
Fn: L.GetGlobal("double"),
|
||||||
|
NRet: 1,
|
||||||
|
Protect: true,
|
||||||
|
}, lua.LNumber(10)); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
ret := L.Get(-1) // returned value
|
||||||
|
L.Pop(1) // remove received value
|
||||||
|
|
||||||
|
If ``Protect`` is false, GopherLua will panic instead of returning an ``error`` value.
|
||||||
|
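A hedged sketch of guarding an unprotected call with ``recover`` (``callUnprotected`` is a hypothetical helper, ``fmt`` is assumed to be imported, and ``double`` is the function defined in ``double.lua`` above):

.. code-block:: go

    func callUnprotected(L *lua.LState) (err error) {
        defer func() {
            // With Protect: false, a Lua error surfaces as a Go panic.
            if r := recover(); r != nil {
                err = fmt.Errorf("lua call panicked: %v", r)
            }
        }()
        return L.CallByParam(lua.P{
            Fn:      L.GetGlobal("double"),
            NRet:    1,
            Protect: false,
        }, lua.LNumber(10))
    }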
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
User-Defined types
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
You can extend GopherLua with new types written in Go.
|
||||||
|
``LUserData`` is provided for this purpose.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
type Person struct {
|
||||||
|
Name string
|
||||||
|
}
|
||||||
|
|
||||||
|
const luaPersonTypeName = "person"
|
||||||
|
|
||||||
|
// Registers my person type to given L.
|
||||||
|
func registerPersonType(L *lua.LState) {
|
||||||
|
mt := L.NewTypeMetatable(luaPersonTypeName)
|
||||||
|
L.SetGlobal("person", mt)
|
||||||
|
// static attributes
|
||||||
|
L.SetField(mt, "new", L.NewFunction(newPerson))
|
||||||
|
// methods
|
||||||
|
L.SetField(mt, "__index", L.SetFuncs(L.NewTable(), personMethods))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Constructor
|
||||||
|
func newPerson(L *lua.LState) int {
|
||||||
|
person := &Person{L.CheckString(1)}
|
||||||
|
ud := L.NewUserData()
|
||||||
|
ud.Value = person
|
||||||
|
L.SetMetatable(ud, L.GetTypeMetatable(luaPersonTypeName))
|
||||||
|
L.Push(ud)
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks whether the first lua argument is a *LUserData with *Person and returns this *Person.
|
||||||
|
func checkPerson(L *lua.LState) *Person {
|
||||||
|
ud := L.CheckUserData(1)
|
||||||
|
if v, ok := ud.Value.(*Person); ok {
|
||||||
|
return v
|
||||||
|
}
|
||||||
|
L.ArgError(1, "person expected")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var personMethods = map[string]lua.LGFunction{
|
||||||
|
"name": personGetSetName,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Getter and setter for the Person#Name
|
||||||
|
func personGetSetName(L *lua.LState) int {
|
||||||
|
p := checkPerson(L)
|
||||||
|
if L.GetTop() == 2 {
|
||||||
|
p.Name = L.CheckString(2)
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
L.Push(lua.LString(p.Name))
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
registerPersonType(L)
|
||||||
|
if err := L.DoString(`
|
||||||
|
p = person.new("Steeve")
|
||||||
|
print(p:name()) -- "Steeve"
|
||||||
|
p:name("Alice")
|
||||||
|
print(p:name()) -- "Alice"
|
||||||
|
`); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Terminating a running LState
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
GopherLua supports the `Go Concurrency Patterns: Context <https://blog.golang.org/context>`_ .
|
||||||
|
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
|
||||||
|
defer cancel()
|
||||||
|
// set the context to our LState
|
||||||
|
L.SetContext(ctx)
|
||||||
|
err := L.DoString(`
|
||||||
|
local clock = os.clock
|
||||||
|
function sleep(n) -- seconds
|
||||||
|
local t0 = clock()
|
||||||
|
while clock() - t0 <= n do end
|
||||||
|
end
|
||||||
|
sleep(3)
|
||||||
|
`)
|
||||||
|
// err.Error() contains "context deadline exceeded"
|
||||||
|
|
||||||
|
With coroutines
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
L.SetContext(ctx)
|
||||||
|
defer cancel()
|
||||||
|
L.DoString(`
|
||||||
|
function coro()
|
||||||
|
local i = 0
|
||||||
|
while true do
|
||||||
|
coroutine.yield(i)
|
||||||
|
i = i+1
|
||||||
|
end
|
||||||
|
return i
|
||||||
|
end
|
||||||
|
`)
|
||||||
|
co, cocancel := L.NewThread()
|
||||||
|
defer cocancel()
|
||||||
|
fn := L.GetGlobal("coro").(*LFunction)
|
||||||
|
|
||||||
|
_, err, values := L.Resume(co, fn) // err is nil
|
||||||
|
|
||||||
|
cancel() // cancel the parent context
|
||||||
|
|
||||||
|
_, err, values = L.Resume(co, fn) // err is NOT nil : child context was canceled
|
||||||
|
|
||||||
|
**Note that using a context causes performance degradation.**
|
||||||
|
|
||||||
|
.. code-block::
|
||||||
|
|
||||||
|
time ./glua-with-context.exe fib.lua
|
||||||
|
9227465
|
||||||
|
0.01s user 0.11s system 1% cpu 7.505 total
|
||||||
|
|
||||||
|
time ./glua-without-context.exe fib.lua
|
||||||
|
9227465
|
||||||
|
0.01s user 0.01s system 0% cpu 5.306 total
|
||||||
|
|
||||||
|
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
Goroutines
|
||||||
|
+++++++++++++++++++++++++++++++++++++++++
|
||||||
|
The ``LState`` is not goroutine-safe. It is recommended to use one LState per goroutine and communicate between goroutines by using channels.
|
||||||
|
|
||||||
|
Channels are represented by ``channel`` objects in GopherLua, and the ``channel`` table provides functions for performing channel operations.
|
||||||
|
|
||||||
|
Some objects cannot be sent over channels because they contain non-goroutine-safe objects:
|
||||||
|
|
||||||
|
- a thread (state)
|
||||||
|
- a function
|
||||||
|
- a userdata
|
||||||
|
- a table with a metatable
|
||||||
|
|
||||||
|
You **must not** send these objects from Go APIs to channels.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
func receiver(ch, quit chan lua.LValue) {
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
L.SetGlobal("ch", lua.LChannel(ch))
|
||||||
|
L.SetGlobal("quit", lua.LChannel(quit))
|
||||||
|
if err := L.DoString(`
|
||||||
|
local exit = false
|
||||||
|
while not exit do
|
||||||
|
channel.select(
|
||||||
|
{"|<-", ch, function(ok, v)
|
||||||
|
if not ok then
|
||||||
|
print("channel closed")
|
||||||
|
exit = true
|
||||||
|
else
|
||||||
|
print("received:", v)
|
||||||
|
end
|
||||||
|
end},
|
||||||
|
{"|<-", quit, function(ok, v)
|
||||||
|
print("quit")
|
||||||
|
exit = true
|
||||||
|
end}
|
||||||
|
)
|
||||||
|
end
|
||||||
|
`); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func sender(ch, quit chan lua.LValue) {
|
||||||
|
L := lua.NewState()
|
||||||
|
defer L.Close()
|
||||||
|
L.SetGlobal("ch", lua.LChannel(ch))
|
||||||
|
L.SetGlobal("quit", lua.LChannel(quit))
|
||||||
|
if err := L.DoString(`
|
||||||
|
ch:send("1")
|
||||||
|
ch:send("2")
|
||||||
|
`); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
ch <- lua.LString("3")
|
||||||
|
quit <- lua.LTrue
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
ch := make(chan lua.LValue)
|
||||||
|
quit := make(chan lua.LValue)
|
||||||
|
go receiver(ch, quit)
|
||||||
|
go sender(ch, quit)
|
||||||
|
time.Sleep(3 * time.Second)
|
||||||
|
}
|
||||||
|
|
||||||
|
'''''''''''''''
|
||||||
|
Go API
|
||||||
|
'''''''''''''''
|
||||||
|
|
||||||
|
``ToChannel``, ``CheckChannel``, ``OptChannel`` are available.
|
||||||
|
|
||||||
|
Refer to `Go doc(LState methods) <http://godoc.org/github.com/yuin/gopher-lua>`_ for further information.
|
||||||
|
|
||||||
|
'''''''''''''''
|
||||||
|
Lua API
|
||||||
|
'''''''''''''''
|
||||||
|
|
||||||
|
- **channel.make([buf:int]) -> ch:channel**
|
||||||
|
- Creates a new channel with a buffer size of ``buf``. By default, ``buf`` is 0.
|
||||||
|
|
||||||
|
- **channel.select(case:table [, case:table, case:table ...]) -> {index:int, recv:any, ok}**
|
||||||
|
- Same as the ``select`` statement in Go. It returns the index of the chosen case and, if that
|
||||||
|
case was a receive operation, the value received and a boolean indicating whether the channel has been closed.
|
||||||
|
- ``case`` is a table as outlined below.
|
||||||
|
- receiving: `{"|<-", ch:channel [, handler:func(ok, data:any)]}`
|
||||||
|
- sending: `{"<-|", ch:channel, data:any [, handler:func(data:any)]}`
|
||||||
|
- default: `{"default" [, handler:func()]}`
|
||||||
|
|
||||||
|
``channel.select`` examples:
|
||||||
|
|
||||||
|
.. code-block:: lua
|
||||||
|
|
||||||
|
local idx, recv, ok = channel.select(
|
||||||
|
{"|<-", ch1},
|
||||||
|
{"|<-", ch2}
|
||||||
|
)
|
||||||
|
if not ok then
|
||||||
|
print("closed")
|
||||||
|
elseif idx == 1 then -- received from ch1
|
||||||
|
print(recv)
|
||||||
|
elseif idx == 2 then -- received from ch2
|
||||||
|
print(recv)
|
||||||
|
end
|
||||||
|
|
||||||
|
.. code-block:: lua
|
||||||
|
|
||||||
|
channel.select(
|
||||||
|
{"|<-", ch1, function(ok, data)
|
||||||
|
print(ok, data)
|
||||||
|
end},
|
||||||
|
{"<-|", ch2, "value", function(data)
|
||||||
|
print(data)
|
||||||
|
end},
|
||||||
|
{"default", function()
|
||||||
|
print("default action")
|
||||||
|
end}
|
||||||
|
)
|
||||||
|
|
||||||
|
- **channel:send(data:any)**
|
||||||
|
- Sends ``data`` over the channel.
|
||||||
|
- **channel:receive() -> ok:bool, data:any**
|
||||||
|
- Receives data from the channel.
|
||||||
|
- **channel:close()**
|
||||||
|
- Closes the channel (the Go-side counterpart of these operations is sketched below).
|
||||||
|
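For reference, the Go side of these operations on a shared ``chan lua.LValue`` is plain Go channel code (an informal sketch; the Lua equivalents are noted in the comments, and ``fmt`` is assumed to be imported):

.. code-block:: go

    ch := make(chan lua.LValue)    // what channel.make() creates on the Lua side

    go func() {
        ch <- lua.LString("hello") // ch:send("hello")
        close(ch)                  // ch:close()
    }()

    v, ok := <-ch                  // ok, v = ch:receive() in Lua (ok is true here)
    fmt.Println(ok, v)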
|
||||||
|
''''''''''''''''''''''''''''''
|
||||||
|
The LState pool pattern
|
||||||
|
''''''''''''''''''''''''''''''
|
||||||
|
To create per-thread LState instances, you can use a ``sync.Pool`` -like mechanism.
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
type lStatePool struct {
|
||||||
|
m sync.Mutex
|
||||||
|
saved []*lua.LState
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pl *lStatePool) Get() *lua.LState {
|
||||||
|
pl.m.Lock()
|
||||||
|
defer pl.m.Unlock()
|
||||||
|
n := len(pl.saved)
|
||||||
|
if n == 0 {
|
||||||
|
return pl.New()
|
||||||
|
}
|
||||||
|
x := pl.saved[n-1]
|
||||||
|
pl.saved = pl.saved[0 : n-1]
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pl *lStatePool) New() *lua.LState {
|
||||||
|
L := lua.NewState()
|
||||||
|
// setting the L up here.
|
||||||
|
// load scripts, set global variables, share channels, etc...
|
||||||
|
return L
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pl *lStatePool) Put(L *lua.LState) {
|
||||||
|
pl.m.Lock()
|
||||||
|
defer pl.m.Unlock()
|
||||||
|
pl.saved = append(pl.saved, L)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pl *lStatePool) Shutdown() {
|
||||||
|
for _, L := range pl.saved {
|
||||||
|
L.Close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Global LState pool
|
||||||
|
var luaPool = &lStatePool{
|
||||||
|
saved: make([]*lua.LState, 0, 4),
|
||||||
|
}
|
||||||
|
|
||||||
|
Now, you can get per-thread LState objects from the ``luaPool`` .
|
||||||
|
|
||||||
|
.. code-block:: go
|
||||||
|
|
||||||
|
func MyWorker() {
|
||||||
|
L := luaPool.Get()
|
||||||
|
defer luaPool.Put(L)
|
||||||
|
/* your code here */
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
defer luaPool.Shutdown()
|
||||||
|
go MyWorker()
|
||||||
|
go MyWorker()
|
||||||
|
/* etc... */
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Differences between Lua and GopherLua
|
||||||
|
----------------------------------------------------------------
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Goroutines
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
- GopherLua supports channel operations.
|
||||||
|
- GopherLua has a type named ``channel``.
|
||||||
|
- The ``channel`` table provides functions for performing channel operations.
|
||||||
|
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Unsupported functions
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
- ``string.dump``
|
||||||
|
- ``os.setlocale``
|
||||||
|
- ``lua_Debug.namewhat``
|
||||||
|
- ``package.loadlib``
|
||||||
|
- debug hooks
|
||||||
|
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
Miscellaneous notes
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
- ``collectgarbage`` does not take any arguments and runs the garbage collector for the entire Go program.
|
||||||
|
- ``file:setvbuf`` does not support line buffering.
|
||||||
|
- Daylight saving time is not supported.
|
||||||
|
- GopherLua has a function to set an environment variable: ``os.setenv(name, value)``
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Standalone interpreter
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Lua has an interpreter called ``lua`` . GopherLua has an interpreter called ``glua`` .
|
||||||
|
|
||||||
|
.. code-block:: bash
|
||||||
|
|
||||||
|
go get github.com/yuin/gopher-lua/cmd/glua
|
||||||
|
|
||||||
|
``glua`` has the same options as ``lua`` .
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
How to Contribute
|
||||||
|
----------------------------------------------------------------
|
||||||
|
See `Guidelines for contributors <https://github.com/yuin/gopher-lua/tree/master/.github/CONTRIBUTING.md>`_ .
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Libraries for GopherLua
|
||||||
|
----------------------------------------------------------------
|
||||||
|
|
||||||
|
- `gopher-luar <https://github.com/layeh/gopher-luar>`_ : Custom type reflection for gopher-lua
|
||||||
|
- `gluamapper <https://github.com/yuin/gluamapper>`_ : Mapping a Lua table to a Go struct
|
||||||
|
- `gluare <https://github.com/yuin/gluare>`_ : Regular expressions for gopher-lua
|
||||||
|
- `gluahttp <https://github.com/cjoudrey/gluahttp>`_ : HTTP request module for gopher-lua
|
||||||
|
- `gopher-json <https://github.com/layeh/gopher-json>`_ : A simple JSON encoder/decoder for gopher-lua
|
||||||
|
- `gluayaml <https://github.com/kohkimakimoto/gluayaml>`_ : Yaml parser for gopher-lua
|
||||||
|
- `glua-lfs <https://github.com/layeh/gopher-lfs>`_ : Partially implements the luafilesystem module for gopher-lua
|
||||||
|
- `gluaurl <https://github.com/cjoudrey/gluaurl>`_ : A url parser/builder module for gopher-lua
|
||||||
|
- `gluahttpscrape <https://github.com/felipejfc/gluahttpscrape>`_ : A simple HTML scraper module for gopher-lua
|
||||||
|
- `gluaxmlpath <https://github.com/ailncode/gluaxmlpath>`_ : An xmlpath module for gopher-lua
|
||||||
|
- `gluasocket <https://github.com/BixData/gluasocket>`_ : A LuaSocket library for the GopherLua VM
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Donation
|
||||||
|
----------------------------------------------------------------
|
||||||
|
|
||||||
|
BTC: 1NEDSyUmo4SMTDP83JJQSWi1MvQUGGNMZB
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
License
|
||||||
|
----------------------------------------------------------------
|
||||||
|
MIT
|
||||||
|
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Author
|
||||||
|
----------------------------------------------------------------
|
||||||
|
Yusuke Inuzuka
|
|
@ -0,0 +1,4 @@
|
||||||
|
all : parser.go
|
||||||
|
|
||||||
|
parser.go : parser.go.y
|
||||||
|
goyacc -o $@ parser.go.y; [ -f y.output ] && ( rm -f y.output )
|
|
@ -0,0 +1,524 @@
|
||||||
|
%{
|
||||||
|
package parse
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/yuin/gopher-lua/ast"
|
||||||
|
)
|
||||||
|
%}
|
||||||
|
%type<stmts> chunk
|
||||||
|
%type<stmts> chunk1
|
||||||
|
%type<stmts> block
|
||||||
|
%type<stmt> stat
|
||||||
|
%type<stmts> elseifs
|
||||||
|
%type<stmt> laststat
|
||||||
|
%type<funcname> funcname
|
||||||
|
%type<funcname> funcname1
|
||||||
|
%type<exprlist> varlist
|
||||||
|
%type<expr> var
|
||||||
|
%type<namelist> namelist
|
||||||
|
%type<exprlist> exprlist
|
||||||
|
%type<expr> expr
|
||||||
|
%type<expr> string
|
||||||
|
%type<expr> prefixexp
|
||||||
|
%type<expr> functioncall
|
||||||
|
%type<expr> afunctioncall
|
||||||
|
%type<exprlist> args
|
||||||
|
%type<expr> function
|
||||||
|
%type<funcexpr> funcbody
|
||||||
|
%type<parlist> parlist
|
||||||
|
%type<expr> tableconstructor
|
||||||
|
%type<fieldlist> fieldlist
|
||||||
|
%type<field> field
|
||||||
|
%type<fieldsep> fieldsep
|
||||||
|
|
||||||
|
%union {
|
||||||
|
token ast.Token
|
||||||
|
|
||||||
|
stmts []ast.Stmt
|
||||||
|
stmt ast.Stmt
|
||||||
|
|
||||||
|
funcname *ast.FuncName
|
||||||
|
funcexpr *ast.FunctionExpr
|
||||||
|
|
||||||
|
exprlist []ast.Expr
|
||||||
|
expr ast.Expr
|
||||||
|
|
||||||
|
fieldlist []*ast.Field
|
||||||
|
field *ast.Field
|
||||||
|
fieldsep string
|
||||||
|
|
||||||
|
namelist []string
|
||||||
|
parlist *ast.ParList
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Reserved words */
|
||||||
|
%token<token> TAnd TBreak TDo TElse TElseIf TEnd TFalse TFor TFunction TIf TIn TLocal TNil TNot TOr TReturn TRepeat TThen TTrue TUntil TWhile
|
||||||
|
|
||||||
|
/* Literals */
|
||||||
|
%token<token> TEqeq TNeq TLte TGte T2Comma T3Comma TIdent TNumber TString '{' '('
|
||||||
|
|
||||||
|
/* Operators */
|
||||||
|
%left TOr
|
||||||
|
%left TAnd
|
||||||
|
%left '>' '<' TGte TLte TEqeq TNeq
|
||||||
|
%right T2Comma
|
||||||
|
%left '+' '-'
|
||||||
|
%left '*' '/' '%'
|
||||||
|
%right UNARY /* not # -(unary) */
|
||||||
|
%right '^'
|
||||||
|
|
||||||
|
%%
|
||||||
|
|
||||||
|
chunk:
|
||||||
|
chunk1 {
|
||||||
|
$$ = $1
|
||||||
|
if l, ok := yylex.(*Lexer); ok {
|
||||||
|
l.Stmts = $$
|
||||||
|
}
|
||||||
|
} |
|
||||||
|
chunk1 laststat {
|
||||||
|
$$ = append($1, $2)
|
||||||
|
if l, ok := yylex.(*Lexer); ok {
|
||||||
|
l.Stmts = $$
|
||||||
|
}
|
||||||
|
} |
|
||||||
|
chunk1 laststat ';' {
|
||||||
|
$$ = append($1, $2)
|
||||||
|
if l, ok := yylex.(*Lexer); ok {
|
||||||
|
l.Stmts = $$
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
chunk1:
|
||||||
|
{
|
||||||
|
$$ = []ast.Stmt{}
|
||||||
|
} |
|
||||||
|
chunk1 stat {
|
||||||
|
$$ = append($1, $2)
|
||||||
|
} |
|
||||||
|
chunk1 ';' {
|
||||||
|
$$ = $1
|
||||||
|
}
|
||||||
|
|
||||||
|
block:
|
||||||
|
chunk {
|
||||||
|
$$ = $1
|
||||||
|
}
|
||||||
|
|
||||||
|
stat:
|
||||||
|
varlist '=' exprlist {
|
||||||
|
$$ = &ast.AssignStmt{Lhs: $1, Rhs: $3}
|
||||||
|
$$.SetLine($1[0].Line())
|
||||||
|
} |
|
||||||
|
/* 'stat = functioncall' causes a reduce/reduce conflict */
|
||||||
|
prefixexp {
|
||||||
|
if _, ok := $1.(*ast.FuncCallExpr); !ok {
|
||||||
|
yylex.(*Lexer).Error("parse error")
|
||||||
|
} else {
|
||||||
|
$$ = &ast.FuncCallStmt{Expr: $1}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
}
|
||||||
|
} |
|
||||||
|
TDo block TEnd {
|
||||||
|
$$ = &ast.DoBlockStmt{Stmts: $2}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($3.Pos.Line)
|
||||||
|
} |
|
||||||
|
TWhile expr TDo block TEnd {
|
||||||
|
$$ = &ast.WhileStmt{Condition: $2, Stmts: $4}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($5.Pos.Line)
|
||||||
|
} |
|
||||||
|
TRepeat block TUntil expr {
|
||||||
|
$$ = &ast.RepeatStmt{Condition: $4, Stmts: $2}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($4.Line())
|
||||||
|
} |
|
||||||
|
TIf expr TThen block elseifs TEnd {
|
||||||
|
$$ = &ast.IfStmt{Condition: $2, Then: $4}
|
||||||
|
cur := $$
|
||||||
|
for _, elseif := range $5 {
|
||||||
|
cur.(*ast.IfStmt).Else = []ast.Stmt{elseif}
|
||||||
|
cur = elseif
|
||||||
|
}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($6.Pos.Line)
|
||||||
|
} |
|
||||||
|
TIf expr TThen block elseifs TElse block TEnd {
|
||||||
|
$$ = &ast.IfStmt{Condition: $2, Then: $4}
|
||||||
|
cur := $$
|
||||||
|
for _, elseif := range $5 {
|
||||||
|
cur.(*ast.IfStmt).Else = []ast.Stmt{elseif}
|
||||||
|
cur = elseif
|
||||||
|
}
|
||||||
|
cur.(*ast.IfStmt).Else = $7
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($8.Pos.Line)
|
||||||
|
} |
|
||||||
|
TFor TIdent '=' expr ',' expr TDo block TEnd {
|
||||||
|
$$ = &ast.NumberForStmt{Name: $2.Str, Init: $4, Limit: $6, Stmts: $8}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($9.Pos.Line)
|
||||||
|
} |
|
||||||
|
TFor TIdent '=' expr ',' expr ',' expr TDo block TEnd {
|
||||||
|
$$ = &ast.NumberForStmt{Name: $2.Str, Init: $4, Limit: $6, Step:$8, Stmts: $10}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($11.Pos.Line)
|
||||||
|
} |
|
||||||
|
TFor namelist TIn exprlist TDo block TEnd {
|
||||||
|
$$ = &ast.GenericForStmt{Names:$2, Exprs:$4, Stmts: $6}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($7.Pos.Line)
|
||||||
|
} |
|
||||||
|
TFunction funcname funcbody {
|
||||||
|
$$ = &ast.FuncDefStmt{Name: $2, Func: $3}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($3.LastLine())
|
||||||
|
} |
|
||||||
|
TLocal TFunction TIdent funcbody {
|
||||||
|
$$ = &ast.LocalAssignStmt{Names:[]string{$3.Str}, Exprs: []ast.Expr{$4}}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($4.LastLine())
|
||||||
|
} |
|
||||||
|
TLocal namelist '=' exprlist {
|
||||||
|
$$ = &ast.LocalAssignStmt{Names: $2, Exprs:$4}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TLocal namelist {
|
||||||
|
$$ = &ast.LocalAssignStmt{Names: $2, Exprs:[]ast.Expr{}}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
elseifs:
|
||||||
|
{
|
||||||
|
$$ = []ast.Stmt{}
|
||||||
|
} |
|
||||||
|
elseifs TElseIf expr TThen block {
|
||||||
|
$$ = append($1, &ast.IfStmt{Condition: $3, Then: $5})
|
||||||
|
$$[len($$)-1].SetLine($2.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
laststat:
|
||||||
|
TReturn {
|
||||||
|
$$ = &ast.ReturnStmt{Exprs:nil}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TReturn exprlist {
|
||||||
|
$$ = &ast.ReturnStmt{Exprs:$2}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TBreak {
|
||||||
|
$$ = &ast.BreakStmt{}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
funcname:
|
||||||
|
funcname1 {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
funcname1 ':' TIdent {
|
||||||
|
$$ = &ast.FuncName{Func:nil, Receiver:$1.Func, Method: $3.Str}
|
||||||
|
}
|
||||||
|
|
||||||
|
funcname1:
|
||||||
|
TIdent {
|
||||||
|
$$ = &ast.FuncName{Func: &ast.IdentExpr{Value:$1.Str}}
|
||||||
|
$$.Func.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
funcname1 '.' TIdent {
|
||||||
|
key:= &ast.StringExpr{Value:$3.Str}
|
||||||
|
key.SetLine($3.Pos.Line)
|
||||||
|
fn := &ast.AttrGetExpr{Object: $1.Func, Key: key}
|
||||||
|
fn.SetLine($3.Pos.Line)
|
||||||
|
$$ = &ast.FuncName{Func: fn}
|
||||||
|
}
|
||||||
|
|
||||||
|
varlist:
|
||||||
|
var {
|
||||||
|
$$ = []ast.Expr{$1}
|
||||||
|
} |
|
||||||
|
varlist ',' var {
|
||||||
|
$$ = append($1, $3)
|
||||||
|
}
|
||||||
|
|
||||||
|
var:
|
||||||
|
TIdent {
|
||||||
|
$$ = &ast.IdentExpr{Value:$1.Str}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
prefixexp '[' expr ']' {
|
||||||
|
$$ = &ast.AttrGetExpr{Object: $1, Key: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
prefixexp '.' TIdent {
|
||||||
|
key := &ast.StringExpr{Value:$3.Str}
|
||||||
|
key.SetLine($3.Pos.Line)
|
||||||
|
$$ = &ast.AttrGetExpr{Object: $1, Key: key}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
}
|
||||||
|
|
||||||
|
namelist:
|
||||||
|
TIdent {
|
||||||
|
$$ = []string{$1.Str}
|
||||||
|
} |
|
||||||
|
namelist ',' TIdent {
|
||||||
|
$$ = append($1, $3.Str)
|
||||||
|
}
|
||||||
|
|
||||||
|
exprlist:
|
||||||
|
expr {
|
||||||
|
$$ = []ast.Expr{$1}
|
||||||
|
} |
|
||||||
|
exprlist ',' expr {
|
||||||
|
$$ = append($1, $3)
|
||||||
|
}
|
||||||
|
|
||||||
|
expr:
|
||||||
|
TNil {
|
||||||
|
$$ = &ast.NilExpr{}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TFalse {
|
||||||
|
$$ = &ast.FalseExpr{}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TTrue {
|
||||||
|
$$ = &ast.TrueExpr{}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
TNumber {
|
||||||
|
$$ = &ast.NumberExpr{Value: $1.Str}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
T3Comma {
|
||||||
|
$$ = &ast.Comma3Expr{}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
function {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
prefixexp {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
string {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
tableconstructor {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
expr TOr expr {
|
||||||
|
$$ = &ast.LogicalOpExpr{Lhs: $1, Operator: "or", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr TAnd expr {
|
||||||
|
$$ = &ast.LogicalOpExpr{Lhs: $1, Operator: "and", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '>' expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: ">", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '<' expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: "<", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr TGte expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: ">=", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr TLte expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: "<=", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr TEqeq expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: "==", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr TNeq expr {
|
||||||
|
$$ = &ast.RelationalOpExpr{Lhs: $1, Operator: "~=", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr T2Comma expr {
|
||||||
|
$$ = &ast.StringConcatOpExpr{Lhs: $1, Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '+' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "+", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '-' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "-", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '*' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "*", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '/' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "/", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '%' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "%", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
expr '^' expr {
|
||||||
|
$$ = &ast.ArithmeticOpExpr{Lhs: $1, Operator: "^", Rhs: $3}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
'-' expr %prec UNARY {
|
||||||
|
$$ = &ast.UnaryMinusOpExpr{Expr: $2}
|
||||||
|
$$.SetLine($2.Line())
|
||||||
|
} |
|
||||||
|
TNot expr %prec UNARY {
|
||||||
|
$$ = &ast.UnaryNotOpExpr{Expr: $2}
|
||||||
|
$$.SetLine($2.Line())
|
||||||
|
} |
|
||||||
|
'#' expr %prec UNARY {
|
||||||
|
$$ = &ast.UnaryLenOpExpr{Expr: $2}
|
||||||
|
$$.SetLine($2.Line())
|
||||||
|
}
|
||||||
|
|
||||||
|
string:
|
||||||
|
TString {
|
||||||
|
$$ = &ast.StringExpr{Value: $1.Str}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
prefixexp:
|
||||||
|
var {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
afunctioncall {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
functioncall {
|
||||||
|
$$ = $1
|
||||||
|
} |
|
||||||
|
'(' expr ')' {
|
||||||
|
$$ = $2
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
afunctioncall:
|
||||||
|
'(' functioncall ')' {
|
||||||
|
$2.(*ast.FuncCallExpr).AdjustRet = true
|
||||||
|
$$ = $2
|
||||||
|
}
|
||||||
|
|
||||||
|
functioncall:
|
||||||
|
prefixexp args {
|
||||||
|
$$ = &ast.FuncCallExpr{Func: $1, Args: $2}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
} |
|
||||||
|
prefixexp ':' TIdent args {
|
||||||
|
$$ = &ast.FuncCallExpr{Method: $3.Str, Receiver: $1, Args: $4}
|
||||||
|
$$.SetLine($1.Line())
|
||||||
|
}
|
||||||
|
|
||||||
|
args:
|
||||||
|
'(' ')' {
|
||||||
|
if yylex.(*Lexer).PNewLine {
|
||||||
|
yylex.(*Lexer).TokenError($1, "ambiguous syntax (function call x new statement)")
|
||||||
|
}
|
||||||
|
$$ = []ast.Expr{}
|
||||||
|
} |
|
||||||
|
'(' exprlist ')' {
|
||||||
|
if yylex.(*Lexer).PNewLine {
|
||||||
|
yylex.(*Lexer).TokenError($1, "ambiguous syntax (function call x new statement)")
|
||||||
|
}
|
||||||
|
$$ = $2
|
||||||
|
} |
|
||||||
|
tableconstructor {
|
||||||
|
$$ = []ast.Expr{$1}
|
||||||
|
} |
|
||||||
|
string {
|
||||||
|
$$ = []ast.Expr{$1}
|
||||||
|
}
|
||||||
|
|
||||||
|
function:
|
||||||
|
TFunction funcbody {
|
||||||
|
$$ = &ast.FunctionExpr{ParList:$2.ParList, Stmts: $2.Stmts}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($2.LastLine())
|
||||||
|
}
|
||||||
|
|
||||||
|
funcbody:
|
||||||
|
'(' parlist ')' block TEnd {
|
||||||
|
$$ = &ast.FunctionExpr{ParList: $2, Stmts: $4}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($5.Pos.Line)
|
||||||
|
} |
|
||||||
|
'(' ')' block TEnd {
|
||||||
|
$$ = &ast.FunctionExpr{ParList: &ast.ParList{HasVargs: false, Names: []string{}}, Stmts: $3}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
$$.SetLastLine($4.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
parlist:
|
||||||
|
T3Comma {
|
||||||
|
$$ = &ast.ParList{HasVargs: true, Names: []string{}}
|
||||||
|
} |
|
||||||
|
namelist {
|
||||||
|
$$ = &ast.ParList{HasVargs: false, Names: []string{}}
|
||||||
|
$$.Names = append($$.Names, $1...)
|
||||||
|
} |
|
||||||
|
namelist ',' T3Comma {
|
||||||
|
$$ = &ast.ParList{HasVargs: true, Names: []string{}}
|
||||||
|
$$.Names = append($$.Names, $1...)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
tableconstructor:
|
||||||
|
'{' '}' {
|
||||||
|
$$ = &ast.TableExpr{Fields: []*ast.Field{}}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
'{' fieldlist '}' {
|
||||||
|
$$ = &ast.TableExpr{Fields: $2}
|
||||||
|
$$.SetLine($1.Pos.Line)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
fieldlist:
|
||||||
|
field {
|
||||||
|
$$ = []*ast.Field{$1}
|
||||||
|
} |
|
||||||
|
fieldlist fieldsep field {
|
||||||
|
$$ = append($1, $3)
|
||||||
|
} |
|
||||||
|
fieldlist fieldsep {
|
||||||
|
$$ = $1
|
||||||
|
}
|
||||||
|
|
||||||
|
field:
|
||||||
|
TIdent '=' expr {
|
||||||
|
$$ = &ast.Field{Key: &ast.StringExpr{Value:$1.Str}, Value: $3}
|
||||||
|
$$.Key.SetLine($1.Pos.Line)
|
||||||
|
} |
|
||||||
|
'[' expr ']' '=' expr {
|
||||||
|
$$ = &ast.Field{Key: $2, Value: $5}
|
||||||
|
} |
|
||||||
|
expr {
|
||||||
|
$$ = &ast.Field{Value: $1}
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldsep:
|
||||||
|
',' {
|
||||||
|
$$ = ","
|
||||||
|
} |
|
||||||
|
';' {
|
||||||
|
$$ = ";"
|
||||||
|
}
|
||||||
|
|
||||||
|
%%
|
||||||
|
|
||||||
|
func TokenName(c int) string {
|
||||||
|
if c >= TAnd && c-TAnd < len(yyToknames) {
|
||||||
|
if yyToknames[c-TAnd] != "" {
|
||||||
|
return yyToknames[c-TAnd]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return string([]byte{byte(c)})
|
||||||
|
}
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
# This source code refers to The Go Authors for copyright purposes.
|
||||||
|
# The master list of authors is in the main Go distribution,
|
||||||
|
# visible at http://tip.golang.org/AUTHORS.
|
|
@ -0,0 +1,3 @@
|
||||||
|
# This source code was written by the Go contributors.
|
||||||
|
# The master list of contributors is in the main Go distribution,
|
||||||
|
# visible at http://tip.golang.org/CONTRIBUTORS.
|
|
@ -0,0 +1,49 @@
|
||||||
|
# github.com/cupcake/rdb v0.0.0-20161107195141-43ba34106c76
|
||||||
|
github.com/cupcake/rdb
|
||||||
|
github.com/cupcake/rdb/crc64
|
||||||
|
github.com/cupcake/rdb/nopdecoder
|
||||||
|
# github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712
|
||||||
|
github.com/edsrzf/mmap-go
|
||||||
|
# github.com/glendc/gopher-json v0.0.0-20170414221815-dc4743023d0c
|
||||||
|
github.com/glendc/gopher-json
|
||||||
|
# github.com/golang/snappy v0.0.0-20170215233205-553a64147049
|
||||||
|
github.com/golang/snappy
|
||||||
|
# github.com/pelletier/go-toml v1.0.1
|
||||||
|
github.com/pelletier/go-toml
|
||||||
|
# github.com/peterh/liner v1.0.1-0.20171122030339-3681c2a91233
|
||||||
|
github.com/peterh/liner
|
||||||
|
# github.com/siddontang/go v0.0.0-20170517070808-cb568a3e5cc0
|
||||||
|
github.com/siddontang/go/bson
|
||||||
|
github.com/siddontang/go/filelock
|
||||||
|
github.com/siddontang/go/hack
|
||||||
|
github.com/siddontang/go/ioutil2
|
||||||
|
github.com/siddontang/go/log
|
||||||
|
github.com/siddontang/go/num
|
||||||
|
github.com/siddontang/go/snappy
|
||||||
|
github.com/siddontang/go/sync2
|
||||||
|
# github.com/siddontang/goredis v0.0.0-20150324035039-760763f78400
|
||||||
|
github.com/siddontang/goredis
|
||||||
|
# github.com/siddontang/rdb v0.0.0-20150307021120-fc89ed2e418d
|
||||||
|
github.com/siddontang/rdb
|
||||||
|
# github.com/syndtr/goleveldb v0.0.0-20160425020131-cfa635847112
|
||||||
|
github.com/syndtr/goleveldb/leveldb
|
||||||
|
github.com/syndtr/goleveldb/leveldb/cache
|
||||||
|
github.com/syndtr/goleveldb/leveldb/comparer
|
||||||
|
github.com/syndtr/goleveldb/leveldb/errors
|
||||||
|
github.com/syndtr/goleveldb/leveldb/filter
|
||||||
|
github.com/syndtr/goleveldb/leveldb/iterator
|
||||||
|
github.com/syndtr/goleveldb/leveldb/journal
|
||||||
|
github.com/syndtr/goleveldb/leveldb/memdb
|
||||||
|
github.com/syndtr/goleveldb/leveldb/opt
|
||||||
|
github.com/syndtr/goleveldb/leveldb/storage
|
||||||
|
github.com/syndtr/goleveldb/leveldb/table
|
||||||
|
github.com/syndtr/goleveldb/leveldb/util
|
||||||
|
# github.com/ugorji/go v0.0.0-20171122102828-84cb69a8af83
|
||||||
|
github.com/ugorji/go/codec
|
||||||
|
# github.com/yuin/gopher-lua v0.0.0-20171031051903-609c9cd26973
|
||||||
|
github.com/yuin/gopher-lua
|
||||||
|
github.com/yuin/gopher-lua/ast
|
||||||
|
github.com/yuin/gopher-lua/parse
|
||||||
|
github.com/yuin/gopher-lua/pm
|
||||||
|
# golang.org/x/net v0.0.0-20180906233101-161cd47e91fd
|
||||||
|
golang.org/x/net/context
|