pax_global_header 0000666 0000000 0000000 00000000064 13776023740 0014523 g ustar 00root root 0000000 0000000 52 comment=df3ea76ece10095374fd1c9a22a4fb85a44efc42
jsonparser-1.1.1/ 0000775 0000000 0000000 00000000000 13776023740 0013711 5 ustar 00root root 0000000 0000000 jsonparser-1.1.1/.github/ 0000775 0000000 0000000 00000000000 13776023740 0015251 5 ustar 00root root 0000000 0000000 jsonparser-1.1.1/.github/PULL_REQUEST_TEMPLATE.md 0000664 0000000 0000000 00000000357 13776023740 0021057 0 ustar 00root root 0000000 0000000 **Description**: What this PR does
**Benchmark before change**:
**Benchmark after change**:
For running benchmarks use:
```
go test -test.benchmem -bench JsonParser ./benchmark/ -benchtime 5s -v
# OR
make bench (runs inside docker)
``` jsonparser-1.1.1/.gitignore 0000664 0000000 0000000 00000000122 13776023740 0015674 0 ustar 00root root 0000000 0000000
*.test
*.out
*.mprof
.idea
vendor/github.com/buger/goterm/
prof.cpu
prof.mem
jsonparser-1.1.1/.travis.yml 0000664 0000000 0000000 00000000206 13776023740 0016020 0 ustar 00root root 0000000 0000000 language: go
arch:
- amd64
- ppc64le
go:
- 1.7.x
- 1.8.x
- 1.9.x
- 1.10.x
- 1.11.x
script: go test -v ./.
jsonparser-1.1.1/Dockerfile 0000664 0000000 0000000 00000000612 13776023740 0015702 0 ustar 00root root 0000000 0000000 FROM golang:1.6
RUN go get github.com/Jeffail/gabs
RUN go get github.com/bitly/go-simplejson
RUN go get github.com/pquerna/ffjson
RUN go get github.com/antonholmquist/jason
RUN go get github.com/mreiferson/go-ujson
RUN go get -tags=unsafe -u github.com/ugorji/go/codec
RUN go get github.com/mailru/easyjson
WORKDIR /go/src/github.com/buger/jsonparser
ADD . /go/src/github.com/buger/jsonparser jsonparser-1.1.1/LICENSE 0000664 0000000 0000000 00000002056 13776023740 0014721 0 ustar 00root root 0000000 0000000 MIT License
Copyright (c) 2016 Leonid Bugaev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
jsonparser-1.1.1/Makefile 0000664 0000000 0000000 00000001770 13776023740 0015356 0 ustar 00root root 0000000 0000000 SOURCE = parser.go
CONTAINER = jsonparser
SOURCE_PATH = /go/src/github.com/buger/jsonparser
BENCHMARK = JsonParser
BENCHTIME = 5s
TEST = .
DRUN = docker run -v `pwd`:$(SOURCE_PATH) -i -t $(CONTAINER)
build:
docker build -t $(CONTAINER) .
race:
$(DRUN) --env GORACE="halt_on_error=1" go test ./. $(ARGS) -v -race -timeout 15s
bench:
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -benchtime $(BENCHTIME) -v
bench_local:
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench . $(ARGS) -benchtime $(BENCHTIME) -v
profile:
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -memprofile mem.mprof -v
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -cpuprofile cpu.out -v
$(DRUN) go test $(LDFLAGS) -test.benchmem -bench $(BENCHMARK) ./benchmark/ $(ARGS) -c
test:
$(DRUN) go test $(LDFLAGS) ./ -run $(TEST) -timeout 10s $(ARGS) -v
fmt:
$(DRUN) go fmt ./...
vet:
$(DRUN) go vet ./.
bash:
$(DRUN) /bin/bash jsonparser-1.1.1/README.md 0000664 0000000 0000000 00000040350 13776023740 0015172 0 ustar 00root root 0000000 0000000 [](https://goreportcard.com/report/github.com/buger/jsonparser) 
# Alternative JSON parser for Go (10x times faster standard library)
It does not require you to know the structure of the payload (eg. create structs), and allows accessing fields by providing the path to them. It is up to **10 times faster** than standard `encoding/json` package (depending on payload size and usage), **allocates no memory**. See benchmarks below.
## Rationale
Originally I made this for a project that relies on a lot of 3rd party APIs that can be unpredictable and complex.
I love simplicity and prefer to avoid external dependecies. `encoding/json` requires you to know exactly your data structures, or if you prefer to use `map[string]interface{}` instead, it will be very slow and hard to manage.
I investigated what's on the market and found that most libraries are just wrappers around `encoding/json`, there is few options with own parsers (`ffjson`, `easyjson`), but they still requires you to create data structures.
Goal of this project is to push JSON parser to the performance limits and not sacrifice with compliance and developer user experience.
## Example
For the given JSON our goal is to extract the user's full name, number of github followers and avatar.
```go
import "github.com/buger/jsonparser"
...
data := []byte(`{
"person": {
"name": {
"first": "Leonid",
"last": "Bugaev",
"fullName": "Leonid Bugaev"
},
"github": {
"handle": "buger",
"followers": 109
},
"avatars": [
{ "url": "https://avatars1.githubusercontent.com/u/14009?v=3&s=460", "type": "thumbnail" }
]
},
"company": {
"name": "Acme"
}
}`)
// You can specify key path by providing arguments to Get function
jsonparser.Get(data, "person", "name", "fullName")
// There is `GetInt` and `GetBoolean` helpers if you exactly know key data type
jsonparser.GetInt(data, "person", "github", "followers")
// When you try to get object, it will return you []byte slice pointer to data containing it
// In `company` it will be `{"name": "Acme"}`
jsonparser.Get(data, "company")
// If the key doesn't exist it will throw an error
var size int64
if value, err := jsonparser.GetInt(data, "company", "size"); err == nil {
size = value
}
// You can use `ArrayEach` helper to iterate items [item1, item2 .... itemN]
jsonparser.ArrayEach(data, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
fmt.Println(jsonparser.Get(value, "url"))
}, "person", "avatars")
// Or use can access fields by index!
jsonparser.GetString(data, "person", "avatars", "[0]", "url")
// You can use `ObjectEach` helper to iterate objects { "key1":object1, "key2":object2, .... "keyN":objectN }
jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
fmt.Printf("Key: '%s'\n Value: '%s'\n Type: %s\n", string(key), string(value), dataType)
return nil
}, "person", "name")
// The most efficient way to extract multiple keys is `EachKey`
paths := [][]string{
[]string{"person", "name", "fullName"},
[]string{"person", "avatars", "[0]", "url"},
[]string{"company", "url"},
}
jsonparser.EachKey(data, func(idx int, value []byte, vt jsonparser.ValueType, err error){
switch idx {
case 0: // []string{"person", "name", "fullName"}
...
case 1: // []string{"person", "avatars", "[0]", "url"}
...
case 2: // []string{"company", "url"},
...
}
}, paths...)
// For more information see docs below
```
## Need to speedup your app?
I'm available for consulting and can help you push your app performance to the limits. Ping me at: leonsbox@gmail.com.
## Reference
Library API is really simple. You just need the `Get` method to perform any operation. The rest is just helpers around it.
You also can view API at [godoc.org](https://godoc.org/github.com/buger/jsonparser)
### **`Get`**
```go
func Get(data []byte, keys ...string) (value []byte, dataType jsonparser.ValueType, offset int, err error)
```
Receives data structure, and key path to extract value from.
Returns:
* `value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error
* `dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null`
* `offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper.
* `err` - If the key is not found or any other parsing issue, it should return error. If key not found it also sets `dataType` to `NotExist`
Accepts multiple keys to specify path to JSON value (in case of quering nested structures).
If no keys are provided it will try to extract the closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation.
Note that keys can be an array indexes: `jsonparser.GetInt("person", "avatars", "[0]", "url")`, pretty cool, yeah?
### **`GetString`**
```go
func GetString(data []byte, keys ...string) (val string, err error)
```
Returns strings properly handing escaped and unicode characters. Note that this will cause additional memory allocations.
### **`GetUnsafeString`**
If you need string in your app, and ready to sacrifice with support of escaped symbols in favor of speed. It returns string mapped to existing byte slice memory, without any allocations:
```go
s, _, := jsonparser.GetUnsafeString(data, "person", "name", "title")
switch s {
case 'CEO':
...
case 'Engineer'
...
...
}
```
Note that `unsafe` here means that your string will exist until GC will free underlying byte slice, for most of cases it means that you can use this string only in current context, and should not pass it anywhere externally: through channels or any other way.
### **`GetBoolean`**, **`GetInt`** and **`GetFloat`**
```go
func GetBoolean(data []byte, keys ...string) (val bool, err error)
func GetFloat(data []byte, keys ...string) (val float64, err error)
func GetInt(data []byte, keys ...string) (val int64, err error)
```
If you know the key type, you can use the helpers above.
If key data type do not match, it will return error.
### **`ArrayEach`**
```go
func ArrayEach(data []byte, cb func(value []byte, dataType jsonparser.ValueType, offset int, err error), keys ...string)
```
Needed for iterating arrays, accepts a callback function with the same return arguments as `Get`.
### **`ObjectEach`**
```go
func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error)
```
Needed for iterating object, accepts a callback function. Example:
```go
var handler func([]byte, []byte, jsonparser.ValueType, int) error
handler = func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
//do stuff here
}
jsonparser.ObjectEach(myJson, handler)
```
### **`EachKey`**
```go
func EachKey(data []byte, cb func(idx int, value []byte, dataType jsonparser.ValueType, err error), paths ...[]string)
```
When you need to read multiple keys, and you do not afraid of low-level API `EachKey` is your friend. It read payload only single time, and calls callback function once path is found. For example when you call multiple times `Get`, it has to process payload multiple times, each time you call it. Depending on payload `EachKey` can be multiple times faster than `Get`. Path can use nested keys as well!
```go
paths := [][]string{
[]string{"uuid"},
[]string{"tz"},
[]string{"ua"},
[]string{"st"},
}
var data SmallPayload
jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error){
switch idx {
case 0:
data.Uuid, _ = value
case 1:
v, _ := jsonparser.ParseInt(value)
data.Tz = int(v)
case 2:
data.Ua, _ = value
case 3:
v, _ := jsonparser.ParseInt(value)
data.St = int(v)
}
}, paths...)
```
### **`Set`**
```go
func Set(data []byte, setValue []byte, keys ...string) (value []byte, err error)
```
Receives existing data structure, key path to set, and value to set at that key. *This functionality is experimental.*
Returns:
* `value` - Pointer to original data structure with updated or added key value.
* `err` - If any parsing issue, it should return error.
Accepts multiple keys to specify path to JSON value (in case of updating or creating nested structures).
Note that keys can be an array indexes: `jsonparser.Set(data, []byte("http://github.com"), "person", "avatars", "[0]", "url")`
### **`Delete`**
```go
func Delete(data []byte, keys ...string) value []byte
```
Receives existing data structure, and key path to delete. *This functionality is experimental.*
Returns:
* `value` - Pointer to original data structure with key path deleted if it can be found. If there is no key path, then the whole data structure is deleted.
Accepts multiple keys to specify path to JSON value (in case of updating or creating nested structures).
Note that keys can be an array indexes: `jsonparser.Delete(data, "person", "avatars", "[0]", "url")`
## What makes it so fast?
* It does not rely on `encoding/json`, `reflection` or `interface{}`, the only real package dependency is `bytes`.
* Operates with JSON payload on byte level, providing you pointers to the original data structure: no memory allocation.
* No automatic type conversions, by default everything is a []byte, but it provides you value type, so you can convert by yourself (there is few helpers included).
* Does not parse full record, only keys you specified
## Benchmarks
There are 3 benchmark types, trying to simulate real-life usage for small, medium and large JSON payloads.
For each metric, the lower value is better. Time/op is in nanoseconds. Values better than standard encoding/json marked as bold text.
Benchmarks run on standard Linode 1024 box.
Compared libraries:
* https://golang.org/pkg/encoding/json
* https://github.com/Jeffail/gabs
* https://github.com/a8m/djson
* https://github.com/bitly/go-simplejson
* https://github.com/antonholmquist/jason
* https://github.com/mreiferson/go-ujson
* https://github.com/ugorji/go/codec
* https://github.com/pquerna/ffjson
* https://github.com/mailru/easyjson
* https://github.com/buger/jsonparser
#### TLDR
If you want to skip next sections we have 2 winner: `jsonparser` and `easyjson`.
`jsonparser` is up to 10 times faster than standard `encoding/json` package (depending on payload size and usage), and almost infinitely (literally) better in memory consumption because it operates with data on byte level, and provide direct slice pointers.
`easyjson` wins in CPU in medium tests and frankly i'm impressed with this package: it is remarkable results considering that it is almost drop-in replacement for `encoding/json` (require some code generation).
It's hard to fully compare `jsonparser` and `easyjson` (or `ffson`), they a true parsers and fully process record, unlike `jsonparser` which parse only keys you specified.
If you searching for replacement of `encoding/json` while keeping structs, `easyjson` is an amazing choice. If you want to process dynamic JSON, have memory constrains, or more control over your data you should try `jsonparser`.
`jsonparser` performance heavily depends on usage, and it works best when you do not need to process full record, only some keys. The more calls you need to make, the slower it will be, in contrast `easyjson` (or `ffjson`, `encoding/json`) parser record only 1 time, and then you can make as many calls as you want.
With great power comes great responsibility! :)
#### Small payload
Each test processes 190 bytes of http log as a JSON record.
It should read multiple fields.
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_small_payload_test.go
Library | time/op | bytes/op | allocs/op
------ | ------- | -------- | -------
encoding/json struct | 7879 | 880 | 18
encoding/json interface{} | 8946 | 1521 | 38
Jeffail/gabs | 10053 | 1649 | 46
bitly/go-simplejson | 10128 | 2241 | 36
antonholmquist/jason | 27152 | 7237 | 101
github.com/ugorji/go/codec | 8806 | 2176 | 31
mreiferson/go-ujson | **7008** | **1409** | 37
a8m/djson | 3862 | 1249 | 30
pquerna/ffjson | **3769** | **624** | **15**
mailru/easyjson | **2002** | **192** | **9**
buger/jsonparser | **1367** | **0** | **0**
buger/jsonparser (EachKey API) | **809** | **0** | **0**
Winners are ffjson, easyjson and jsonparser, where jsonparser is up to 9.8x faster than encoding/json and 4.6x faster than ffjson, and slightly faster than easyjson.
If you look at memory allocation, jsonparser has no rivals, as it makes no data copy and operates with raw []byte structures and pointers to it.
#### Medium payload
Each test processes a 2.4kb JSON record (based on Clearbit API).
It should read multiple nested fields and 1 array.
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_medium_payload_test.go
| Library | time/op | bytes/op | allocs/op |
| ------- | ------- | -------- | --------- |
| encoding/json struct | 57749 | 1336 | 29 |
| encoding/json interface{} | 79297 | 10627 | 215 |
| Jeffail/gabs | 83807 | 11202 | 235 |
| bitly/go-simplejson | 88187 | 17187 | 220 |
| antonholmquist/jason | 94099 | 19013 | 247 |
| github.com/ugorji/go/codec | 114719 | 6712 | 152 |
| mreiferson/go-ujson | **56972** | 11547 | 270 |
| a8m/djson | 28525 | 10196 | 198 |
| pquerna/ffjson | **20298** | **856** | **20** |
| mailru/easyjson | **10512** | **336** | **12** |
| buger/jsonparser | **15955** | **0** | **0** |
| buger/jsonparser (EachKey API) | **8916** | **0** | **0** |
The difference between ffjson and jsonparser in CPU usage is smaller, while the memory consumption difference is growing. On the other hand `easyjson` shows remarkable performance for medium payload.
`gabs`, `go-simplejson` and `jason` are based on encoding/json and map[string]interface{} and actually only helpers for unstructured JSON, their performance correlate with `encoding/json interface{}`, and they will skip next round.
`go-ujson` while have its own parser, shows same performance as `encoding/json`, also skips next round. Same situation with `ugorji/go/codec`, but it showed unexpectedly bad performance for complex payloads.
#### Large payload
Each test processes a 24kb JSON record (based on Discourse API)
It should read 2 arrays, and for each item in array get a few fields.
Basically it means processing a full JSON file.
https://github.com/buger/jsonparser/blob/master/benchmark/benchmark_large_payload_test.go
| Library | time/op | bytes/op | allocs/op |
| --- | --- | --- | --- |
| encoding/json struct | 748336 | 8272 | 307 |
| encoding/json interface{} | 1224271 | 215425 | 3395 |
| a8m/djson | 510082 | 213682 | 2845 |
| pquerna/ffjson | **312271** | **7792** | **298** |
| mailru/easyjson | **154186** | **6992** | **288** |
| buger/jsonparser | **85308** | **0** | **0** |
`jsonparser` now is a winner, but do not forget that it is way more lightweight parser than `ffson` or `easyjson`, and they have to parser all the data, while `jsonparser` parse only what you need. All `ffjson`, `easysjon` and `jsonparser` have their own parsing code, and does not depend on `encoding/json` or `interface{}`, thats one of the reasons why they are so fast. `easyjson` also use a bit of `unsafe` package to reduce memory consuption (in theory it can lead to some unexpected GC issue, but i did not tested enough)
Also last benchmark did not included `EachKey` test, because in this particular case we need to read lot of Array values, and using `ArrayEach` is more efficient.
## Questions and support
All bug-reports and suggestions should go though Github Issues.
## Contributing
1. Fork it
2. Create your feature branch (git checkout -b my-new-feature)
3. Commit your changes (git commit -am 'Added some feature')
4. Push to the branch (git push origin my-new-feature)
5. Create new Pull Request
## Development
All my development happens using Docker, and repo include some Make tasks to simplify development.
* `make build` - builds docker image, usually can be called only once
* `make test` - run tests
* `make fmt` - run go fmt
* `make bench` - run benchmarks (if you need to run only single benchmark modify `BENCHMARK` variable in make file)
* `make profile` - runs benchmark and generate 3 files- `cpu.out`, `mem.mprof` and `benchmark.test` binary, which can be used for `go tool pprof`
* `make bash` - enter container (i use it for running `go tool pprof` above)
jsonparser-1.1.1/benchmark/ 0000775 0000000 0000000 00000000000 13776023740 0015643 5 ustar 00root root 0000000 0000000 jsonparser-1.1.1/benchmark/benchmark.go 0000664 0000000 0000000 00000075727 13776023740 0020146 0 ustar 00root root 0000000 0000000 package benchmark
/*
Small paylod, http log like structure. Size: 190 bytes
*/
var smallFixture []byte = []byte(`{
"st": 1,
"sid": 486,
"tt": "active",
"gr": 0,
"uuid": "de305d54-75b4-431b-adb2-eb6b9e546014",
"ip": "127.0.0.1",
"ua": "user_agent",
"tz": -6,
"v": 1
}`)
type SmallPayload struct {
St int
Sid int
Tt string
Gr int
Uuid string
Ip string
Ua string
Tz int
V int
}
/*
Medium payload (based on Clearbit API response)
*/
type CBAvatar struct {
Url string
}
type CBGravatar struct {
Avatars []*CBAvatar
}
type CBGithub struct {
Followers int
}
type CBName struct {
FullName string
}
type CBPerson struct {
Name *CBName
Github *CBGithub
Gravatar *CBGravatar
}
type MediumPayload struct {
Person *CBPerson
Company map[string]interface{}
}
// Reponse from Clearbit API. Size: 2.4kb
var mediumFixture []byte = []byte(`{
"person": {
"id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
"name": {
"fullName": "Leonid Bugaev",
"givenName": "Leonid",
"familyName": "Bugaev"
},
"email": "leonsbox@gmail.com",
"gender": "male",
"location": "Saint Petersburg, Saint Petersburg, RU",
"geo": {
"city": "Saint Petersburg",
"state": "Saint Petersburg",
"country": "Russia",
"lat": 59.9342802,
"lng": 30.3350986
},
"bio": "Senior engineer at Granify.com",
"site": "http://flickfaver.com",
"avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03",
"employment": {
"name": "www.latera.ru",
"title": "Software Engineer",
"domain": "gmail.com"
},
"facebook": {
"handle": "leonid.bugaev"
},
"github": {
"handle": "buger",
"id": 14009,
"avatar": "https://avatars.githubusercontent.com/u/14009?v=3",
"company": "Granify",
"blog": "http://leonsbox.com",
"followers": 95,
"following": 10
},
"twitter": {
"handle": "flickfaver",
"id": 77004410,
"bio": null,
"followers": 2,
"following": 1,
"statuses": 5,
"favorites": 0,
"location": "",
"site": "http://flickfaver.com",
"avatar": null
},
"linkedin": {
"handle": "in/leonidbugaev"
},
"googleplus": {
"handle": null
},
"angellist": {
"handle": "leonid-bugaev",
"id": 61541,
"bio": "Senior engineer at Granify.com",
"blog": "http://buger.github.com",
"site": "http://buger.github.com",
"followers": 41,
"avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390"
},
"klout": {
"handle": null,
"score": null
},
"foursquare": {
"handle": null
},
"aboutme": {
"handle": "leonid.bugaev",
"bio": null,
"avatar": null
},
"gravatar": {
"handle": "buger",
"urls": [
],
"avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
"avatars": [
{
"url": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510",
"type": "thumbnail"
}
]
},
"fuzzy": false
},
"company": null
}`)
/*
Large payload, based on Discourse API. Size: 28kb
*/
type DSUser struct {
Username string
}
type DSTopic struct {
Id int
Slug string
}
type DSTopicsList struct {
Topics []*DSTopic
MoreTopicsUrl string
}
type LargePayload struct {
Users []*DSUser
Topics *DSTopicsList
}
var largeFixture []byte = []byte(`
{"users":[{"id":-1,"username":"system","avatar_template":"/user_avatar/discourse.metabase.com/system/{size}/6_1.png"},{"id":89,"username":"zergot","avatar_template":"https://avatars.discourse.org/v2/letter/z/0ea827/{size}.png"},{"id":1,"username":"sameer","avatar_template":"https://avatars.discourse.org/v2/letter/s/bbce88/{size}.png"},{"id":84,"username":"HenryMirror","avatar_template":"https://avatars.discourse.org/v2/letter/h/ecd19e/{size}.png"},{"id":73,"username":"fimp","avatar_template":"https://avatars.discourse.org/v2/letter/f/ee59a6/{size}.png"},{"id":14,"username":"agilliland","avatar_template":"/user_avatar/discourse.metabase.com/agilliland/{size}/26_1.png"},{"id":87,"username":"amir","avatar_template":"https://avatars.discourse.org/v2/letter/a/c37758/{size}.png"},{"id":82,"username":"waseem","avatar_template":"https://avatars.discourse.org/v2/letter/w/9dc877/{size}.png"},{"id":78,"username":"tovenaar","avatar_template":"https://avatars.discourse.org/v2/letter/t/9de0a6/{size}.png"},{"id":74,"username":"Ben","avatar_template":"https://avatars.discourse.org/v2/letter/b/df788c/{size}.png"},{"id":71,"username":"MarkLaFay","avatar_template":"https://avatars.discourse.org/v2/letter/m/3bc359/{size}.png"},{"id":72,"username":"camsaul","avatar_template":"/user_avatar/discourse.metabase.com/camsaul/{size}/70_1.png"},{"id":53,"username":"mhjb","avatar_template":"/user_avatar/discourse.metabase.com/mhjb/{size}/54_1.png"},{"id":58,"username":"jbwiv","avatar_template":"https://avatars.discourse.org/v2/letter/j/6bbea6/{size}.png"},{"id":70,"username":"Maggs","avatar_template":"https://avatars.discourse.org/v2/letter/m/bbce88/{size}.png"},{"id":69,"username":"andrefaria","avatar_template":"/user_avatar/discourse.metabase.com/andrefaria/{size}/65_1.png"},{"id":60,"username":"bencarter78","avatar_template":"/user_avatar/discourse.metabase.com/bencarter78/{size}/59_1.png"},{"id":55,"username":"vikram","avatar_template":"https://avatars.discourse.org/v2/letter/v/e47774/{size}
.png"},{"id":68,"username":"edchan77","avatar_template":"/user_avatar/discourse.metabase.com/edchan77/{size}/66_1.png"},{"id":9,"username":"karthikd","avatar_template":"https://avatars.discourse.org/v2/letter/k/cab0a1/{size}.png"},{"id":23,"username":"arthurz","avatar_template":"/user_avatar/discourse.metabase.com/arthurz/{size}/32_1.png"},{"id":3,"username":"tom","avatar_template":"/user_avatar/discourse.metabase.com/tom/{size}/21_1.png"},{"id":50,"username":"LeoNogueira","avatar_template":"/user_avatar/discourse.metabase.com/leonogueira/{size}/52_1.png"},{"id":66,"username":"ss06vi","avatar_template":"https://avatars.discourse.org/v2/letter/s/3ab097/{size}.png"},{"id":34,"username":"mattcollins","avatar_template":"/user_avatar/discourse.metabase.com/mattcollins/{size}/41_1.png"},{"id":51,"username":"krmmalik","avatar_template":"/user_avatar/discourse.metabase.com/krmmalik/{size}/53_1.png"},{"id":46,"username":"odysseas","avatar_template":"https://avatars.discourse.org/v2/letter/o/5f8ce5/{size}.png"},{"id":5,"username":"jonthewayne","avatar_template":"/user_avatar/discourse.metabase.com/jonthewayne/{size}/18_1.png"},{"id":11,"username":"anandiyer","avatar_template":"/user_avatar/discourse.metabase.com/anandiyer/{size}/23_1.png"},{"id":25,"username":"alnorth","avatar_template":"/user_avatar/discourse.metabase.com/alnorth/{size}/34_1.png"},{"id":52,"username":"j_at_svg","avatar_template":"https://avatars.discourse.org/v2/letter/j/96bed5/{size}.png"},{"id":42,"username":"styts","avatar_template":"/user_avatar/discourse.metabase.com/styts/{size}/47_1.png"}],"topics":{"can_create_topic":false,"more_topics_url":"/c/uncategorized/l/latest?page=1","draft":null,"draft_key":"new_topic","draft_sequence":null,"per_page":30,"topics":[{"id":8,"title":"Welcome to Metabase's Discussion Forum","fancy_title":"Welcome to Metabase’s Discussion 
Forum","slug":"welcome-to-metabases-discussion-forum","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":"/images/welcome/discourse-edit-post-animated.gif","created_at":"2015-10-17T00:14:49.526Z","last_posted_at":"2015-10-17T00:14:49.557Z","bumped":true,"bumped_at":"2015-10-21T02:32:22.486Z","unseen":false,"pinned":true,"unpinned":null,"excerpt":"Welcome to Metabase's discussion forum. This is a place to get help on installation, setting up as well as sharing tips and tricks.","visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":197,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"system","category_id":1,"pinned_globally":true,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":-1}]},{"id":169,"title":"Formatting Dates","fancy_title":"Formatting Dates","slug":"formatting-dates","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2016-01-14T06:30:45.311Z","last_posted_at":"2016-01-14T06:30:45.397Z","bumped":true,"bumped_at":"2016-01-14T06:30:45.397Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":11,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"zergot","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":89}]},{"id":168,"title":"Setting for google api key","fancy_title":"Setting for google api 
key","slug":"setting-for-google-api-key","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":null,"created_at":"2016-01-13T17:14:31.799Z","last_posted_at":"2016-01-14T06:24:03.421Z","bumped":true,"bumped_at":"2016-01-14T06:24:03.421Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":16,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"zergot","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":89}]},{"id":167,"title":"Cannot see non-US timezones on the admin","fancy_title":"Cannot see non-US timezones on the admin","slug":"cannot-see-non-us-timezones-on-the-admin","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2016-01-13T17:07:36.764Z","last_posted_at":"2016-01-13T17:07:36.831Z","bumped":true,"bumped_at":"2016-01-13T17:07:36.831Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":11,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"zergot","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":89}]},{"id":164,"title":"External (Metabase level) linkages in data schema","fancy_title":"External (Metabase level) linkages in data 
schema","slug":"external-metabase-level-linkages-in-data-schema","posts_count":4,"reply_count":1,"highest_post_number":4,"image_url":null,"created_at":"2016-01-11T13:51:02.286Z","last_posted_at":"2016-01-12T11:06:37.259Z","bumped":true,"bumped_at":"2016-01-12T11:06:37.259Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":32,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"zergot","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":89},{"extras":null,"description":"Frequent Poster","user_id":1}]},{"id":155,"title":"Query working on \"Questions\" but not in \"Pulses\"","fancy_title":"Query working on “Questions” but not in “Pulses”","slug":"query-working-on-questions-but-not-in-pulses","posts_count":3,"reply_count":0,"highest_post_number":3,"image_url":null,"created_at":"2016-01-01T14:06:10.083Z","last_posted_at":"2016-01-08T22:37:51.772Z","bumped":true,"bumped_at":"2016-01-08T22:37:51.772Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":72,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"agilliland","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":84},{"extras":null,"description":"Frequent Poster","user_id":73},{"extras":"latest","description":"Most Recent Poster","user_id":14}]},{"id":161,"title":"Pulses posted to Slack don't show question output","fancy_title":"Pulses posted to Slack don’t show question 
output","slug":"pulses-posted-to-slack-dont-show-question-output","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":"/uploads/default/original/1X/9d2806517bf3598b10be135b2c58923b47ba23e7.png","created_at":"2016-01-08T22:09:58.205Z","last_posted_at":"2016-01-08T22:28:44.685Z","bumped":true,"bumped_at":"2016-01-08T22:28:44.685Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":34,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":87},{"extras":"latest","description":"Most Recent Poster","user_id":1}]},{"id":152,"title":"Should we build Kafka connecter or Kafka plugin","fancy_title":"Should we build Kafka connecter or Kafka plugin","slug":"should-we-build-kafka-connecter-or-kafka-plugin","posts_count":4,"reply_count":1,"highest_post_number":4,"image_url":null,"created_at":"2015-12-28T20:37:23.501Z","last_posted_at":"2015-12-31T18:16:45.477Z","bumped":true,"bumped_at":"2015-12-31T18:16:45.477Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":84,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":82},{"extras":"latest","description":"Most Recent Poster, Frequent Poster","user_id":1}]},{"id":147,"title":"Change X and Y on graph","fancy_title":"Change X and Y on 
graph","slug":"change-x-and-y-on-graph","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2015-12-21T17:52:46.581Z","last_posted_at":"2015-12-21T17:52:46.684Z","bumped":true,"bumped_at":"2015-12-21T18:19:13.003Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":68,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"tovenaar","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":78}]},{"id":142,"title":"Issues sending mail via office365 relay","fancy_title":"Issues sending mail via office365 relay","slug":"issues-sending-mail-via-office365-relay","posts_count":5,"reply_count":2,"highest_post_number":5,"image_url":null,"created_at":"2015-12-16T10:38:47.315Z","last_posted_at":"2015-12-21T09:26:27.167Z","bumped":true,"bumped_at":"2015-12-21T09:26:27.167Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":122,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"Ben","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":74},{"extras":null,"description":"Frequent Poster","user_id":1}]},{"id":137,"title":"I see triplicates of my mongoDB collections","fancy_title":"I see triplicates of my mongoDB 
collections","slug":"i-see-triplicates-of-my-mongodb-collections","posts_count":3,"reply_count":0,"highest_post_number":3,"image_url":null,"created_at":"2015-12-14T13:33:03.426Z","last_posted_at":"2015-12-17T18:40:05.487Z","bumped":true,"bumped_at":"2015-12-17T18:40:05.487Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":97,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"MarkLaFay","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":71},{"extras":null,"description":"Frequent Poster","user_id":14}]},{"id":140,"title":"Google Analytics plugin","fancy_title":"Google Analytics plugin","slug":"google-analytics-plugin","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2015-12-15T13:00:55.644Z","last_posted_at":"2015-12-15T13:00:55.705Z","bumped":true,"bumped_at":"2015-12-15T13:00:55.705Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":105,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"fimp","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":73}]},{"id":138,"title":"With-mongo-connection failed: bad connection details:","fancy_title":"With-mongo-connection failed: bad connection 
details:","slug":"with-mongo-connection-failed-bad-connection-details","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2015-12-14T17:28:11.041Z","last_posted_at":"2015-12-14T17:28:11.111Z","bumped":true,"bumped_at":"2015-12-14T17:28:11.111Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":56,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"MarkLaFay","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":71}]},{"id":133,"title":"\"We couldn't understand your question.\" when I query mongoDB","fancy_title":"“We couldn’t understand your question.” when I query mongoDB","slug":"we-couldnt-understand-your-question-when-i-query-mongodb","posts_count":3,"reply_count":0,"highest_post_number":3,"image_url":null,"created_at":"2015-12-11T17:38:30.576Z","last_posted_at":"2015-12-14T13:31:26.395Z","bumped":true,"bumped_at":"2015-12-14T13:31:26.395Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":107,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"MarkLaFay","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":71},{"extras":null,"description":"Frequent Poster","user_id":72}]},{"id":129,"title":"My bar charts are all thin","fancy_title":"My bar charts are all 
thin","slug":"my-bar-charts-are-all-thin","posts_count":4,"reply_count":1,"highest_post_number":4,"image_url":"/uploads/default/original/1X/41bcf3b2a00dc7cfaff01cb3165d35d32a85bf1d.png","created_at":"2015-12-09T22:09:56.394Z","last_posted_at":"2015-12-11T19:00:45.289Z","bumped":true,"bumped_at":"2015-12-11T19:00:45.289Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":116,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"mhjb","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":53},{"extras":null,"description":"Frequent Poster","user_id":1}]},{"id":106,"title":"What is the expected return order of columns for graphing results when using raw SQL?","fancy_title":"What is the expected return order of columns for graphing results when using raw SQL?","slug":"what-is-the-expected-return-order-of-columns-for-graphing-results-when-using-raw-sql","posts_count":3,"reply_count":0,"highest_post_number":3,"image_url":null,"created_at":"2015-11-24T19:07:14.561Z","last_posted_at":"2015-12-11T17:04:14.149Z","bumped":true,"bumped_at":"2015-12-11T17:04:14.149Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":153,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"jbwiv","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":58},{"extras":null,"description":"Frequent Poster","user_id":14}]},{"id":131,"title":"Set site url from admin panel","fancy_title":"Set site url from admin 
panel","slug":"set-site-url-from-admin-panel","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":null,"created_at":"2015-12-10T06:22:46.042Z","last_posted_at":"2015-12-10T19:12:57.449Z","bumped":true,"bumped_at":"2015-12-10T19:12:57.449Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":77,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":70},{"extras":"latest","description":"Most Recent Poster","user_id":1}]},{"id":127,"title":"Internationalization (i18n)","fancy_title":"Internationalization (i18n)","slug":"internationalization-i18n","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":null,"created_at":"2015-12-08T16:55:37.397Z","last_posted_at":"2015-12-09T16:49:55.816Z","bumped":true,"bumped_at":"2015-12-09T16:49:55.816Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":85,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"agilliland","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":69},{"extras":"latest","description":"Most Recent Poster","user_id":14}]},{"id":109,"title":"Returning raw data with no filters always returns We couldn't understand your question","fancy_title":"Returning raw data with no filters always returns We couldn’t understand your 
question","slug":"returning-raw-data-with-no-filters-always-returns-we-couldnt-understand-your-question","posts_count":3,"reply_count":1,"highest_post_number":3,"image_url":null,"created_at":"2015-11-25T21:35:01.315Z","last_posted_at":"2015-12-09T10:26:12.255Z","bumped":true,"bumped_at":"2015-12-09T10:26:12.255Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":133,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"bencarter78","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":60},{"extras":null,"description":"Frequent Poster","user_id":14}]},{"id":103,"title":"Support for Cassandra?","fancy_title":"Support for Cassandra?","slug":"support-for-cassandra","posts_count":5,"reply_count":1,"highest_post_number":5,"image_url":null,"created_at":"2015-11-20T06:45:31.741Z","last_posted_at":"2015-12-09T03:18:51.274Z","bumped":true,"bumped_at":"2015-12-09T03:18:51.274Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":169,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"vikram","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":55},{"extras":null,"description":"Frequent Poster","user_id":1}]},{"id":128,"title":"Mongo query with Date breaks [solved: Mongo 3.0 required]","fancy_title":"Mongo query with Date breaks [solved: Mongo 3.0 
required]","slug":"mongo-query-with-date-breaks-solved-mongo-3-0-required","posts_count":5,"reply_count":0,"highest_post_number":5,"image_url":null,"created_at":"2015-12-08T18:30:56.562Z","last_posted_at":"2015-12-08T21:03:02.421Z","bumped":true,"bumped_at":"2015-12-08T21:03:02.421Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":102,"like_count":1,"has_summary":false,"archetype":"regular","last_poster_username":"edchan77","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest","description":"Original Poster, Most Recent Poster","user_id":68},{"extras":null,"description":"Frequent Poster","user_id":1}]},{"id":23,"title":"Can this connect to MS SQL Server?","fancy_title":"Can this connect to MS SQL Server?","slug":"can-this-connect-to-ms-sql-server","posts_count":7,"reply_count":1,"highest_post_number":7,"image_url":null,"created_at":"2015-10-21T18:52:37.987Z","last_posted_at":"2015-12-07T17:41:51.609Z","bumped":true,"bumped_at":"2015-12-07T17:41:51.609Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":367,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":9},{"extras":null,"description":"Frequent Poster","user_id":23},{"extras":null,"description":"Frequent Poster","user_id":3},{"extras":null,"description":"Frequent Poster","user_id":50},{"extras":"latest","description":"Most Recent Poster","user_id":1}]},{"id":121,"title":"Cannot restart metabase in docker","fancy_title":"Cannot restart metabase in 
docker","slug":"cannot-restart-metabase-in-docker","posts_count":5,"reply_count":1,"highest_post_number":5,"image_url":null,"created_at":"2015-12-04T21:28:58.137Z","last_posted_at":"2015-12-04T23:02:00.488Z","bumped":true,"bumped_at":"2015-12-04T23:02:00.488Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":96,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":66},{"extras":"latest","description":"Most Recent Poster, Frequent Poster","user_id":1}]},{"id":85,"title":"Edit Max Rows Count","fancy_title":"Edit Max Rows Count","slug":"edit-max-rows-count","posts_count":4,"reply_count":2,"highest_post_number":4,"image_url":null,"created_at":"2015-11-11T23:46:52.917Z","last_posted_at":"2015-11-24T01:01:14.569Z","bumped":true,"bumped_at":"2015-11-24T01:01:14.569Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":169,"like_count":1,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":34},{"extras":"latest","description":"Most Recent Poster, Frequent Poster","user_id":1}]},{"id":96,"title":"Creating charts by querying more than one table at a time","fancy_title":"Creating charts by querying more than one table at a 
time","slug":"creating-charts-by-querying-more-than-one-table-at-a-time","posts_count":6,"reply_count":4,"highest_post_number":6,"image_url":null,"created_at":"2015-11-17T11:20:18.442Z","last_posted_at":"2015-11-21T02:12:25.995Z","bumped":true,"bumped_at":"2015-11-21T02:12:25.995Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":217,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":51},{"extras":"latest","description":"Most Recent Poster, Frequent Poster","user_id":1}]},{"id":90,"title":"Trying to add RDS postgresql as the database fails silently","fancy_title":"Trying to add RDS postgresql as the database fails silently","slug":"trying-to-add-rds-postgresql-as-the-database-fails-silently","posts_count":4,"reply_count":2,"highest_post_number":4,"image_url":null,"created_at":"2015-11-14T23:45:02.967Z","last_posted_at":"2015-11-21T01:08:45.915Z","bumped":true,"bumped_at":"2015-11-21T01:08:45.915Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":162,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":46},{"extras":"latest","description":"Most Recent Poster, Frequent Poster","user_id":1}]},{"id":17,"title":"Deploy to Heroku isn't working","fancy_title":"Deploy to Heroku isn’t 
working","slug":"deploy-to-heroku-isnt-working","posts_count":9,"reply_count":3,"highest_post_number":9,"image_url":null,"created_at":"2015-10-21T16:42:03.096Z","last_posted_at":"2015-11-20T18:34:14.044Z","bumped":true,"bumped_at":"2015-11-20T18:34:14.044Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":332,"like_count":2,"has_summary":false,"archetype":"regular","last_poster_username":"agilliland","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":5},{"extras":null,"description":"Frequent Poster","user_id":3},{"extras":null,"description":"Frequent Poster","user_id":11},{"extras":null,"description":"Frequent Poster","user_id":25},{"extras":"latest","description":"Most Recent Poster","user_id":14}]},{"id":100,"title":"Can I use DATEPART() in SQL queries?","fancy_title":"Can I use DATEPART() in SQL queries?","slug":"can-i-use-datepart-in-sql-queries","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":null,"created_at":"2015-11-17T23:15:58.033Z","last_posted_at":"2015-11-18T00:19:48.763Z","bumped":true,"bumped_at":"2015-11-18T00:19:48.763Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":112,"like_count":1,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":53},{"extras":"latest","description":"Most Recent Poster","user_id":1}]},{"id":98,"title":"Feature Request: LDAP Authentication","fancy_title":"Feature Request: LDAP 
Authentication","slug":"feature-request-ldap-authentication","posts_count":1,"reply_count":0,"highest_post_number":1,"image_url":null,"created_at":"2015-11-17T17:22:44.484Z","last_posted_at":"2015-11-17T17:22:44.577Z","bumped":true,"bumped_at":"2015-11-17T17:22:44.577Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":97,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"j_at_svg","category_id":1,"pinned_globally":false,"posters":[{"extras":"latest single","description":"Original Poster, Most Recent Poster","user_id":52}]},{"id":87,"title":"Migrating from internal H2 to Postgres","fancy_title":"Migrating from internal H2 to Postgres","slug":"migrating-from-internal-h2-to-postgres","posts_count":2,"reply_count":0,"highest_post_number":2,"image_url":null,"created_at":"2015-11-12T14:36:06.745Z","last_posted_at":"2015-11-12T18:05:10.796Z","bumped":true,"bumped_at":"2015-11-12T18:05:10.796Z","unseen":false,"pinned":false,"unpinned":null,"visible":true,"closed":false,"archived":false,"bookmarked":null,"liked":null,"views":111,"like_count":0,"has_summary":false,"archetype":"regular","last_poster_username":"sameer","category_id":1,"pinned_globally":false,"posters":[{"extras":null,"description":"Original Poster","user_id":42},{"extras":"latest","description":"Most Recent Poster","user_id":1}]}]}}
`)
jsonparser-1.1.1/benchmark/benchmark_codecgen.go 0000664 0000000 0000000 00000161613 13776023740 0021763 0 ustar 00root root 0000000 0000000 // ************************************************************
// DO NOT EDIT.
// THIS FILE IS AUTO-GENERATED BY codecgen.
// ************************************************************
package benchmark
import (
"errors"
codec1978 "github.com/ugorji/go/codec"
"runtime"
"strconv"
)
const (
// ----- content types ----
codecSelferCcUTF86617 = 1
codecSelferCcRAW6617 = 255
// ----- value types used ----
codecSelferValueTypeArray6617 = 10
codecSelferValueTypeMap6617 = 9
codecSelferValueTypeString6617 = 6
codecSelferValueTypeInt6617 = 2
codecSelferValueTypeUint6617 = 3
codecSelferValueTypeFloat6617 = 4
codecSelferBitsize6617 = uint8(32 << (^uint(0) >> 63))
)
var (
errCodecSelferOnlyMapOrArrayEncodeToStruct6617 = errors.New(`only encoded map or array can be decoded into a struct`)
)
type codecSelfer6617 struct{}
func init() {
if codec1978.GenVersion != 10 {
_, file, _, _ := runtime.Caller(0)
panic("codecgen version mismatch: current: 10, need " + strconv.FormatInt(int64(codec1978.GenVersion), 10) + ". Re-generate file: " + file)
}
if false {
var _ byte = 0 // reference the types, but skip this branch at build/run time
}
}
func (x *SmallPayload) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(9)
} else {
r.WriteMapStart(9)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.St))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"St\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `St`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.St))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.Sid))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Sid\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Sid`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.Sid))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Tt)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Tt))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Tt\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Tt`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Tt)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Tt))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.Gr))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Gr\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Gr`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.Gr))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Uuid)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Uuid))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Uuid\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Uuid`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Uuid)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Uuid))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Ip)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Ip))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Ip\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Ip`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Ip)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Ip))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Ua)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Ua))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Ua\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Ua`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Ua)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Ua))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.Tz))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Tz\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Tz`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.Tz))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.V))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"V\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `V`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.V))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen (yy*-prefixed locals) — do not edit by hand.
// Dispatch order: a registered codec extension wins; then, for non-binary JSON
// handles, the JSON-unmarshal fast path; otherwise field-by-field decoding
// from either a map or an array container. Any other container type panics.
func (x *SmallPayload) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
// A registered extension for this type takes precedence.
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
// Text JSON handle: delegate to the JSON unmarshal fast path.
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
// Empty map: nothing to decode beyond the container markers.
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes SmallPayload fields from a map container of
// length l. l < 0 means the length is unknown and the stream is scanned until
// a break marker. A nil value resets the field to its zero value; keys that
// do not match any field are reported via DecStructFieldNotFound and skipped.
func (x *SmallPayload) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyhl3: true when the map length is known up front.
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
// StringView avoids allocating for the key bytes during the switch.
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "St":
if r.TryDecodeAsNil() {
x.St = 0
} else {
x.St = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
case "Sid":
if r.TryDecodeAsNil() {
x.Sid = 0
} else {
x.Sid = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
case "Tt":
if r.TryDecodeAsNil() {
x.Tt = ""
} else {
x.Tt = (string)(r.DecodeString())
}
case "Gr":
if r.TryDecodeAsNil() {
x.Gr = 0
} else {
x.Gr = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
case "Uuid":
if r.TryDecodeAsNil() {
x.Uuid = ""
} else {
x.Uuid = (string)(r.DecodeString())
}
case "Ip":
if r.TryDecodeAsNil() {
x.Ip = ""
} else {
x.Ip = (string)(r.DecodeString())
}
case "Ua":
if r.TryDecodeAsNil() {
x.Ua = ""
} else {
x.Ua = (string)(r.DecodeString())
}
case "Tz":
if r.TryDecodeAsNil() {
x.Tz = 0
} else {
x.Tz = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
case "V":
if r.TryDecodeAsNil() {
x.V = 0
} else {
x.V = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes SmallPayload fields from an array container
// of length l, positionally in declaration order: St, Sid, Tt, Gr, Uuid, Ip,
// Ua, Tz, V. If the array ends early, remaining fields keep their prior
// values; extra trailing elements are drained via DecStructFieldNotFound.
func (x *SmallPayload) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj13 counts elements read; yyb13 flags end of array; yyhl13 is true
// when the array length is known (l >= 0) rather than break-terminated.
var yyj13 int
var yyb13 bool
var yyhl13 bool = l >= 0
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.St = 0
} else {
x.St = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Sid = 0
} else {
x.Sid = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Tt = ""
} else {
x.Tt = (string)(r.DecodeString())
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Gr = 0
} else {
x.Gr = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Uuid = ""
} else {
x.Uuid = (string)(r.DecodeString())
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Ip = ""
} else {
x.Ip = (string)(r.DecodeString())
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Ua = ""
} else {
x.Ua = (string)(r.DecodeString())
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Tz = 0
} else {
x.Tz = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.V = 0
} else {
x.V = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
// Drain any elements beyond the 9 known fields.
for {
yyj13++
if yyhl13 {
yyb13 = yyj13 > l
} else {
yyb13 = r.CheckBreak()
}
if yyb13 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj13-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch: registered extension first, then the JSON-marshal fast path for
// text JSON handles, otherwise the single field Url is written as a 1-entry
// map (or a 1-element array when the handle's StructToArray option is set).
func (x *CBAvatar) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
// yy2arr2 selects array encoding via the handle's StructToArray option.
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(1)
} else {
r.WriteMapStart(1)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Url)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Url))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
// Fast path: write the pre-quoted key directly for JSON handles.
z.WriteStr("\"Url\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Url`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Url)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Url))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *CBAvatar) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes CBAvatar fields from a map container of
// length l (l < 0: length unknown, scan until break). Only the "Url" key is
// recognized; unknown keys are reported via DecStructFieldNotFound.
func (x *CBAvatar) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
// StringView avoids allocating for the key bytes during the switch.
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Url":
if r.TryDecodeAsNil() {
x.Url = ""
} else {
x.Url = (string)(r.DecodeString())
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes CBAvatar from an array container of length
// l: element 0 is Url. Extra trailing elements are drained and reported via
// DecStructFieldNotFound.
func (x *CBAvatar) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj5 counts elements; yyb5 flags end; yyhl5: true when l is known.
var yyj5 int
var yyb5 bool
var yyhl5 bool = l >= 0
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Url = ""
} else {
x.Url = (string)(r.DecodeString())
}
for {
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj5-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// The single field Avatars ([]*CBAvatar) is written via the generated slice
// helper as a 1-entry map (or 1-element array under StructToArray).
func (x *CBGravatar) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(1)
} else {
r.WriteMapStart(1)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if x.Avatars == nil {
r.EncodeNil()
} else {
if false {
} else {
// Generated helper encodes the []*CBAvatar slice element-wise.
h.encSlicePtrtoCBAvatar(([]*CBAvatar)(x.Avatars), e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Avatars\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Avatars`)
}
r.WriteMapElemValue()
if x.Avatars == nil {
r.EncodeNil()
} else {
if false {
} else {
h.encSlicePtrtoCBAvatar(([]*CBAvatar)(x.Avatars), e)
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *CBGravatar) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes CBGravatar fields from a map container of
// length l (l < 0: length unknown, scan until break). Only the "Avatars" key
// is recognized; its slice is decoded via the generated helper. Unknown keys
// are reported via DecStructFieldNotFound.
func (x *CBGravatar) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Avatars":
if r.TryDecodeAsNil() {
x.Avatars = nil
} else {
if false {
} else {
h.decSlicePtrtoCBAvatar((*[]*CBAvatar)(&x.Avatars), d)
}
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes CBGravatar from an array container of
// length l: element 0 is Avatars. Extra trailing elements are drained and
// reported via DecStructFieldNotFound.
func (x *CBGravatar) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj6 counts elements; yyb6 flags end; yyhl6: true when l is known.
var yyj6 int
var yyb6 bool
var yyhl6 bool = l >= 0
yyj6++
if yyhl6 {
yyb6 = yyj6 > l
} else {
yyb6 = r.CheckBreak()
}
if yyb6 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Avatars = nil
} else {
if false {
} else {
h.decSlicePtrtoCBAvatar((*[]*CBAvatar)(&x.Avatars), d)
}
}
for {
yyj6++
if yyhl6 {
yyb6 = yyj6 > l
} else {
yyb6 = r.CheckBreak()
}
if yyb6 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj6-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// The single field Followers (int) is written as a 1-entry map
// (or 1-element array under the handle's StructToArray option).
func (x *CBGithub) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(1)
} else {
r.WriteMapStart(1)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.Followers))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
// Fast path: write the pre-quoted key directly for JSON handles.
z.WriteStr("\"Followers\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Followers`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.Followers))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *CBGithub) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes CBGithub fields from a map container of
// length l (l < 0: length unknown, scan until break). Only the "Followers"
// key is recognized; unknown keys are reported via DecStructFieldNotFound.
func (x *CBGithub) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Followers":
if r.TryDecodeAsNil() {
x.Followers = 0
} else {
// IntV narrows the decoded int64 to the platform int bit size.
x.Followers = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes CBGithub from an array container of length
// l: element 0 is Followers. Extra trailing elements are drained and reported
// via DecStructFieldNotFound.
func (x *CBGithub) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj5 counts elements; yyb5 flags end; yyhl5: true when l is known.
var yyj5 int
var yyb5 bool
var yyhl5 bool = l >= 0
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Followers = 0
} else {
x.Followers = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
for {
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj5-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// The single field FullName (string) is written as a 1-entry map
// (or 1-element array under the handle's StructToArray option).
func (x *CBName) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(1)
} else {
r.WriteMapStart(1)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
// StringToRaw writes string bytes without re-encoding.
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.FullName)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.FullName))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"FullName\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `FullName`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.FullName)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.FullName))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *CBName) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes CBName fields from a map container of length
// l (l < 0: length unknown, scan until break). Only the "FullName" key is
// recognized; unknown keys are reported via DecStructFieldNotFound.
func (x *CBName) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "FullName":
if r.TryDecodeAsNil() {
x.FullName = ""
} else {
x.FullName = (string)(r.DecodeString())
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes CBName from an array container of length
// l: element 0 is FullName. Extra trailing elements are drained and reported
// via DecStructFieldNotFound.
func (x *CBName) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj5 counts elements; yyb5 flags end; yyhl5: true when l is known.
var yyj5 int
var yyb5 bool
var yyhl5 bool = l >= 0
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.FullName = ""
} else {
x.FullName = (string)(r.DecodeString())
}
for {
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj5-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// Writes the three pointer fields Name, Github, Gravatar as a 3-entry map (or
// 3-element array under StructToArray). Each yynN flag records whether the
// corresponding pointer is nil so the field is emitted as an explicit nil.
func (x *CBPerson) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(3)
} else {
r.WriteMapStart(3)
}
// yyn3: true when x.Name is nil (generated goto/label pattern).
var yyn3 bool
if x.Name == nil {
yyn3 = true
goto LABEL3
}
LABEL3:
if yyr2 || yy2arr2 {
if yyn3 {
r.WriteArrayElem()
r.EncodeNil()
} else {
r.WriteArrayElem()
if x.Name == nil {
r.EncodeNil()
} else {
x.Name.CodecEncodeSelf(e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Name\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Name`)
}
r.WriteMapElemValue()
if yyn3 {
r.EncodeNil()
} else {
if x.Name == nil {
r.EncodeNil()
} else {
x.Name.CodecEncodeSelf(e)
}
}
}
// yyn6: true when x.Github is nil.
var yyn6 bool
if x.Github == nil {
yyn6 = true
goto LABEL6
}
LABEL6:
if yyr2 || yy2arr2 {
if yyn6 {
r.WriteArrayElem()
r.EncodeNil()
} else {
r.WriteArrayElem()
if x.Github == nil {
r.EncodeNil()
} else {
x.Github.CodecEncodeSelf(e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Github\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Github`)
}
r.WriteMapElemValue()
if yyn6 {
r.EncodeNil()
} else {
if x.Github == nil {
r.EncodeNil()
} else {
x.Github.CodecEncodeSelf(e)
}
}
}
// yyn9: true when x.Gravatar is nil.
var yyn9 bool
if x.Gravatar == nil {
yyn9 = true
goto LABEL9
}
LABEL9:
if yyr2 || yy2arr2 {
if yyn9 {
r.WriteArrayElem()
r.EncodeNil()
} else {
r.WriteArrayElem()
if x.Gravatar == nil {
r.EncodeNil()
} else {
x.Gravatar.CodecEncodeSelf(e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Gravatar\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Gravatar`)
}
r.WriteMapElemValue()
if yyn9 {
r.EncodeNil()
} else {
if x.Gravatar == nil {
r.EncodeNil()
} else {
x.Gravatar.CodecEncodeSelf(e)
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *CBPerson) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes CBPerson fields from a map container of
// length l (l < 0: length unknown, scan until break). Pointer fields are
// lazily allocated before delegating to the field's own CodecDecodeSelf;
// an encoded nil resets the pointer to nil. Unknown keys are reported via
// DecStructFieldNotFound.
func (x *CBPerson) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Name":
if r.TryDecodeAsNil() {
if true && x.Name != nil {
x.Name = nil
}
} else {
if x.Name == nil {
x.Name = new(CBName)
}
x.Name.CodecDecodeSelf(d)
}
case "Github":
if r.TryDecodeAsNil() {
if true && x.Github != nil {
x.Github = nil
}
} else {
if x.Github == nil {
x.Github = new(CBGithub)
}
x.Github.CodecDecodeSelf(d)
}
case "Gravatar":
if r.TryDecodeAsNil() {
if true && x.Gravatar != nil {
x.Gravatar = nil
}
} else {
if x.Gravatar == nil {
x.Gravatar = new(CBGravatar)
}
x.Gravatar.CodecDecodeSelf(d)
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes CBPerson from an array container of length
// l, positionally: Name, Github, Gravatar. Pointer fields are lazily
// allocated; an encoded nil resets the pointer to nil. Extra trailing
// elements are drained and reported via DecStructFieldNotFound.
func (x *CBPerson) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj7 counts elements; yyb7 flags end; yyhl7: true when l is known.
var yyj7 int
var yyb7 bool
var yyhl7 bool = l >= 0
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
if true && x.Name != nil {
x.Name = nil
}
} else {
if x.Name == nil {
x.Name = new(CBName)
}
x.Name.CodecDecodeSelf(d)
}
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
if true && x.Github != nil {
x.Github = nil
}
} else {
if x.Github == nil {
x.Github = new(CBGithub)
}
x.Github.CodecDecodeSelf(d)
}
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
if true && x.Gravatar != nil {
x.Gravatar = nil
}
} else {
if x.Gravatar == nil {
x.Gravatar = new(CBGravatar)
}
x.Gravatar.CodecDecodeSelf(d)
}
for {
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj7-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// Writes Person (*CBPerson, nil-aware via yyn3) and Company
// (map[string]interface{}, via the EncMapStringIntfV helper) as a 2-entry
// map, or a 2-element array under the handle's StructToArray option.
func (x *MediumPayload) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(2)
} else {
r.WriteMapStart(2)
}
// yyn3: true when x.Person is nil (generated goto/label pattern).
var yyn3 bool
if x.Person == nil {
yyn3 = true
goto LABEL3
}
LABEL3:
if yyr2 || yy2arr2 {
if yyn3 {
r.WriteArrayElem()
r.EncodeNil()
} else {
r.WriteArrayElem()
if x.Person == nil {
r.EncodeNil()
} else {
x.Person.CodecEncodeSelf(e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Person\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Person`)
}
r.WriteMapElemValue()
if yyn3 {
r.EncodeNil()
} else {
if x.Person == nil {
r.EncodeNil()
} else {
x.Person.CodecEncodeSelf(e)
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if x.Company == nil {
r.EncodeNil()
} else {
if false {
} else {
// Library fast-path helper for map[string]interface{} values.
z.F.EncMapStringIntfV(x.Company, e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Company\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Company`)
}
r.WriteMapElemValue()
if x.Company == nil {
r.EncodeNil()
} else {
if false {
} else {
z.F.EncMapStringIntfV(x.Company, e)
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *MediumPayload) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes MediumPayload fields from a map container of
// length l (l < 0: length unknown, scan until break). "Person" is lazily
// allocated and decoded recursively; "Company" is decoded via the library's
// map[string]interface{} helper. Unknown keys are reported via
// DecStructFieldNotFound.
func (x *MediumPayload) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Person":
if r.TryDecodeAsNil() {
if true && x.Person != nil {
x.Person = nil
}
} else {
if x.Person == nil {
x.Person = new(CBPerson)
}
x.Person.CodecDecodeSelf(d)
}
case "Company":
if r.TryDecodeAsNil() {
x.Company = nil
} else {
if false {
} else {
z.F.DecMapStringIntfX(&x.Company, d)
}
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes MediumPayload from an array container of
// length l, positionally: Person, Company. Extra trailing elements are
// drained and reported via DecStructFieldNotFound.
func (x *MediumPayload) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj7 counts elements; yyb7 flags end; yyhl7: true when l is known.
var yyj7 int
var yyb7 bool
var yyhl7 bool = l >= 0
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
if true && x.Person != nil {
x.Person = nil
}
} else {
if x.Person == nil {
x.Person = new(CBPerson)
}
x.Person.CodecDecodeSelf(d)
}
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Company = nil
} else {
if false {
} else {
z.F.DecMapStringIntfX(&x.Company, d)
}
}
for {
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj7-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// The single field Username (string) is written as a 1-entry map
// (or 1-element array under the handle's StructToArray option).
func (x *DSUser) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(1)
} else {
r.WriteMapStart(1)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Username)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Username))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
// Fast path: write the pre-quoted key directly for JSON handles.
z.WriteStr("\"Username\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Username`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Username)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Username))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *DSUser) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes DSUser fields from a map container of length
// l (l < 0: length unknown, scan until break). Only the "Username" key is
// recognized; unknown keys are reported via DecStructFieldNotFound.
func (x *DSUser) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Username":
if r.TryDecodeAsNil() {
x.Username = ""
} else {
x.Username = (string)(r.DecodeString())
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes DSUser from an array container of length
// l: element 0 is Username. Extra trailing elements are drained and reported
// via DecStructFieldNotFound.
func (x *DSUser) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj5 counts elements; yyb5 flags end; yyhl5: true when l is known.
var yyj5 int
var yyb5 bool
var yyhl5 bool = l >= 0
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Username = ""
} else {
x.Username = (string)(r.DecodeString())
}
for {
yyj5++
if yyhl5 {
yyb5 = yyj5 > l
} else {
yyb5 = r.CheckBreak()
}
if yyb5 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj5-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes x to e; a nil receiver encodes as nil.
// Generated by ugorji/go codecgen — do not edit by hand.
// Writes Id (int) and Slug (string) as a 2-entry map
// (or 2-element array under the handle's StructToArray option).
func (x *DSTopic) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(2)
} else {
r.WriteMapStart(2)
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
r.EncodeInt(int64(x.Id))
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
// Fast path: write the pre-quoted key directly for JSON handles.
z.WriteStr("\"Id\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Id`)
}
r.WriteMapElemValue()
if false {
} else {
r.EncodeInt(int64(x.Id))
}
}
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Slug)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Slug))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Slug\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Slug`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.Slug)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.Slug))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes x in place from d.
// Generated by ugorji/go codecgen — do not edit by hand.
// Dispatch order: registered extension, then the JSON-unmarshal fast path for
// text JSON handles, otherwise decoding from a map or array container.
func (x *DSTopic) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
// Structs can only be decoded from map or array containers.
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap decodes DSTopic fields from a map container of
// length l (l < 0: length unknown, scan until break). Recognizes the "Id"
// and "Slug" keys; unknown keys are reported via DecStructFieldNotFound.
func (x *DSTopic) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Id":
if r.TryDecodeAsNil() {
x.Id = 0
} else {
// IntV narrows the decoded int64 to the platform int bit size.
x.Id = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
case "Slug":
if r.TryDecodeAsNil() {
x.Slug = ""
} else {
x.Slug = (string)(r.DecodeString())
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray decodes DSTopic from an array container of length
// l, positionally: Id, Slug. Extra trailing elements are drained and reported
// via DecStructFieldNotFound.
func (x *DSTopic) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
// yyj6 counts elements; yyb6 flags end; yyhl6: true when l is known.
var yyj6 int
var yyb6 bool
var yyhl6 bool = l >= 0
yyj6++
if yyhl6 {
yyb6 = yyj6 > l
} else {
yyb6 = r.CheckBreak()
}
if yyb6 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Id = 0
} else {
x.Id = (int)(z.C.IntV(r.DecodeInt64(), codecSelferBitsize6617))
}
yyj6++
if yyhl6 {
yyb6 = yyj6 > l
} else {
yyb6 = r.CheckBreak()
}
if yyb6 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Slug = ""
} else {
x.Slug = (string)(r.DecodeString())
}
for {
yyj6++
if yyhl6 {
yyb6 = yyj6 > l
} else {
yyb6 = r.CheckBreak()
}
if yyb6 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj6-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes a DSTopicsList to e. Machine-generated (ugorji/go
// codecgen) — do not edit by hand. Fast paths: nil receiver -> nil; a
// registered extension; json.Marshaler when the handle is JSON. Otherwise the
// struct is written as either a 2-element array (StructToArray) or a
// 2-entry map keyed by field name.
func (x *DSTopicsList) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(2)
} else {
r.WriteMapStart(2)
}
// Field 1: Topics ([]*DSTopic), encoded via the generated slice helper.
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if x.Topics == nil {
r.EncodeNil()
} else {
if false {
} else {
h.encSlicePtrtoDSTopic(([]*DSTopic)(x.Topics), e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Topics\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Topics`)
}
r.WriteMapElemValue()
if x.Topics == nil {
r.EncodeNil()
} else {
if false {
} else {
h.encSlicePtrtoDSTopic(([]*DSTopic)(x.Topics), e)
}
}
}
// Field 2: MoreTopicsUrl (string), raw bytes when StringToRaw is set.
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.MoreTopicsUrl)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.MoreTopicsUrl))
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"MoreTopicsUrl\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `MoreTopicsUrl`)
}
r.WriteMapElemValue()
if false {
} else {
if z.EncBasicHandle().StringToRaw {
r.EncodeStringBytesRaw(z.BytesView(string(x.MoreTopicsUrl)))
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, string(x.MoreTopicsUrl))
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes a DSTopicsList from d. Machine-generated (ugorji/go
// codecgen). Same dispatch shape as the other decoders: extension, then JSON
// unmarshal, then map/array container dispatch; panics on other containers.
func (x *DSTopicsList) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap populates x from a map container with l entries.
// Machine-generated (ugorji/go codecgen). Recognized keys: "Topics" (decoded
// by the generated slice helper) and "MoreTopicsUrl"; unknown keys are
// swallowed.
func (x *DSTopicsList) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0 // true when the map length was declared
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Topics":
if r.TryDecodeAsNil() {
x.Topics = nil
} else {
if false {
} else {
h.decSlicePtrtoDSTopic((*[]*DSTopic)(&x.Topics), d)
}
}
case "MoreTopicsUrl":
if r.TryDecodeAsNil() {
x.MoreTopicsUrl = ""
} else {
x.MoreTopicsUrl = (string)(r.DecodeString())
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray populates x from an array container with l
// elements, in declared field order (Topics, MoreTopicsUrl). Machine-generated
// (ugorji/go codecgen). Short arrays return early; extra elements are drained.
func (x *DSTopicsList) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyj7 int
var yyb7 bool            // true once the array is exhausted
var yyhl7 bool = l >= 0  // true when the array length was declared
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Topics = nil
} else {
if false {
} else {
h.decSlicePtrtoDSTopic((*[]*DSTopic)(&x.Topics), d)
}
}
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.MoreTopicsUrl = ""
} else {
x.MoreTopicsUrl = (string)(r.DecodeString())
}
// Drain any elements beyond the known struct fields.
for {
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj7-1, "")
}
r.ReadArrayEnd()
}
// CodecEncodeSelf encodes a LargePayload to e. Machine-generated (ugorji/go
// codecgen) — do not edit by hand. Fast paths: nil receiver, extension, JSON
// marshaler; otherwise a 2-field struct written as array or map. The
// yyn6/LABEL6 dance is the generator's nil-pointer check for the Topics
// pointer field: when Topics is nil an explicit nil is written in its slot.
func (x *LargePayload) CodecEncodeSelf(e *codec1978.Encoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperEncoder(e)
_, _, _ = h, z, r
if x == nil {
r.EncodeNil()
} else {
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.EncExtension(x, yyxt1)
} else if !z.EncBinary() && z.IsJSONHandle() {
z.EncJSONMarshal(x)
} else {
yysep2 := !z.EncBinary()
yy2arr2 := z.EncBasicHandle().StructToArray
_, _ = yysep2, yy2arr2
const yyr2 bool = false // struct tag has 'toArray'
if yyr2 || yy2arr2 {
r.WriteArrayStart(2)
} else {
r.WriteMapStart(2)
}
// Field 1: Users ([]*DSUser), encoded via the generated slice helper.
if yyr2 || yy2arr2 {
r.WriteArrayElem()
if x.Users == nil {
r.EncodeNil()
} else {
if false {
} else {
h.encSlicePtrtoDSUser(([]*DSUser)(x.Users), e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Users\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Users`)
}
r.WriteMapElemValue()
if x.Users == nil {
r.EncodeNil()
} else {
if false {
} else {
h.encSlicePtrtoDSUser(([]*DSUser)(x.Users), e)
}
}
}
// Field 2: Topics (*DSTopicsList) — pointer field, hence the nil flag.
var yyn6 bool
if x.Topics == nil {
yyn6 = true
goto LABEL6
}
LABEL6:
if yyr2 || yy2arr2 {
if yyn6 {
r.WriteArrayElem()
r.EncodeNil()
} else {
r.WriteArrayElem()
if x.Topics == nil {
r.EncodeNil()
} else {
x.Topics.CodecEncodeSelf(e)
}
}
} else {
r.WriteMapElemKey()
if z.IsJSONHandle() {
z.WriteStr("\"Topics\"")
} else {
r.EncodeStringEnc(codecSelferCcUTF86617, `Topics`)
}
r.WriteMapElemValue()
if yyn6 {
r.EncodeNil()
} else {
if x.Topics == nil {
r.EncodeNil()
} else {
x.Topics.CodecEncodeSelf(e)
}
}
}
if yyr2 || yy2arr2 {
r.WriteArrayEnd()
} else {
r.WriteMapEnd()
}
}
}
}
// CodecDecodeSelf decodes a LargePayload from d. Machine-generated (ugorji/go
// codecgen). Same dispatch shape as the other decoders: extension, then JSON
// unmarshal, then map/array container dispatch; panics on other containers.
func (x *LargePayload) CodecDecodeSelf(d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
if false {
} else if yyxt1 := z.Extension(z.I2Rtid(x)); yyxt1 != nil {
z.DecExtension(x, yyxt1)
} else if !z.DecBinary() && z.IsJSONHandle() {
z.DecJSONUnmarshal(x)
} else {
yyct2 := r.ContainerType()
if yyct2 == codecSelferValueTypeMap6617 {
yyl2 := r.ReadMapStart()
if yyl2 == 0 {
r.ReadMapEnd()
} else {
x.codecDecodeSelfFromMap(yyl2, d)
}
} else if yyct2 == codecSelferValueTypeArray6617 {
yyl2 := r.ReadArrayStart()
if yyl2 == 0 {
r.ReadArrayEnd()
} else {
x.codecDecodeSelfFromArray(yyl2, d)
}
} else {
panic(errCodecSelferOnlyMapOrArrayEncodeToStruct6617)
}
}
}
// codecDecodeSelfFromMap populates x from a map container with l entries.
// Machine-generated (ugorji/go codecgen). "Users" decodes via the generated
// slice helper; "Topics" lazily allocates the *DSTopicsList before delegating
// to its decoder; a nil value clears an existing Topics pointer.
func (x *LargePayload) codecDecodeSelfFromMap(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyhl3 bool = l >= 0 // true when the map length was declared
for yyj3 := 0; ; yyj3++ {
if yyhl3 {
if yyj3 >= l {
break
}
} else {
if r.CheckBreak() {
break
}
}
r.ReadMapElemKey()
yys3 := z.StringView(r.DecodeStringAsBytes())
r.ReadMapElemValue()
switch yys3 {
case "Users":
if r.TryDecodeAsNil() {
x.Users = nil
} else {
if false {
} else {
h.decSlicePtrtoDSUser((*[]*DSUser)(&x.Users), d)
}
}
case "Topics":
if r.TryDecodeAsNil() {
if true && x.Topics != nil {
x.Topics = nil
}
} else {
if x.Topics == nil {
x.Topics = new(DSTopicsList)
}
x.Topics.CodecDecodeSelf(d)
}
default:
z.DecStructFieldNotFound(-1, yys3)
} // end switch yys3
} // end for yyj3
r.ReadMapEnd()
}
// codecDecodeSelfFromArray populates x from an array container with l
// elements, in declared field order (Users, Topics). Machine-generated
// (ugorji/go codecgen). Short arrays return early; extra elements are drained.
func (x *LargePayload) codecDecodeSelfFromArray(l int, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
var yyj7 int
var yyb7 bool            // true once the array is exhausted
var yyhl7 bool = l >= 0  // true when the array length was declared
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
x.Users = nil
} else {
if false {
} else {
h.decSlicePtrtoDSUser((*[]*DSUser)(&x.Users), d)
}
}
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
r.ReadArrayEnd()
return
}
r.ReadArrayElem()
if r.TryDecodeAsNil() {
if true && x.Topics != nil {
x.Topics = nil
}
} else {
// Lazily allocate the pointer field before delegating to its decoder.
if x.Topics == nil {
x.Topics = new(DSTopicsList)
}
x.Topics.CodecDecodeSelf(d)
}
// Drain any elements beyond the known struct fields.
for {
yyj7++
if yyhl7 {
yyb7 = yyj7 > l
} else {
yyb7 = r.CheckBreak()
}
if yyb7 {
break
}
r.ReadArrayElem()
z.DecStructFieldNotFound(yyj7-1, "")
}
r.ReadArrayEnd()
}
// encSlicePtrtoCBAvatar writes v as a codec array: a nil element is encoded
// as an explicit nil, every other element encodes itself.
func (x codecSelfer6617) encSlicePtrtoCBAvatar(v []*CBAvatar, e *codec1978.Encoder) {
	var h codecSelfer6617
	z, r := codec1978.GenHelperEncoder(e)
	_, _, _ = h, z, r
	r.WriteArrayStart(len(v))
	for _, avatar := range v {
		r.WriteArrayElem()
		if avatar != nil {
			avatar.CodecEncodeSelf(e)
		} else {
			r.EncodeNil()
		}
	}
	r.WriteArrayEnd()
}
// decSlicePtrtoCBAvatar decodes into *v, reusing the existing backing array
// when capacity allows. Machine-generated (ugorji/go codecgen). DecInferLen
// caps the initial allocation against MaxInitLen; the slice is grown with
// append as needed and shrunk afterwards if fewer elements arrived. yyc1
// tracks whether *v must be written back. Note: yydb1 is generated dead code
// (never set), so the DecSwallow branch is unreachable.
func (x codecSelfer6617) decSlicePtrtoCBAvatar(v *[]*CBAvatar, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
yyv1 := *v
yyh1, yyl1 := z.DecSliceHelperStart()
var yyc1 bool // slice header changed; write back at the end
_ = yyc1
if yyl1 == 0 {
if yyv1 == nil {
yyv1 = []*CBAvatar{}
yyc1 = true
} else if len(yyv1) != 0 {
yyv1 = yyv1[:0]
yyc1 = true
}
} else {
yyhl1 := yyl1 > 0 // length known up-front
var yyrl1 int
_ = yyrl1
if yyhl1 {
if yyl1 > cap(yyv1) {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
if yyrl1 <= cap(yyv1) {
yyv1 = yyv1[:yyrl1]
} else {
yyv1 = make([]*CBAvatar, yyrl1)
}
yyc1 = true
} else if yyl1 != len(yyv1) {
yyv1 = yyv1[:yyl1]
yyc1 = true
}
}
var yyj1 int
// var yydn1 bool
for yyj1 = 0; (yyhl1 && yyj1 < yyl1) || !(yyhl1 || r.CheckBreak()); yyj1++ { // bounds-check-elimination
if yyj1 == 0 && yyv1 == nil {
if yyhl1 {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
} else {
yyrl1 = 8
}
yyv1 = make([]*CBAvatar, yyrl1)
yyc1 = true
}
yyh1.ElemContainerState(yyj1)
var yydb1 bool // never set true: generated dead code
if yyj1 >= len(yyv1) {
yyv1 = append(yyv1, nil)
yyc1 = true
}
if yydb1 {
z.DecSwallow()
} else {
if r.TryDecodeAsNil() {
yyv1[yyj1] = nil
} else {
if yyv1[yyj1] == nil {
yyv1[yyj1] = new(CBAvatar)
}
yyv1[yyj1].CodecDecodeSelf(d)
}
}
}
// Shrink if fewer elements were read than the slice can hold.
if yyj1 < len(yyv1) {
yyv1 = yyv1[:yyj1]
yyc1 = true
} else if yyj1 == 0 && yyv1 == nil {
yyv1 = make([]*CBAvatar, 0)
yyc1 = true
}
}
yyh1.End()
if yyc1 {
*v = yyv1
}
}
// encSlicePtrtoDSTopic writes v as a codec array: a nil element is encoded
// as an explicit nil, every other element encodes itself.
func (x codecSelfer6617) encSlicePtrtoDSTopic(v []*DSTopic, e *codec1978.Encoder) {
	var h codecSelfer6617
	z, r := codec1978.GenHelperEncoder(e)
	_, _, _ = h, z, r
	r.WriteArrayStart(len(v))
	for _, topic := range v {
		r.WriteArrayElem()
		if topic != nil {
			topic.CodecEncodeSelf(e)
		} else {
			r.EncodeNil()
		}
	}
	r.WriteArrayEnd()
}
// decSlicePtrtoDSTopic decodes into *v, reusing the existing backing array
// when capacity allows. Machine-generated (ugorji/go codecgen); same shape as
// decSlicePtrtoCBAvatar. yyc1 tracks whether *v must be written back; yydb1
// is generated dead code (never set), so the DecSwallow branch is unreachable.
func (x codecSelfer6617) decSlicePtrtoDSTopic(v *[]*DSTopic, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
yyv1 := *v
yyh1, yyl1 := z.DecSliceHelperStart()
var yyc1 bool // slice header changed; write back at the end
_ = yyc1
if yyl1 == 0 {
if yyv1 == nil {
yyv1 = []*DSTopic{}
yyc1 = true
} else if len(yyv1) != 0 {
yyv1 = yyv1[:0]
yyc1 = true
}
} else {
yyhl1 := yyl1 > 0 // length known up-front
var yyrl1 int
_ = yyrl1
if yyhl1 {
if yyl1 > cap(yyv1) {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
if yyrl1 <= cap(yyv1) {
yyv1 = yyv1[:yyrl1]
} else {
yyv1 = make([]*DSTopic, yyrl1)
}
yyc1 = true
} else if yyl1 != len(yyv1) {
yyv1 = yyv1[:yyl1]
yyc1 = true
}
}
var yyj1 int
// var yydn1 bool
for yyj1 = 0; (yyhl1 && yyj1 < yyl1) || !(yyhl1 || r.CheckBreak()); yyj1++ { // bounds-check-elimination
if yyj1 == 0 && yyv1 == nil {
if yyhl1 {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
} else {
yyrl1 = 8
}
yyv1 = make([]*DSTopic, yyrl1)
yyc1 = true
}
yyh1.ElemContainerState(yyj1)
var yydb1 bool // never set true: generated dead code
if yyj1 >= len(yyv1) {
yyv1 = append(yyv1, nil)
yyc1 = true
}
if yydb1 {
z.DecSwallow()
} else {
if r.TryDecodeAsNil() {
yyv1[yyj1] = nil
} else {
if yyv1[yyj1] == nil {
yyv1[yyj1] = new(DSTopic)
}
yyv1[yyj1].CodecDecodeSelf(d)
}
}
}
// Shrink if fewer elements were read than the slice can hold.
if yyj1 < len(yyv1) {
yyv1 = yyv1[:yyj1]
yyc1 = true
} else if yyj1 == 0 && yyv1 == nil {
yyv1 = make([]*DSTopic, 0)
yyc1 = true
}
}
yyh1.End()
if yyc1 {
*v = yyv1
}
}
// encSlicePtrtoDSUser writes v as a codec array: a nil element is encoded
// as an explicit nil, every other element encodes itself.
func (x codecSelfer6617) encSlicePtrtoDSUser(v []*DSUser, e *codec1978.Encoder) {
	var h codecSelfer6617
	z, r := codec1978.GenHelperEncoder(e)
	_, _, _ = h, z, r
	r.WriteArrayStart(len(v))
	for _, user := range v {
		r.WriteArrayElem()
		if user != nil {
			user.CodecEncodeSelf(e)
		} else {
			r.EncodeNil()
		}
	}
	r.WriteArrayEnd()
}
// decSlicePtrtoDSUser decodes into *v, reusing the existing backing array
// when capacity allows. Machine-generated (ugorji/go codecgen); same shape as
// decSlicePtrtoCBAvatar. yyc1 tracks whether *v must be written back; yydb1
// is generated dead code (never set), so the DecSwallow branch is unreachable.
func (x codecSelfer6617) decSlicePtrtoDSUser(v *[]*DSUser, d *codec1978.Decoder) {
var h codecSelfer6617
z, r := codec1978.GenHelperDecoder(d)
_, _, _ = h, z, r
yyv1 := *v
yyh1, yyl1 := z.DecSliceHelperStart()
var yyc1 bool // slice header changed; write back at the end
_ = yyc1
if yyl1 == 0 {
if yyv1 == nil {
yyv1 = []*DSUser{}
yyc1 = true
} else if len(yyv1) != 0 {
yyv1 = yyv1[:0]
yyc1 = true
}
} else {
yyhl1 := yyl1 > 0 // length known up-front
var yyrl1 int
_ = yyrl1
if yyhl1 {
if yyl1 > cap(yyv1) {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
if yyrl1 <= cap(yyv1) {
yyv1 = yyv1[:yyrl1]
} else {
yyv1 = make([]*DSUser, yyrl1)
}
yyc1 = true
} else if yyl1 != len(yyv1) {
yyv1 = yyv1[:yyl1]
yyc1 = true
}
}
var yyj1 int
// var yydn1 bool
for yyj1 = 0; (yyhl1 && yyj1 < yyl1) || !(yyhl1 || r.CheckBreak()); yyj1++ { // bounds-check-elimination
if yyj1 == 0 && yyv1 == nil {
if yyhl1 {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 8)
} else {
yyrl1 = 8
}
yyv1 = make([]*DSUser, yyrl1)
yyc1 = true
}
yyh1.ElemContainerState(yyj1)
var yydb1 bool // never set true: generated dead code
if yyj1 >= len(yyv1) {
yyv1 = append(yyv1, nil)
yyc1 = true
}
if yydb1 {
z.DecSwallow()
} else {
if r.TryDecodeAsNil() {
yyv1[yyj1] = nil
} else {
if yyv1[yyj1] == nil {
yyv1[yyj1] = new(DSUser)
}
yyv1[yyj1].CodecDecodeSelf(d)
}
}
}
// Shrink if fewer elements were read than the slice can hold.
if yyj1 < len(yyv1) {
yyv1 = yyv1[:yyj1]
yyc1 = true
} else if yyj1 == 0 && yyv1 == nil {
yyv1 = make([]*DSUser, 0)
yyc1 = true
}
}
yyh1.End()
if yyc1 {
*v = yyv1
}
}
jsonparser-1.1.1/benchmark/benchmark_delete_test.go 0000664 0000000 0000000 00000037143 13776023740 0022515 0 ustar 00root root 0000000 0000000 package benchmark
import (
"testing"
"github.com/buger/jsonparser"
)
// BenchmarkDeleteSmall measures jsonparser.Delete of a top-level key on a
// small payload. The buffer is re-created inside the loop — presumably because
// Delete modifies its input in place, so each iteration needs a fresh copy
// (TODO confirm against jsonparser.Delete's contract).
func BenchmarkDeleteSmall(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
data1 := []byte(`{ "instanceId": 1, "ip": "10.10.10.10", "services": [ { "id": 1, "name": "srv1" } ] }`)
_ = jsonparser.Delete(data1, "services")
}
}
// BenchmarkDeleteNested measures jsonparser.Delete with a nested key path
// ("services" -> "id") on a small payload. The buffer is re-created inside the
// loop — presumably because Delete modifies its input in place (TODO confirm).
func BenchmarkDeleteNested(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
data1 := []byte(`{ "instanceId": 1, "ip": "10.10.10.10", "services": [ { "id": 1, "name": "srv1" } ] }`)
_ = jsonparser.Delete(data1, "services", "id")
}
}
// BenchmarkDeleteLarge measures jsonparser.Delete of the large "body" field
// on a multi-kilobyte real-world article payload (raw string literal spans
// many lines, including embedded newlines). The buffer is re-created inside
// the loop — presumably because Delete modifies its input in place (TODO
// confirm). The per-iteration []byte conversion cost is included in the
// measurement for all Delete benchmarks, so results stay comparable.
func BenchmarkDeleteLarge(b *testing.B) {
b.ReportAllocs()
b.ResetTimer()
for i := 0; i < b.N; i++ {
data1 := []byte(`{"adsEnabled":true,"assetGroup":{"id":"4131","logoURL":"https://www.gannett-cdn.com/sites/usatoday/images/blogs/talkingtech/logo_front_v2.png","name":"talkingtech","siteCode":"USAT","siteId":"1","siteName":"USA TODAY","sstsId":"c67ad92a-3c9b-4817-9030-9357a9c2a86e","type":"blog","URL":"/tech/talkingtech"},"authoringBehavior":"text","authoringTypeCode":"blog","awsPath":"tech/talkingtech","backfillDate":"2018-10-30T14:56:31.522Z","byline":"Mike Snider","contentProtectionState":"free","contentSourceCode":"USAT","contributors":[{"id":"1071","name":"Mike Snider"}],"createDate":"2018-10-30T13:58:41.194Z","createSystem":"Presto Next","createUser":"msnider","eventDate":"2018-10-30T15:09:50.43Z","excludeFromMobile":false,"fronts":[{"id":"206","name":"tech","recommendedDate":"2018-10-30T15:09:50.399Z","type":"section-front"},{"id":"1012186","name":"tech_talkingtech","recommendedDate":"2018-10-30T15:09:50.399Z","type":"section-front"},{"id":"196","name":"money","recommendedDate":"2018-10-30T15:09:50.399Z","type":"section-front"},{"id":"156","name":"home","recommendedDate":"2018-10-30T15:09:50.399Z","type":"section-front"},{"id":"156","name":"home","recommendedDate":"2018-10-30T15:09:50.399Z","type":"section-front"}],"geoTag":{"attributes":{"lat":"","long":""},"id":""},"headline":"'Red Dead Redemption 2' rakes in $725M for Rockstar Games in blockbuster weekend 
debut","id":"1817435002","initialPublishDate":"2018-10-30T14:56:31.522Z","isEvergreen":false,"links":{"assets":[{"id":"1763879002","overrides":{},"position":1,"relationshipTypeFlags":"PromoImage"},{"id":"1764652002","overrides":{},"position":2,"relationshipTypeFlags":"Undefined"},{"id":"1765924002","overrides":{},"position":3,"relationshipTypeFlags":"Undefined"}],"photoId":"1763879002"},"pageURL":{"long":"http://www.usatoday.com/story/tech/talkingtech/2018/10/30/red-dead-redemption-2-makes-725-million-debut-rockstar-games/1817435002/","short":"http://www.usatoday.com/story/tech/talkingtech/2018/10/30/red-dead-redemption-2-makes-725-million-debut-rockstar-games/1817435002/"},"promoBrief":"Video game \"Red Dead Redemption 2\" corralled blockbuster sales of $725 million in its first three days, according to publisher Rockstar Games.","propertyDisplayName":"USA TODAY","propertyId":"1","propertyName":"USATODAY","publication":"USA TODAY","publishDate":"2018-10-30T15:09:50.399Z","publishSystem":"authoring","publishUser":"geronimo-publish-handler","readerCommentsEnabled":false,"schemaVersion":"0.11.20","shortHeadline":"'Red Dead Redemption 2' corrals $725M in sales","source":"USA TODAY","ssts":{"leafName":"talkingtech","path":"tech/talkingtech","section":"tech","subsection":"talkingtech","taxonomyEntityDisplayName":"Talking Tech","topic":"","subtopic":""},"statusName":"published","tags":[{"id":"855b0686-b2d8-4d98-b5f4-fcacf713047b","isPrimary":true,"name":"Talking Tech","path":"USAT TOPICS/USAT Science and technology/Talking Tech","taggingStatus":"UserTagged","vocabulary":"Topics"},{"id":"5dd5b5f2-9594-4aae-83c8-1ebb8aa50767","name":"Rockstar Games","path":"Candidates/Rockstar Games","taggingStatus":"UserTagged","vocabulary":"Companies"},{"id":"ceff0ffa-451d-46ae-8c4f-f958264b165e","name":"Video Games","path":"Consumer Products/Video 
Games","taggingStatus":"UserTagged","vocabulary":"Subjects"},{"id":"d59ddfbc-2afe-40e3-a9a2-5debe530dc5f","name":"Redemption","path":"Religious Organizations/Redemption","taggingStatus":"AutoTagged","vocabulary":"Organizations"},{"id":"09f4e1a7-50e7-4fc5-b318-d300acc4718f","name":"Success","path":"Emotions/Success","taggingStatus":"AutoTagged","vocabulary":"SubjectCodes"},{"id":"7095bb07-b172-434b-a4eb-8856263ad949","name":"Overall Positive","path":"Emotions/Overall Positive","taggingStatus":"AutoTagged","vocabulary":"SubjectCodes"},{"id":"d2cb2465-3a24-4104-8569-31785b515f62","name":"Sony","path":"Corporations/Sony","taggingStatus":"AutoTagged","vocabulary":"Companies"},{"id":"9b993d1c-2a6d-4279-acb3-ecac95d77320","name":"Amusement","path":"Emotions/Amusement","taggingStatus":"AutoTagged","vocabulary":"SubjectCodes"}],"title":"Red Dead Redemption 2 makes $725 million in debut for Rockstar Games","updateDate":"2018-10-30T15:09:50.43Z","updateUser":"mhayes","aggregateId":"acba765c-c573-42af-929f-26ea5920b932","body":{"desktop":[{"type":"asset","value":"1763879002"},{"type":"text","value":"
Rockstar Games has another hard-boiled hit on its hands.
"},{"type":"text","value":"Old West adventure game &ldquo;Red Dead Redemption 2,&rdquo; which landed Friday, lassoed $725 million in sales worldwide in its first three days.
"},{"type":"text","value":"That places the massive explorable open-world game as the No. 2 game out of the gate, just behind Rockstar's &ldquo;Grand Theft Auto V,&rdquo; the biggest seller of all time, which took in $1 billion in its first three days when it launched on Sept. 17, 2013.
"},{"type":"text","value":"&ldquo;GTA V&rdquo; has gone on to make more money than any other single game title, selling nearly 100 million copies and reaping $6 billion in revenue, according to MarketWatch.
"},{"type":"text","value":"The three-day start makes &ldquo;Red Dead Redemption 2&rdquo; the single-biggest opening weekend in &ldquo;the history of entertainment,&rdquo; Rockstar said in a press release detailing the game's achievements. That means the three-day sales for the game, prices of which start at $59.99 (rated Mature for those 17-up), surpasses opening weekends for blockbuster movies such as &ldquo;Avengers: Infinity War&rdquo; and &ldquo;Star Wars: The Force Awakens.&rdquo;
"},{"type":"text","value":"More: 'Red Dead Redemption 2': First impressions from once upon a time in the West
\n
\nMore: Sony lists the 20 games coming to PlayStation Classic retro video game console
"},{"type":"text","value":"&ldquo;Red Dead Redemption 2&rdquo; also tallied the biggest full game sales marks for one and for three days on Sony's PlayStation Network, Rockstar said. It was also the most preordered game on Sony's online network.
"},{"type":"text","value":"Reviews for the game have rated it among the best ever. It earned a 97 on Metacritic, earning it a tie for No. 6 all-time, along with games such as &ldquo;Super Mario Galaxy&rdquo; and &ldquo;GTA V.&rdquo;
"},{"type":"text","value":"Piper Jaffray &amp; Co. senior research analyst Michael Olson estimated Rockstar sold about 11 million copies in its first three days. That means Olson's initial estimate of Rockstar selling 15.5 million copies of &ldquo;Red Dead 2&rdquo; in its fiscal year, which ends in March 2019, &ldquo;appears conservative,&rdquo; he said in a note to investors Tuesday.
"},{"type":"text","value":"&ldquo;Clearly, with RDR2 first weekend sell-through exceeding CoD: Black Ops 4, it now appears RDR2 estimates may have been overly conservative,&rdquo; Olson wrote.
"},{"type":"text","value":"Shares of Rockstar’s parent company Take-Two Interactive (TTWO) rose about 8 percent in early trading Tuesday to $120.55.
"},{"type":"asset","value":"1765924002"},{"type":"text","value":"Follow USA TODAY reporter Mike Snider on Twitter: @MikeSnider.
"}],"mobile":[{"type":"asset","value":"1763879002"},{"type":"text","value":"Rockstar Games has another hard-boiled hit on its hands.
"},{"type":"text","value":"Old West adventure game &ldquo;Red Dead Redemption 2,&rdquo; which landed Friday, lassoed $725 million in sales worldwide in its first three days.
"},{"type":"text","value":"That places the massive explorable open-world game as the No. 2 game out of the gate, just behind Rockstar's &ldquo;Grand Theft Auto V,&rdquo; the biggest seller of all time, which took in $1 billion in its first three days when it launched on Sept. 17, 2013.
"},{"type":"text","value":"&ldquo;GTA V&rdquo; has gone on to make more money than any other single game title, selling nearly 100 million copies and reaping $6 billion in revenue, according to MarketWatch.
"},{"type":"text","value":"The three-day start makes &ldquo;Red Dead Redemption 2&rdquo; the single-biggest opening weekend in &ldquo;the history of entertainment,&rdquo; Rockstar said in a press release detailing the game's achievements. That means the three-day sales for the game, prices of which start at $59.99 (rated Mature for those 17-up), surpasses opening weekends for blockbuster movies such as &ldquo;Avengers: Infinity War&rdquo; and &ldquo;Star Wars: The Force Awakens.&rdquo;
"},{"type":"text","value":"More: 'Red Dead Redemption 2': First impressions from once upon a time in the West
\n
\nMore: Sony lists the 20 games coming to PlayStation Classic retro video game console
"},{"type":"text","value":"&ldquo;Red Dead Redemption 2&rdquo; also tallied the biggest full game sales marks for one and for three days on Sony's PlayStation Network, Rockstar said. It was also the most preordered game on Sony's online network.
"},{"type":"text","value":"Reviews for the game have rated it among the best ever. It earned a 97 on Metacritic, earning it a tie for No. 6 all-time, along with games such as &ldquo;Super Mario Galaxy&rdquo; and &ldquo;GTA V.&rdquo;
"},{"type":"text","value":"Piper Jaffray &amp; Co. senior research analyst Michael Olson estimated Rockstar sold about 11 million copies in its first three days. That means Olson's initial estimate of Rockstar selling 15.5 million copies of &ldquo;Red Dead 2&rdquo; in its fiscal year, which ends in March 2019, &ldquo;appears conservative,&rdquo; he said in a note to investors Tuesday.
"},{"type":"text","value":"&ldquo;Clearly, with RDR2 first weekend sell-through exceeding CoD: Black Ops 4, it now appears RDR2 estimates may have been overly conservative,&rdquo; Olson wrote.
"},{"type":"text","value":"Shares of Rockstar’s parent company Take-Two Interactive (TTWO) rose about 8 percent in early trading Tuesday to $120.55.
"},{"type":"asset","value":"1765924002"},{"type":"text","value":"Follow USA TODAY reporter Mike Snider on Twitter: @MikeSnider.
"}]},"fullText":"![]()
\n\nRockstar Games has another hard-boiled hit on its hands.
\n\nOld West adventure game &ldquo;Red Dead Redemption 2,&rdquo; which landed Friday, lassoed $725 million in sales worldwide in its first three days.
\n\nThat places the massive explorable open-world game as the No. 2 game out of the gate, just behind Rockstar's &ldquo;Grand Theft Auto V,&rdquo; the biggest seller of all time, which took in $1 billion in its first three days when it launched on Sept. 17, 2013.
\n\n&ldquo;GTA V&rdquo; has gone on to make more money than any other single game title, selling nearly 100 million copies and reaping $6 billion in revenue, according to MarketWatch.
\n\nThe three-day start makes &ldquo;Red Dead Redemption 2&rdquo; the single-biggest opening weekend in &ldquo;the history of entertainment,&rdquo; Rockstar said in a press release detailing the game's achievements. That means the three-day sales for the game, prices of which start at $59.99 (rated Mature for those 17-up), surpasses opening weekends for blockbuster movies such as &ldquo;Avengers: Infinity War&rdquo; and &ldquo;Star Wars: The Force Awakens.&rdquo;
\n\nMore: 'Red Dead Redemption 2': First impressions from once upon a time in the West
\n
\nMore: Sony lists the 20 games coming to PlayStation Classic retro video game console
\n\n&ldquo;Red Dead Redemption 2&rdquo; also tallied the biggest full game sales marks for one and for three days on Sony's PlayStation Network, Rockstar said. It was also the most preordered game on Sony's online network.
\n\nReviews for the game have rated it among the best ever. It earned a 97 on Metacritic, earning it a tie for No. 6 all-time, along with games such as &ldquo;Super Mario Galaxy&rdquo; and &ldquo;GTA V.&rdquo;
\n\nPiper Jaffray &amp; Co. senior research analyst Michael Olson estimated Rockstar sold about 11 million copies in its first three days. That means Olson's initial estimate of Rockstar selling 15.5 million copies of &ldquo;Red Dead 2&rdquo; in its fiscal year, which ends in March 2019, &ldquo;appears conservative,&rdquo; he said in a note to investors Tuesday.
\n\n&ldquo;Clearly, with RDR2 first weekend sell-through exceeding CoD: Black Ops 4, it now appears RDR2 estimates may have been overly conservative,&rdquo; Olson wrote.
\n\nShares of Rockstar’s parent company Take-Two Interactive (TTWO) rose about 8 percent in early trading Tuesday to $120.55.
\n\n![]()
\n\nFollow USA TODAY reporter Mike Snider on Twitter: @MikeSnider.
\n","layoutPriorityAssetId":"1763879002","seoTitle":"Red Dead Redemption 2 makes $725 million in debut for Rockstar Games","type":"text","versionHash":"fe60306b2e7574a8d65e690753deb666"}`)
_ = jsonparser.Delete(data1, "body")
}
}
jsonparser-1.1.1/benchmark/benchmark_easyjson.go 0000664 0000000 0000000 00000035653 13776023740 0022053 0 ustar 00root root 0000000 0000000 package benchmark
import (
json "encoding/json"
jlexer "github.com/mailru/easyjson/jlexer"
jwriter "github.com/mailru/easyjson/jwriter"
)
var _ = json.RawMessage{} // suppress unused package warning
// easyjson_decode_..._LargePayload decodes a LargePayload from the lexer.
// Machine-generated by easyjson — do not edit by hand. Recognized keys:
// "users" (slice of *DSUser, nil elements preserved) and "topics" (pointer,
// allocated on demand); null values skip the field, unknown keys are
// skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_LargePayload(in *jlexer.Lexer, out *LargePayload) {
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "users":
in.Delim('[')
if !in.IsDelim(']') {
out.Users = make([]*DSUser, 0, 8)
} else {
out.Users = nil
}
for !in.IsDelim(']') {
var v1 *DSUser
if in.IsNull() {
in.Skip()
v1 = nil
} else {
v1 = new(DSUser)
(*v1).UnmarshalEasyJSON(in)
}
out.Users = append(out.Users, v1)
in.WantComma()
}
in.Delim(']')
case "topics":
if in.IsNull() {
in.Skip()
out.Topics = nil
} else {
out.Topics = new(DSTopicsList)
(*out.Topics).UnmarshalEasyJSON(in)
}
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
}
// easyjson_encode_..._LargePayload writes a LargePayload as a JSON object
// with "users" and "topics" keys. Machine-generated by easyjson — do not
// edit by hand. The `first` bookkeeping is the generator's generic
// comma-separator logic; the first `if !first` can never fire.
func easyjson_encode_github_com_buger_jsonparser_benchmark_LargePayload(out *jwriter.Writer, in *LargePayload) {
out.RawByte('{')
first := true
_ = first
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"users\":")
out.RawByte('[')
for v2, v3 := range in.Users {
if v2 > 0 {
out.RawByte(',')
}
if v3 == nil {
out.RawString("null")
} else {
(*v3).MarshalEasyJSON(out)
}
}
out.RawByte(']')
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"topics\":")
if in.Topics == nil {
out.RawString("null")
} else {
(*in.Topics).MarshalEasyJSON(out)
}
out.RawByte('}')
}
// MarshalEasyJSON implements the easyjson marshaler interface for LargePayload.
func (v *LargePayload) MarshalEasyJSON(w *jwriter.Writer) {
easyjson_encode_github_com_buger_jsonparser_benchmark_LargePayload(w, v)
}
// UnmarshalEasyJSON implements the easyjson unmarshaler interface for LargePayload.
func (v *LargePayload) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson_decode_github_com_buger_jsonparser_benchmark_LargePayload(l, v)
}
// easyjson_decode_..._DSTopicsList decodes a DSTopicsList from the lexer.
// Machine-generated by easyjson — do not edit by hand. Recognized keys:
// "topics" (slice of *DSTopic) and "more_topics_url"; unknown keys are
// skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_DSTopicsList(in *jlexer.Lexer, out *DSTopicsList) {
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "topics":
in.Delim('[')
if !in.IsDelim(']') {
out.Topics = make([]*DSTopic, 0, 8)
} else {
out.Topics = nil
}
for !in.IsDelim(']') {
var v4 *DSTopic
if in.IsNull() {
in.Skip()
v4 = nil
} else {
v4 = new(DSTopic)
(*v4).UnmarshalEasyJSON(in)
}
out.Topics = append(out.Topics, v4)
in.WantComma()
}
in.Delim(']')
case "more_topics_url":
out.MoreTopicsUrl = in.String()
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
}
// easyjson_encode_..._DSTopicsList writes a DSTopicsList as a JSON object
// with "topics" and "more_topics_url" keys. Machine-generated by easyjson —
// do not edit by hand.
func easyjson_encode_github_com_buger_jsonparser_benchmark_DSTopicsList(out *jwriter.Writer, in *DSTopicsList) {
out.RawByte('{')
first := true
_ = first
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"topics\":")
out.RawByte('[')
for v5, v6 := range in.Topics {
if v5 > 0 {
out.RawByte(',')
}
if v6 == nil {
out.RawString("null")
} else {
(*v6).MarshalEasyJSON(out)
}
}
out.RawByte(']')
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"more_topics_url\":")
out.String(in.MoreTopicsUrl)
out.RawByte('}')
}
// MarshalEasyJSON implements the easyjson marshaler interface for DSTopicsList.
func (v *DSTopicsList) MarshalEasyJSON(w *jwriter.Writer) {
easyjson_encode_github_com_buger_jsonparser_benchmark_DSTopicsList(w, v)
}
// UnmarshalEasyJSON implements the easyjson unmarshaler interface for DSTopicsList.
func (v *DSTopicsList) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson_decode_github_com_buger_jsonparser_benchmark_DSTopicsList(l, v)
}
// easyjson_decode_..._DSTopic decodes a DSTopic from the lexer.
// Machine-generated by easyjson — do not edit by hand. Recognized keys:
// "id" (int) and "slug" (string); unknown keys are skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_DSTopic(in *jlexer.Lexer, out *DSTopic) {
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "id":
out.Id = in.Int()
case "slug":
out.Slug = in.String()
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
}
// easyjson_encode_..._DSTopic writes a DSTopic as a JSON object with "id"
// and "slug" keys. Machine-generated by easyjson — do not edit by hand.
func easyjson_encode_github_com_buger_jsonparser_benchmark_DSTopic(out *jwriter.Writer, in *DSTopic) {
out.RawByte('{')
first := true
_ = first
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"id\":")
out.Int(in.Id)
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"slug\":")
out.String(in.Slug)
out.RawByte('}')
}
// MarshalEasyJSON implements the easyjson marshaler interface for DSTopic.
func (v *DSTopic) MarshalEasyJSON(w *jwriter.Writer) {
easyjson_encode_github_com_buger_jsonparser_benchmark_DSTopic(w, v)
}
// UnmarshalEasyJSON implements the easyjson unmarshaler interface for DSTopic.
func (v *DSTopic) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson_decode_github_com_buger_jsonparser_benchmark_DSTopic(l, v)
}
// easyjson_decode_..._DSUser decodes a DSUser from the lexer.
// Machine-generated by easyjson — do not edit by hand. Recognized key:
// "username" (string); unknown keys are skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_DSUser(in *jlexer.Lexer, out *DSUser) {
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "username":
out.Username = in.String()
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
}
// easyjson_encode_..._DSUser writes a DSUser as a JSON object with a single
// "username" key. Machine-generated by easyjson — do not edit by hand.
func easyjson_encode_github_com_buger_jsonparser_benchmark_DSUser(out *jwriter.Writer, in *DSUser) {
out.RawByte('{')
first := true
_ = first
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"username\":")
out.String(in.Username)
out.RawByte('}')
}
// MarshalEasyJSON implements the easyjson marshaler interface for DSUser.
func (v *DSUser) MarshalEasyJSON(w *jwriter.Writer) {
easyjson_encode_github_com_buger_jsonparser_benchmark_DSUser(w, v)
}
// UnmarshalEasyJSON implements the easyjson unmarshaler interface for DSUser.
func (v *DSUser) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjson_decode_github_com_buger_jsonparser_benchmark_DSUser(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_MediumPayload decodes
// one JSON object into out. Recognized keys:
//   - "person":  a nested CBPerson object (null -> nil pointer)
//   - "company": a free-form object decoded into map[string]interface{}
//     (an empty object yields a nil map, per the generated convention)
// Unknown keys are skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_MediumPayload(in *jlexer.Lexer, out *MediumPayload) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "person":
			if in.IsNull() {
				in.Skip()
				out.Person = nil
			} else {
				out.Person = new(CBPerson)
				(*out.Person).UnmarshalEasyJSON(in)
			}
		case "company":
			in.Delim('{')
			// Allocate the map only when the object is non-empty.
			if !in.IsDelim('}') {
				out.Company = make(map[string]interface{})
			} else {
				out.Company = nil
			}
			for !in.IsDelim('}') {
				// inner key shadows the outer one intentionally
				key := in.String()
				in.WantColon()
				var v7 interface{}
				v7 = in.Interface()
				(out.Company)[key] = v7
				in.WantComma()
			}
			in.Delim('}')
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_MediumPayload writes
// in as {"person":<CBPerson|null>,"company":{<key>:<value>,...}}.
//
// BUG FIX: the generated code wrote each company entry as out.String(name)
// immediately followed by out.Raw(json.Marshal(value)) with no ':' separator,
// which produces invalid JSON for any non-empty Company map. The colon is now
// emitted between key and value. All other output bytes are unchanged.
func easyjson_encode_github_com_buger_jsonparser_benchmark_MediumPayload(out *jwriter.Writer, in *MediumPayload) {
	out.RawString(`{"person":`)
	if in.Person == nil {
		out.RawString("null")
	} else {
		in.Person.MarshalEasyJSON(out)
	}
	out.RawString(`,"company":{`)
	first := true
	for name, value := range in.Company {
		if !first {
			out.RawByte(',')
		}
		first = false
		out.String(name)
		out.RawByte(':') // fix: key/value separator was missing
		// Company values are arbitrary, so defer to encoding/json for them.
		out.Raw(json.Marshal(value))
	}
	out.RawString(`}}`)
}
// MarshalEasyJSON implements easyjson.Marshaler for MediumPayload.
func (v *MediumPayload) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_MediumPayload(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for MediumPayload.
func (v *MediumPayload) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_MediumPayload(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_CBPerson decodes one
// JSON object into out. Recognized keys "name", "github" and "gravatar" each
// hold a nested object; an explicit null sets the corresponding pointer to
// nil. Unknown keys are skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_CBPerson(in *jlexer.Lexer, out *CBPerson) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "name":
			if in.IsNull() {
				in.Skip()
				out.Name = nil
			} else {
				out.Name = new(CBName)
				(*out.Name).UnmarshalEasyJSON(in)
			}
		case "github":
			if in.IsNull() {
				in.Skip()
				out.Github = nil
			} else {
				out.Github = new(CBGithub)
				(*out.Github).UnmarshalEasyJSON(in)
			}
		case "gravatar":
			if in.IsNull() {
				in.Skip()
				out.Gravatar = nil
			} else {
				out.Gravatar = new(CBGravatar)
				(*out.Gravatar).UnmarshalEasyJSON(in)
			}
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_CBPerson writes in as
// {"name":...,"github":...,"gravatar":...}; a nil pointer is written as null.
// Field order is fixed, so the separators are emitted unconditionally; output
// bytes match the generated original.
func easyjson_encode_github_com_buger_jsonparser_benchmark_CBPerson(out *jwriter.Writer, in *CBPerson) {
	out.RawString(`{"name":`)
	if in.Name == nil {
		out.RawString("null")
	} else {
		in.Name.MarshalEasyJSON(out)
	}
	out.RawString(`,"github":`)
	if in.Github == nil {
		out.RawString("null")
	} else {
		in.Github.MarshalEasyJSON(out)
	}
	out.RawString(`,"gravatar":`)
	if in.Gravatar == nil {
		out.RawString("null")
	} else {
		in.Gravatar.MarshalEasyJSON(out)
	}
	out.RawByte('}')
}
// MarshalEasyJSON implements easyjson.Marshaler for CBPerson.
func (v *CBPerson) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_CBPerson(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for CBPerson.
func (v *CBPerson) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_CBPerson(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_CBName decodes one
// JSON object into out. Only "full_name" (string) is recognized; other keys
// are skipped recursively and nulls leave the field at its zero value.
func easyjson_decode_github_com_buger_jsonparser_benchmark_CBName(in *jlexer.Lexer, out *CBName) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "full_name":
			out.FullName = in.String()
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_CBName writes in as
// {"full_name":<string>}; output bytes match the generated original.
func easyjson_encode_github_com_buger_jsonparser_benchmark_CBName(out *jwriter.Writer, in *CBName) {
	out.RawString(`{"full_name":`)
	out.String(in.FullName)
	out.RawByte('}')
}
// MarshalEasyJSON implements easyjson.Marshaler for CBName.
func (v *CBName) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_CBName(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for CBName.
func (v *CBName) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_CBName(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_CBGithub decodes one
// JSON object into out. Only "followers" (int) is recognized; other keys are
// skipped recursively and nulls leave the field at its zero value.
func easyjson_decode_github_com_buger_jsonparser_benchmark_CBGithub(in *jlexer.Lexer, out *CBGithub) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "followers":
			out.Followers = in.Int()
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_CBGithub writes in as
// {"followers":<int>}; output bytes match the generated original.
func easyjson_encode_github_com_buger_jsonparser_benchmark_CBGithub(out *jwriter.Writer, in *CBGithub) {
	out.RawString(`{"followers":`)
	out.Int(in.Followers)
	out.RawByte('}')
}
// MarshalEasyJSON implements easyjson.Marshaler for CBGithub.
func (v *CBGithub) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_CBGithub(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for CBGithub.
func (v *CBGithub) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_CBGithub(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_CBGravatar decodes one
// JSON object into out. Only "avatars" is recognized: a JSON array of CBAvatar
// objects (null elements become nil pointers; an empty array yields a nil
// slice, per the generated convention). Unknown keys are skipped recursively.
func easyjson_decode_github_com_buger_jsonparser_benchmark_CBGravatar(in *jlexer.Lexer, out *CBGravatar) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "avatars":
			in.Delim('[')
			// Pre-size the slice only when the array is non-empty.
			if !in.IsDelim(']') {
				out.Avatars = make([]*CBAvatar, 0, 8)
			} else {
				out.Avatars = nil
			}
			for !in.IsDelim(']') {
				var v9 *CBAvatar
				if in.IsNull() {
					in.Skip()
					v9 = nil
				} else {
					v9 = new(CBAvatar)
					(*v9).UnmarshalEasyJSON(in)
				}
				out.Avatars = append(out.Avatars, v9)
				in.WantComma()
			}
			in.Delim(']')
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_CBGravatar writes in
// as {"avatars":[...]}; nil elements are written as null. Output bytes match
// the generated original (a nil slice also produces "[]").
func easyjson_encode_github_com_buger_jsonparser_benchmark_CBGravatar(out *jwriter.Writer, in *CBGravatar) {
	out.RawString(`{"avatars":[`)
	for i, avatar := range in.Avatars {
		if i > 0 {
			out.RawByte(',')
		}
		if avatar == nil {
			out.RawString("null")
		} else {
			avatar.MarshalEasyJSON(out)
		}
	}
	out.RawString(`]}`)
}
// MarshalEasyJSON implements easyjson.Marshaler for CBGravatar.
func (v *CBGravatar) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_CBGravatar(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for CBGravatar.
func (v *CBGravatar) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_CBGravatar(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_CBAvatar decodes one
// JSON object into out. Only "url" (string) is recognized; other keys are
// skipped recursively and nulls leave the field at its zero value.
func easyjson_decode_github_com_buger_jsonparser_benchmark_CBAvatar(in *jlexer.Lexer, out *CBAvatar) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "url":
			out.Url = in.String()
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_CBAvatar writes in as
// {"url":<string>}; output bytes match the generated original.
func easyjson_encode_github_com_buger_jsonparser_benchmark_CBAvatar(out *jwriter.Writer, in *CBAvatar) {
	out.RawString(`{"url":`)
	out.String(in.Url)
	out.RawByte('}')
}
// MarshalEasyJSON implements easyjson.Marshaler for CBAvatar.
func (v *CBAvatar) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_CBAvatar(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for CBAvatar.
func (v *CBAvatar) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_CBAvatar(l, v)
}
// easyjson_decode_github_com_buger_jsonparser_benchmark_SmallPayload decodes
// one JSON object into out. Recognized keys: st/sid/gr/tz/v (int) and
// tt/uuid/ip/ua (string). Unknown keys are skipped recursively; null values
// leave the corresponding field at its zero value.
func easyjson_decode_github_com_buger_jsonparser_benchmark_SmallPayload(in *jlexer.Lexer, out *SmallPayload) {
	in.Delim('{')
	for !in.IsDelim('}') {
		key := in.UnsafeString()
		in.WantColon()
		if in.IsNull() {
			in.Skip()
			in.WantComma()
			continue
		}
		switch key {
		case "st":
			out.St = in.Int()
		case "sid":
			out.Sid = in.Int()
		case "tt":
			out.Tt = in.String()
		case "gr":
			out.Gr = in.Int()
		case "uuid":
			out.Uuid = in.String()
		case "ip":
			out.Ip = in.String()
		case "ua":
			out.Ua = in.String()
		case "tz":
			out.Tz = in.Int()
		case "v":
			out.V = in.Int()
		default:
			in.SkipRecursive()
		}
		in.WantComma()
	}
	in.Delim('}')
}
// easyjson_encode_github_com_buger_jsonparser_benchmark_SmallPayload writes in
// as a flat object with fields st, sid, tt, gr, uuid, ip, ua, tz, v in that
// fixed order. The per-field "first" bookkeeping of the generated original is
// replaced with unconditional separators; output bytes are identical.
func easyjson_encode_github_com_buger_jsonparser_benchmark_SmallPayload(out *jwriter.Writer, in *SmallPayload) {
	out.RawString(`{"st":`)
	out.Int(in.St)
	out.RawString(`,"sid":`)
	out.Int(in.Sid)
	out.RawString(`,"tt":`)
	out.String(in.Tt)
	out.RawString(`,"gr":`)
	out.Int(in.Gr)
	out.RawString(`,"uuid":`)
	out.String(in.Uuid)
	out.RawString(`,"ip":`)
	out.String(in.Ip)
	out.RawString(`,"ua":`)
	out.String(in.Ua)
	out.RawString(`,"tz":`)
	out.Int(in.Tz)
	out.RawString(`,"v":`)
	out.Int(in.V)
	out.RawByte('}')
}
// MarshalEasyJSON implements easyjson.Marshaler for SmallPayload.
func (v *SmallPayload) MarshalEasyJSON(w *jwriter.Writer) {
	easyjson_encode_github_com_buger_jsonparser_benchmark_SmallPayload(w, v)
}

// UnmarshalEasyJSON implements easyjson.Unmarshaler for SmallPayload.
func (v *SmallPayload) UnmarshalEasyJSON(l *jlexer.Lexer) {
	easyjson_decode_github_com_buger_jsonparser_benchmark_SmallPayload(l, v)
}
jsonparser-1.1.1/benchmark/benchmark_ffjson.go 0000664 0000000 0000000 00000201537 13776023740 0021501 0 ustar 00root root 0000000 0000000 // DO NOT EDIT!
// Code generated by ffjson
// source: benchmark.go
// DO NOT EDIT!
package benchmark
import (
"bytes"
"encoding/json"
"fmt"
fflib "github.com/pquerna/ffjson/fflib/v1"
)
// MarshalJSON implements json.Marshaler for CBAvatar. A nil receiver encodes
// as "null"; otherwise the buffer-based encoder does the work.
func (mj *CBAvatar) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj, {"Url":<string>}, to buf.
// A nil receiver is written as "null". The generated original's unused
// err/obj scaffolding is dropped; output bytes are identical.
func (mj *CBAvatar) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"Url":`)
	fflib.WriteJsonString(buf, string(mj.Url))
	buf.WriteByte('}')
	return nil
}
// Key indices used by the CBAvatar scanner: a base value, a sentinel for
// unrecognized keys, and one index per known field.
const (
	ffj_t_CBAvatarbase = iota
	ffj_t_CBAvatarno_such_key
	ffj_t_CBAvatar_Url
)

// Byte form of the "Url" key, used for comparisons while scanning.
var ffj_key_CBAvatar_Url = []byte("Url")
// UnmarshalJSON implements json.Unmarshaler: it wraps the input in an ffjson
// lexer and runs the generated state machine, expecting a top-level object.
func (uj *CBAvatar) UnmarshalJSON(input []byte) error {
	fs := fflib.NewFFLexer(input)
	return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer decodes a CBAvatar by driving a goto-based token state
// machine over fs: map_start -> want_key -> want_colon -> want_value ->
// after_value, looping until the closing bracket. Keys are matched first
// byte-exactly, then case-insensitively; unknown keys are skipped. Only the
// "Url" field is populated. Left byte-for-byte as generated; comments only.
func (uj *CBAvatar) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_CBAvatarbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// closing bracket here means the object ended cleanly
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// empty key "": treat as unknown
				currentKey = ffj_t_CBAvatarno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				// exact match first (dispatch on first byte), then a
				// case-insensitive fallback, then "unknown key"
				switch kn[0] {
				case 'U':
					if bytes.Equal(ffj_key_CBAvatar_Url, kn) {
						currentKey = ffj_t_CBAvatar_Url
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.SimpleLetterEqualFold(ffj_key_CBAvatar_Url, kn) {
					currentKey = ffj_t_CBAvatar_Url
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_CBAvatarno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_CBAvatar_Url:
					goto handle_Url
				case ffj_t_CBAvatarno_such_key:
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}
handle_Url:
	/* handler: uj.Url type=string kind=string quoted=false*/
	{
		{
			if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
			}
		}
		// null leaves the existing value untouched
		if tok == fflib.FFTok_null {
		} else {
			outBuf := fs.Output.Bytes()
			uj.Url = string(string(outBuf))
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse
wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for CBGithub. A nil receiver encodes
// as "null"; otherwise the buffer-based encoder does the work.
func (mj *CBGithub) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj, {"Followers":<int>}, to
// buf. A nil receiver is written as "null". The generated original's unused
// err/obj scaffolding is dropped; output bytes are identical.
func (mj *CBGithub) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"Followers":`)
	fflib.FormatBits2(buf, uint64(mj.Followers), 10, mj.Followers < 0)
	buf.WriteByte('}')
	return nil
}
// Key indices used by the CBGithub scanner: base, unknown-key sentinel, and
// one index per known field.
const (
	ffj_t_CBGithubbase = iota
	ffj_t_CBGithubno_such_key
	ffj_t_CBGithub_Followers
)

// Byte form of the "Followers" key, used for comparisons while scanning.
var ffj_key_CBGithub_Followers = []byte("Followers")
// UnmarshalJSON implements json.Unmarshaler: it wraps the input in an ffjson
// lexer and runs the generated state machine, expecting a top-level object.
func (uj *CBGithub) UnmarshalJSON(input []byte) error {
	fs := fflib.NewFFLexer(input)
	return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer decodes a CBGithub with the generated goto-based token
// state machine (map_start -> want_key -> want_colon -> want_value ->
// after_value). Keys are matched byte-exactly, then case-insensitively;
// unknown keys are skipped. Only "Followers" (int) is populated. Left
// byte-for-byte as generated; comments only.
func (uj *CBGithub) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_CBGithubbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// closing bracket here means the object ended cleanly
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// empty key "": treat as unknown
				currentKey = ffj_t_CBGithubno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				switch kn[0] {
				case 'F':
					if bytes.Equal(ffj_key_CBGithub_Followers, kn) {
						currentKey = ffj_t_CBGithub_Followers
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.EqualFoldRight(ffj_key_CBGithub_Followers, kn) {
					currentKey = ffj_t_CBGithub_Followers
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_CBGithubno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_CBGithub_Followers:
					goto handle_Followers
				case ffj_t_CBGithubno_such_key:
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}
handle_Followers:
	/* handler: uj.Followers type=int kind=int quoted=false*/
	{
		if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
			return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
		}
	}
	{
		// null leaves the existing value untouched
		if tok == fflib.FFTok_null {
		} else {
			tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
			if err != nil {
				return fs.WrapErr(err)
			}
			uj.Followers = int(tval)
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse
wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for CBGravatar. A nil receiver
// encodes as "null"; otherwise the buffer-based encoder does the work.
func (mj *CBGravatar) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj, {"Avatars":[...]|null}, to
// buf. A nil receiver is written as "null"; a nil Avatars slice as
// {"Avatars":null}.
//
// BUG FIX: the generated code handled a nil element with
// `buf.WriteString("null"); return nil`, i.e. it returned from the whole
// function mid-array, leaving the '[' and '{' unterminated and producing
// invalid, truncated JSON. A nil element is now written as "null" and the
// loop continues. Output for slices without nil elements is unchanged.
func (mj *CBGravatar) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"Avatars":`)
	if mj.Avatars != nil {
		buf.WriteString(`[`)
		for i, v := range mj.Avatars {
			if i != 0 {
				buf.WriteString(`,`)
			}
			if v == nil {
				// fix: emit "null" for this element and keep encoding
				buf.WriteString("null")
				continue
			}
			if err := v.MarshalJSONBuf(buf); err != nil {
				return err
			}
		}
		buf.WriteString(`]`)
	} else {
		buf.WriteString(`null`)
	}
	buf.WriteByte('}')
	return nil
}
// Key indices used by the CBGravatar scanner: base, unknown-key sentinel, and
// one index per known field.
const (
	ffj_t_CBGravatarbase = iota
	ffj_t_CBGravatarno_such_key
	ffj_t_CBGravatar_Avatars
)

// Byte form of the "Avatars" key, used for comparisons while scanning.
var ffj_key_CBGravatar_Avatars = []byte("Avatars")
// UnmarshalJSON implements json.Unmarshaler: it wraps the input in an ffjson
// lexer and runs the generated state machine, expecting a top-level object.
func (uj *CBGravatar) UnmarshalJSON(input []byte) error {
	fs := fflib.NewFFLexer(input)
	return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer decodes a CBGravatar with the generated goto-based
// token state machine (map_start -> want_key -> want_colon -> want_value ->
// after_value). Only "Avatars" is populated: an array of *CBAvatar decoded in
// an inner token loop. Left byte-for-byte as generated; comments only.
func (uj *CBGravatar) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_CBGravatarbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// closing bracket here means the object ended cleanly
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// empty key "": treat as unknown
				currentKey = ffj_t_CBGravatarno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				switch kn[0] {
				case 'A':
					if bytes.Equal(ffj_key_CBGravatar_Avatars, kn) {
						currentKey = ffj_t_CBGravatar_Avatars
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.EqualFoldRight(ffj_key_CBGravatar_Avatars, kn) {
					currentKey = ffj_t_CBGravatar_Avatars
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_CBGravatarno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_CBGravatar_Avatars:
					goto handle_Avatars
				case ffj_t_CBGravatarno_such_key:
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}
handle_Avatars:
	/* handler: uj.Avatars type=[]*benchmark.CBAvatar kind=slice quoted=false*/
	{
		{
			if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok))
			}
		}
		if tok == fflib.FFTok_null {
			uj.Avatars = nil
		} else {
			uj.Avatars = make([]*CBAvatar, 0)
			wantVal := true
			for {
				var tmp_uj__Avatars *CBAvatar
				tok = fs.Scan()
				if tok == fflib.FFTok_error {
					goto tokerror
				}
				if tok == fflib.FFTok_right_brace {
					break
				}
				if tok == fflib.FFTok_comma {
					if wantVal == true {
						// TODO(pquerna): this isn't an ideal error message, this handles
						// things like [,,,] as an array value.
						return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
					}
					continue
				} else {
					wantVal = true
				}
				/* handler: tmp_uj__Avatars type=*benchmark.CBAvatar kind=ptr quoted=false*/
				{
					// NOTE(review): a null element jumps straight back to
					// mainparse without appending and abandons the array
					// loop — looks like a generator quirk; confirm against
					// upstream ffjson before relying on null elements here.
					if tok == fflib.FFTok_null {
						tmp_uj__Avatars = nil
						state = fflib.FFParse_after_value
						goto mainparse
					}
					if tmp_uj__Avatars == nil {
						tmp_uj__Avatars = new(CBAvatar)
					}
					err = tmp_uj__Avatars.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
					if err != nil {
						return err
					}
					state = fflib.FFParse_after_value
				}
				uj.Avatars = append(uj.Avatars, tmp_uj__Avatars)
				wantVal = false
			}
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse
wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for CBName. A nil receiver encodes as
// "null"; otherwise the buffer-based encoder does the work.
func (mj *CBName) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj, {"FullName":<string>}, to
// buf. A nil receiver is written as "null". The generated original's unused
// err/obj scaffolding is dropped; output bytes are identical.
func (mj *CBName) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"FullName":`)
	fflib.WriteJsonString(buf, string(mj.FullName))
	buf.WriteByte('}')
	return nil
}
// Key indices used by the CBName scanner: base, unknown-key sentinel, and one
// index per known field.
const (
	ffj_t_CBNamebase = iota
	ffj_t_CBNameno_such_key
	ffj_t_CBName_FullName
)

// Byte form of the "FullName" key, used for comparisons while scanning.
var ffj_key_CBName_FullName = []byte("FullName")
// UnmarshalJSON implements json.Unmarshaler: it wraps the input in an ffjson
// lexer and runs the generated state machine, expecting a top-level object.
func (uj *CBName) UnmarshalJSON(input []byte) error {
	fs := fflib.NewFFLexer(input)
	return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer decodes a CBName with the generated goto-based token
// state machine (map_start -> want_key -> want_colon -> want_value ->
// after_value). Keys are matched byte-exactly, then case-insensitively;
// unknown keys are skipped. Only "FullName" (string) is populated. Left
// byte-for-byte as generated; comments only.
func (uj *CBName) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_CBNamebase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// closing bracket here means the object ended cleanly
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// empty key "": treat as unknown
				currentKey = ffj_t_CBNameno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				switch kn[0] {
				case 'F':
					if bytes.Equal(ffj_key_CBName_FullName, kn) {
						currentKey = ffj_t_CBName_FullName
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.SimpleLetterEqualFold(ffj_key_CBName_FullName, kn) {
					currentKey = ffj_t_CBName_FullName
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_CBNameno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_CBName_FullName:
					goto handle_FullName
				case ffj_t_CBNameno_such_key:
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}
handle_FullName:
	/* handler: uj.FullName type=string kind=string quoted=false*/
	{
		{
			if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
			}
		}
		// null leaves the existing value untouched
		if tok == fflib.FFTok_null {
		} else {
			outBuf := fs.Output.Bytes()
			uj.FullName = string(string(outBuf))
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse
wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for CBPerson. A nil receiver encodes
// as "null"; otherwise the buffer-based encoder does the work.
func (mj *CBPerson) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj to buf:
// {"Name":...,"Github":...,"Gravatar":...}, with nil pointers written as
// null. The generated original duplicated each field prefix inside the
// nil/non-nil branches; here the prefix is written once and only the value
// differs. Output bytes are identical.
func (mj *CBPerson) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"Name":`)
	if mj.Name == nil {
		buf.WriteString(`null`)
	} else if err := mj.Name.MarshalJSONBuf(buf); err != nil {
		return err
	}
	buf.WriteString(`,"Github":`)
	if mj.Github == nil {
		buf.WriteString(`null`)
	} else if err := mj.Github.MarshalJSONBuf(buf); err != nil {
		return err
	}
	buf.WriteString(`,"Gravatar":`)
	if mj.Gravatar == nil {
		buf.WriteString(`null`)
	} else if err := mj.Gravatar.MarshalJSONBuf(buf); err != nil {
		return err
	}
	buf.WriteByte('}')
	return nil
}
// Key indices used by the CBPerson scanner: base, unknown-key sentinel, and
// one index per known field.
const (
	ffj_t_CBPersonbase = iota
	ffj_t_CBPersonno_such_key
	ffj_t_CBPerson_Name
	ffj_t_CBPerson_Github
	ffj_t_CBPerson_Gravatar
)

// Byte forms of the known CBPerson keys, used for comparisons while scanning.
var ffj_key_CBPerson_Name = []byte("Name")
var ffj_key_CBPerson_Github = []byte("Github")
var ffj_key_CBPerson_Gravatar = []byte("Gravatar")
// UnmarshalJSON implements json.Unmarshaler: it wraps the input in an ffjson
// lexer and runs the generated state machine, expecting a top-level object.
func (uj *CBPerson) UnmarshalJSON(input []byte) error {
	fs := fflib.NewFFLexer(input)
	return uj.UnmarshalJSONFFLexer(fs, fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer decodes a CBPerson with the generated goto-based token
// state machine (map_start -> want_key -> want_colon -> want_value ->
// after_value). Keys "Name", "Github" and "Gravatar" are matched byte-exactly
// (dispatched on the first byte), then case-insensitively; unknown keys are
// skipped. Each handler decodes a nested object via its own FFLexer method,
// with null setting the pointer to nil. Left byte-for-byte as generated;
// comments only.
func (uj *CBPerson) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_CBPersonbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// closing bracket here means the object ended cleanly
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// empty key "": treat as unknown
				currentKey = ffj_t_CBPersonno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				switch kn[0] {
				case 'G':
					if bytes.Equal(ffj_key_CBPerson_Github, kn) {
						currentKey = ffj_t_CBPerson_Github
						state = fflib.FFParse_want_colon
						goto mainparse
					} else if bytes.Equal(ffj_key_CBPerson_Gravatar, kn) {
						currentKey = ffj_t_CBPerson_Gravatar
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				case 'N':
					if bytes.Equal(ffj_key_CBPerson_Name, kn) {
						currentKey = ffj_t_CBPerson_Name
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.SimpleLetterEqualFold(ffj_key_CBPerson_Gravatar, kn) {
					currentKey = ffj_t_CBPerson_Gravatar
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.SimpleLetterEqualFold(ffj_key_CBPerson_Github, kn) {
					currentKey = ffj_t_CBPerson_Github
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.SimpleLetterEqualFold(ffj_key_CBPerson_Name, kn) {
					currentKey = ffj_t_CBPerson_Name
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_CBPersonno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_CBPerson_Name:
					goto handle_Name
				case ffj_t_CBPerson_Github:
					goto handle_Github
				case ffj_t_CBPerson_Gravatar:
					goto handle_Gravatar
				case ffj_t_CBPersonno_such_key:
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}
handle_Name:
	/* handler: uj.Name type=benchmark.CBName kind=struct quoted=false*/
	{
		if tok == fflib.FFTok_null {
			uj.Name = nil
			state = fflib.FFParse_after_value
			goto mainparse
		}
		if uj.Name == nil {
			uj.Name = new(CBName)
		}
		err = uj.Name.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
		if err != nil {
			return err
		}
		state = fflib.FFParse_after_value
	}
	state = fflib.FFParse_after_value
	goto mainparse
handle_Github:
	/* handler: uj.Github type=benchmark.CBGithub kind=struct quoted=false*/
	{
		if tok == fflib.FFTok_null {
			uj.Github = nil
			state = fflib.FFParse_after_value
			goto mainparse
		}
		if uj.Github == nil {
			uj.Github = new(CBGithub)
		}
		err = uj.Github.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
		if err != nil {
			return err
		}
		state = fflib.FFParse_after_value
	}
	state = fflib.FFParse_after_value
	goto mainparse
handle_Gravatar:
	/* handler: uj.Gravatar type=benchmark.CBGravatar kind=struct quoted=false*/
	{
		if tok == fflib.FFTok_null {
			uj.Gravatar = nil
			state = fflib.FFParse_after_value
			goto mainparse
		}
		if uj.Gravatar == nil {
			uj.Gravatar = new(CBGravatar)
		}
		err = uj.Gravatar.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
		if err != nil {
			return err
		}
		state = fflib.FFParse_after_value
	}
	state = fflib.FFParse_after_value
	goto mainparse
wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for DSTopic. A nil receiver encodes
// as "null"; otherwise the buffer-based encoder does the work.
func (mj *DSTopic) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf appends the JSON encoding of mj, {"Id":<int>,"Slug":<string>},
// to buf. A nil receiver is written as "null". The generated original's unused
// err/obj scaffolding is dropped; output bytes are identical.
func (mj *DSTopic) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	buf.WriteString(`{"Id":`)
	fflib.FormatBits2(buf, uint64(mj.Id), 10, mj.Id < 0)
	buf.WriteString(`,"Slug":`)
	fflib.WriteJsonString(buf, string(mj.Slug))
	buf.WriteByte('}')
	return nil
}
// Key indices used by the DSTopic scanner: base, unknown-key sentinel, and
// one index per known field.
const (
	ffj_t_DSTopicbase = iota
	ffj_t_DSTopicno_such_key
	ffj_t_DSTopic_Id
	ffj_t_DSTopic_Slug
)

// Byte forms of the known DSTopic keys, used for comparisons while scanning.
var ffj_key_DSTopic_Id = []byte("Id")
var ffj_key_DSTopic_Slug = []byte("Slug")
// UnmarshalJSON implements json.Unmarshaler for DSTopic by running the
// ffjson lexer over input, starting in the object-open state.
func (uj *DSTopic) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer parses a DSTopic JSON object using ffjson's
// goto-based state machine. The main loop scans tokens and transitions
// through map_start -> want_key -> want_colon -> want_value ->
// after_value; per-field handlers live after the loop and jump back to
// mainparse. Recognized keys: "Id" (int), "Slug" (string); unknown
// keys are skipped.
// NOTE(review): in fflib's token naming, FFTok_left_bracket appears to
// denote '{' (it is what map_start requires) — confirm against fflib.
func (uj *DSTopic) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_DSTopicbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			// After a value: comma means another key follows, close
			// means the object is done.
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// json {} ended. goto exit. woo.
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// "" case. hrm.
				currentKey = ffj_t_DSTopicno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				// First-byte dispatch, then exact byte match, then
				// case-insensitive fallbacks.
				switch kn[0] {
				case 'I':
					if bytes.Equal(ffj_key_DSTopic_Id, kn) {
						currentKey = ffj_t_DSTopic_Id
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				case 'S':
					if bytes.Equal(ffj_key_DSTopic_Slug, kn) {
						currentKey = ffj_t_DSTopic_Slug
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.EqualFoldRight(ffj_key_DSTopic_Slug, kn) {
					currentKey = ffj_t_DSTopic_Slug
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.SimpleLetterEqualFold(ffj_key_DSTopic_Id, kn) {
					currentKey = ffj_t_DSTopic_Id
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_DSTopicno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_DSTopic_Id:
					goto handle_Id
				case ffj_t_DSTopic_Slug:
					goto handle_Slug
				case ffj_t_DSTopicno_such_key:
					// Unknown key: discard its value wholesale.
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}

handle_Id:
	/* handler: uj.Id type=int kind=int quoted=false*/
	{
		if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
			return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
		}
	}
	{
		if tok == fflib.FFTok_null {
			// JSON null: uj.Id keeps its current value.
		} else {
			tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
			if err != nil {
				return fs.WrapErr(err)
			}
			uj.Id = int(tval)
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

handle_Slug:
	/* handler: uj.Slug type=string kind=string quoted=false*/
	{
		{
			if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
			}
		}
		if tok == fflib.FFTok_null {
			// JSON null: uj.Slug keeps its current value.
		} else {
			outBuf := fs.Output.Bytes()
			uj.Slug = string(string(outBuf))
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	// Lexer-level error: prefer the lexer's BigError, then its
	// accumulated Error state.
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for DSTopicsList.
// A nil receiver encodes as the JSON literal "null".
func (mj *DSTopicsList) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf writes the DSTopicsList as a JSON object into buf:
// {"Topics":[...],"MoreTopicsUrl":<string>}. A nil receiver writes
// "null"; a nil Topics slice writes "Topics":null.
//
// BUG FIX: the generated code returned nil immediately after writing
// "null" for a nil *DSTopic element, truncating the output (the array
// and object were never closed and MoreTopicsUrl was never emitted),
// producing invalid JSON. A nil element now encodes as "null" and
// serialization continues with the remaining elements.
func (mj *DSTopicsList) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	var err error
	var obj []byte
	_ = obj
	_ = err
	buf.WriteString(`{"Topics":`)
	if mj.Topics != nil {
		buf.WriteString(`[`)
		for i, v := range mj.Topics {
			if i != 0 {
				buf.WriteString(`,`)
			}
			if v == nil {
				// nil element encodes as null; keep going.
				buf.WriteString("null")
				continue
			}
			err = v.MarshalJSONBuf(buf)
			if err != nil {
				return err
			}
		}
		buf.WriteString(`]`)
	} else {
		buf.WriteString(`null`)
	}
	buf.WriteString(`,"MoreTopicsUrl":`)
	fflib.WriteJsonString(buf, string(mj.MoreTopicsUrl))
	buf.WriteByte('}')
	return nil
}
// Key-dispatch states for DSTopicsList parsing; *no_such_key marks an
// unrecognized key whose value will be skipped.
const (
	ffj_t_DSTopicsListbase = iota
	ffj_t_DSTopicsListno_such_key
	ffj_t_DSTopicsList_Topics
	ffj_t_DSTopicsList_MoreTopicsUrl
)

// Expected JSON key bytes for DSTopicsList fields.
var ffj_key_DSTopicsList_Topics = []byte("Topics")
var ffj_key_DSTopicsList_MoreTopicsUrl = []byte("MoreTopicsUrl")
// UnmarshalJSON implements json.Unmarshaler for DSTopicsList by running
// the ffjson lexer over input, starting in the object-open state.
func (uj *DSTopicsList) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer parses a DSTopicsList JSON object with ffjson's
// goto-based state machine. Recognized keys: "Topics" (array of
// *DSTopic) and "MoreTopicsUrl" (string); unknown keys are skipped.
// Per-field handlers live after the main loop and jump back to
// mainparse when done.
func (uj *DSTopicsList) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_DSTopicsListbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// json {} ended. goto exit. woo.
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// "" case. hrm.
				currentKey = ffj_t_DSTopicsListno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				// First-byte dispatch, then exact match, then
				// case-insensitive fallbacks.
				switch kn[0] {
				case 'M':
					if bytes.Equal(ffj_key_DSTopicsList_MoreTopicsUrl, kn) {
						currentKey = ffj_t_DSTopicsList_MoreTopicsUrl
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				case 'T':
					if bytes.Equal(ffj_key_DSTopicsList_Topics, kn) {
						currentKey = ffj_t_DSTopicsList_Topics
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.EqualFoldRight(ffj_key_DSTopicsList_MoreTopicsUrl, kn) {
					currentKey = ffj_t_DSTopicsList_MoreTopicsUrl
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.EqualFoldRight(ffj_key_DSTopicsList_Topics, kn) {
					currentKey = ffj_t_DSTopicsList_Topics
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_DSTopicsListno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_DSTopicsList_Topics:
					goto handle_Topics
				case ffj_t_DSTopicsList_MoreTopicsUrl:
					goto handle_MoreTopicsUrl
				case ffj_t_DSTopicsListno_such_key:
					// Unknown key: discard its value wholesale.
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}

handle_Topics:
	/* handler: uj.Topics type=[]*benchmark.DSTopic kind=slice quoted=false*/
	{
		{
			if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok))
			}
		}
		if tok == fflib.FFTok_null {
			uj.Topics = nil
		} else {
			uj.Topics = make([]*DSTopic, 0)
			wantVal := true
			for {
				var tmp_uj__Topics *DSTopic
				tok = fs.Scan()
				if tok == fflib.FFTok_error {
					goto tokerror
				}
				if tok == fflib.FFTok_right_brace {
					break
				}
				if tok == fflib.FFTok_comma {
					if wantVal == true {
						// TODO(pquerna): this isn't an ideal error message, this handles
						// things like [,,,] as an array value.
						return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
					}
					continue
				} else {
					wantVal = true
				}
				/* handler: tmp_uj__Topics type=*benchmark.DSTopic kind=ptr quoted=false*/
				{
					if tok == fflib.FFTok_null {
						// NOTE(review): a null array element jumps straight
						// back to mainparse without appending, abandoning the
						// rest of this array — confirm this is intended.
						tmp_uj__Topics = nil
						state = fflib.FFParse_after_value
						goto mainparse
					}
					if tmp_uj__Topics == nil {
						tmp_uj__Topics = new(DSTopic)
					}
					err = tmp_uj__Topics.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
					if err != nil {
						return err
					}
					state = fflib.FFParse_after_value
				}
				uj.Topics = append(uj.Topics, tmp_uj__Topics)
				wantVal = false
			}
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

handle_MoreTopicsUrl:
	/* handler: uj.MoreTopicsUrl type=string kind=string quoted=false*/
	{
		{
			if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
			}
		}
		if tok == fflib.FFTok_null {
			// JSON null: field keeps its current value.
		} else {
			outBuf := fs.Output.Bytes()
			uj.MoreTopicsUrl = string(string(outBuf))
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for DSUser.
// A nil receiver encodes as the JSON literal "null".
func (mj *DSUser) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf writes the DSUser as {"Username":<string>} into buf.
// A nil receiver writes "null".
func (mj *DSUser) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	var err error
	var obj []byte
	// Generated-code scaffolding; kept to match the generator's output.
	_ = obj
	_ = err
	buf.WriteString(`{"Username":`)
	fflib.WriteJsonString(buf, string(mj.Username))
	buf.WriteByte('}')
	return nil
}
// Key-dispatch states for DSUser parsing; *no_such_key marks an
// unrecognized key whose value will be skipped.
const (
	ffj_t_DSUserbase = iota
	ffj_t_DSUserno_such_key
	ffj_t_DSUser_Username
)

// Expected JSON key bytes for the single DSUser field.
var ffj_key_DSUser_Username = []byte("Username")
// UnmarshalJSON implements json.Unmarshaler for DSUser by running the
// ffjson lexer over input, starting in the object-open state.
func (uj *DSUser) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer parses a DSUser JSON object with ffjson's
// goto-based state machine. The only recognized key is "Username"
// (string, matched exactly first, then case-insensitively); unknown
// keys are skipped.
func (uj *DSUser) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_DSUserbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// json {} ended. goto exit. woo.
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// "" case. hrm.
				currentKey = ffj_t_DSUserno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				switch kn[0] {
				case 'U':
					if bytes.Equal(ffj_key_DSUser_Username, kn) {
						currentKey = ffj_t_DSUser_Username
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				// Case-insensitive fallback after the exact match fails.
				if fflib.EqualFoldRight(ffj_key_DSUser_Username, kn) {
					currentKey = ffj_t_DSUser_Username
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_DSUserno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_DSUser_Username:
					goto handle_Username
				case ffj_t_DSUserno_such_key:
					// Unknown key: discard its value wholesale.
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}

handle_Username:
	/* handler: uj.Username type=string kind=string quoted=false*/
	{
		{
			if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
			}
		}
		if tok == fflib.FFTok_null {
			// JSON null: field keeps its current value.
		} else {
			outBuf := fs.Output.Bytes()
			uj.Username = string(string(outBuf))
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for LargePayload.
// A nil receiver encodes as the JSON literal "null".
func (mj *LargePayload) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf writes the LargePayload as a JSON object into buf:
// {"Users":[...],"Topics":{...}}. A nil receiver writes "null"; nil
// Users or Topics fields encode as null.
//
// BUG FIX: the generated code returned nil immediately after writing
// "null" for a nil *DSUser element, truncating the output (the array
// and object were never closed and Topics was never emitted),
// producing invalid JSON. A nil element now encodes as "null" and
// serialization continues with the remaining elements.
func (mj *LargePayload) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	var err error
	var obj []byte
	_ = obj
	_ = err
	buf.WriteString(`{"Users":`)
	if mj.Users != nil {
		buf.WriteString(`[`)
		for i, v := range mj.Users {
			if i != 0 {
				buf.WriteString(`,`)
			}
			if v == nil {
				// nil element encodes as null; keep going.
				buf.WriteString("null")
				continue
			}
			err = v.MarshalJSONBuf(buf)
			if err != nil {
				return err
			}
		}
		buf.WriteString(`]`)
	} else {
		buf.WriteString(`null`)
	}
	if mj.Topics != nil {
		buf.WriteString(`,"Topics":`)
		err = mj.Topics.MarshalJSONBuf(buf)
		if err != nil {
			return err
		}
	} else {
		buf.WriteString(`,"Topics":null`)
	}
	buf.WriteByte('}')
	return nil
}
// Key-dispatch states for LargePayload parsing; *no_such_key marks an
// unrecognized key whose value will be skipped.
const (
	ffj_t_LargePayloadbase = iota
	ffj_t_LargePayloadno_such_key
	ffj_t_LargePayload_Users
	ffj_t_LargePayload_Topics
)

// Expected JSON key bytes for LargePayload fields.
var ffj_key_LargePayload_Users = []byte("Users")
var ffj_key_LargePayload_Topics = []byte("Topics")
// UnmarshalJSON implements json.Unmarshaler for LargePayload by running
// the ffjson lexer over input, starting in the object-open state.
func (uj *LargePayload) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer parses a LargePayload JSON object with ffjson's
// goto-based state machine. Recognized keys: "Users" (array of
// *DSUser) and "Topics" (*DSTopicsList); unknown keys are skipped.
func (uj *LargePayload) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_LargePayloadbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// json {} ended. goto exit. woo.
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// "" case. hrm.
				currentKey = ffj_t_LargePayloadno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				// First-byte dispatch, then exact match, then
				// case-insensitive fallbacks.
				switch kn[0] {
				case 'T':
					if bytes.Equal(ffj_key_LargePayload_Topics, kn) {
						currentKey = ffj_t_LargePayload_Topics
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				case 'U':
					if bytes.Equal(ffj_key_LargePayload_Users, kn) {
						currentKey = ffj_t_LargePayload_Users
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.EqualFoldRight(ffj_key_LargePayload_Topics, kn) {
					currentKey = ffj_t_LargePayload_Topics
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.EqualFoldRight(ffj_key_LargePayload_Users, kn) {
					currentKey = ffj_t_LargePayload_Users
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_LargePayloadno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_LargePayload_Users:
					goto handle_Users
				case ffj_t_LargePayload_Topics:
					goto handle_Topics
				case ffj_t_LargePayloadno_such_key:
					// Unknown key: discard its value wholesale.
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}

handle_Users:
	/* handler: uj.Users type=[]*benchmark.DSUser kind=slice quoted=false*/
	{
		{
			if tok != fflib.FFTok_left_brace && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok))
			}
		}
		if tok == fflib.FFTok_null {
			uj.Users = nil
		} else {
			uj.Users = make([]*DSUser, 0)
			wantVal := true
			for {
				var tmp_uj__Users *DSUser
				tok = fs.Scan()
				if tok == fflib.FFTok_error {
					goto tokerror
				}
				if tok == fflib.FFTok_right_brace {
					break
				}
				if tok == fflib.FFTok_comma {
					if wantVal == true {
						// TODO(pquerna): this isn't an ideal error message, this handles
						// things like [,,,] as an array value.
						return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
					}
					continue
				} else {
					wantVal = true
				}
				/* handler: tmp_uj__Users type=*benchmark.DSUser kind=ptr quoted=false*/
				{
					if tok == fflib.FFTok_null {
						// NOTE(review): a null array element jumps straight
						// back to mainparse without appending, abandoning the
						// rest of this array — confirm this is intended.
						tmp_uj__Users = nil
						state = fflib.FFParse_after_value
						goto mainparse
					}
					if tmp_uj__Users == nil {
						tmp_uj__Users = new(DSUser)
					}
					err = tmp_uj__Users.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
					if err != nil {
						return err
					}
					state = fflib.FFParse_after_value
				}
				uj.Users = append(uj.Users, tmp_uj__Users)
				wantVal = false
			}
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

handle_Topics:
	/* handler: uj.Topics type=benchmark.DSTopicsList kind=struct quoted=false*/
	{
		if tok == fflib.FFTok_null {
			uj.Topics = nil
			state = fflib.FFParse_after_value
			goto mainparse
		}
		// Lazily allocate the nested struct, then delegate to its lexer.
		if uj.Topics == nil {
			uj.Topics = new(DSTopicsList)
		}
		err = uj.Topics.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
		if err != nil {
			return err
		}
		state = fflib.FFParse_after_value
	}
	state = fflib.FFParse_after_value
	goto mainparse

wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for MediumPayload.
// A nil receiver encodes as the JSON literal "null".
func (mj *MediumPayload) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf writes the MediumPayload as a JSON object into buf:
// {"Person":{...},"Company":{...}}. A nil receiver writes "null"; a
// nil Person encodes as "Person":null. Company (a
// map[string]interface{}) has no generated fast path and is encoded
// via the buffer's generic Encode fallback.
func (mj *MediumPayload) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	var err error
	var obj []byte
	// Generated-code scaffolding; kept to match the generator's output.
	_ = obj
	_ = err
	// The opening '{' is emitted together with the first key in both
	// branches.
	if mj.Person != nil {
		buf.WriteString(`{"Person":`)
		{
			err = mj.Person.MarshalJSONBuf(buf)
			if err != nil {
				return err
			}
		}
	} else {
		buf.WriteString(`{"Person":null`)
	}
	buf.WriteString(`,"Company":`)
	/* Falling back. type=map[string]interface {} kind=map */
	err = buf.Encode(mj.Company)
	if err != nil {
		return err
	}
	buf.WriteByte('}')
	return nil
}
// Key-dispatch states for MediumPayload parsing; *no_such_key marks an
// unrecognized key whose value will be skipped.
const (
	ffj_t_MediumPayloadbase = iota
	ffj_t_MediumPayloadno_such_key
	ffj_t_MediumPayload_Person
	ffj_t_MediumPayload_Company
)

// Expected JSON key bytes for MediumPayload fields.
var ffj_key_MediumPayload_Person = []byte("Person")
var ffj_key_MediumPayload_Company = []byte("Company")
// UnmarshalJSON implements json.Unmarshaler for MediumPayload by
// running the ffjson lexer over input, starting in the object-open state.
func (uj *MediumPayload) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer parses a MediumPayload JSON object with ffjson's
// goto-based state machine. Recognized keys: "Person" (*CBPerson) and
// "Company" (map[string]interface{}, values decoded via the
// encoding/json fallback); unknown keys are skipped.
func (uj *MediumPayload) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
	var err error = nil
	currentKey := ffj_t_MediumPayloadbase
	_ = currentKey
	tok := fflib.FFTok_init
	wantedTok := fflib.FFTok_init
mainparse:
	for {
		tok = fs.Scan()
		// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
		if tok == fflib.FFTok_error {
			goto tokerror
		}
		switch state {
		case fflib.FFParse_map_start:
			if tok != fflib.FFTok_left_bracket {
				wantedTok = fflib.FFTok_left_bracket
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_key
			continue
		case fflib.FFParse_after_value:
			if tok == fflib.FFTok_comma {
				state = fflib.FFParse_want_key
			} else if tok == fflib.FFTok_right_bracket {
				goto done
			} else {
				wantedTok = fflib.FFTok_comma
				goto wrongtokenerror
			}
		case fflib.FFParse_want_key:
			// json {} ended. goto exit. woo.
			if tok == fflib.FFTok_right_bracket {
				goto done
			}
			if tok != fflib.FFTok_string {
				wantedTok = fflib.FFTok_string
				goto wrongtokenerror
			}
			kn := fs.Output.Bytes()
			if len(kn) <= 0 {
				// "" case. hrm.
				currentKey = ffj_t_MediumPayloadno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			} else {
				// First-byte dispatch, then exact match, then
				// case-insensitive fallbacks.
				switch kn[0] {
				case 'C':
					if bytes.Equal(ffj_key_MediumPayload_Company, kn) {
						currentKey = ffj_t_MediumPayload_Company
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				case 'P':
					if bytes.Equal(ffj_key_MediumPayload_Person, kn) {
						currentKey = ffj_t_MediumPayload_Person
						state = fflib.FFParse_want_colon
						goto mainparse
					}
				}
				if fflib.SimpleLetterEqualFold(ffj_key_MediumPayload_Company, kn) {
					currentKey = ffj_t_MediumPayload_Company
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				if fflib.EqualFoldRight(ffj_key_MediumPayload_Person, kn) {
					currentKey = ffj_t_MediumPayload_Person
					state = fflib.FFParse_want_colon
					goto mainparse
				}
				currentKey = ffj_t_MediumPayloadno_such_key
				state = fflib.FFParse_want_colon
				goto mainparse
			}
		case fflib.FFParse_want_colon:
			if tok != fflib.FFTok_colon {
				wantedTok = fflib.FFTok_colon
				goto wrongtokenerror
			}
			state = fflib.FFParse_want_value
			continue
		case fflib.FFParse_want_value:
			if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
				switch currentKey {
				case ffj_t_MediumPayload_Person:
					goto handle_Person
				case ffj_t_MediumPayload_Company:
					goto handle_Company
				case ffj_t_MediumPayloadno_such_key:
					// Unknown key: discard its value wholesale.
					err = fs.SkipField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					state = fflib.FFParse_after_value
					goto mainparse
				}
			} else {
				goto wantedvalue
			}
		}
	}

handle_Person:
	/* handler: uj.Person type=benchmark.CBPerson kind=struct quoted=false*/
	{
		if tok == fflib.FFTok_null {
			uj.Person = nil
			state = fflib.FFParse_after_value
			goto mainparse
		}
		// Lazily allocate the nested struct, then delegate to its lexer.
		if uj.Person == nil {
			uj.Person = new(CBPerson)
		}
		err = uj.Person.UnmarshalJSONFFLexer(fs, fflib.FFParse_want_key)
		if err != nil {
			return err
		}
		state = fflib.FFParse_after_value
	}
	state = fflib.FFParse_after_value
	goto mainparse

handle_Company:
	/* handler: uj.Company type=map[string]interface {} kind=map quoted=false*/
	{
		{
			if tok != fflib.FFTok_left_bracket && tok != fflib.FFTok_null {
				return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for ", tok))
			}
		}
		if tok == fflib.FFTok_null {
			uj.Company = nil
		} else {
			uj.Company = make(map[string]interface{}, 0)
			wantVal := true
			// Scan key/value pairs until the closing brace.
			for {
				var k string
				var tmp_uj__Company interface{}
				tok = fs.Scan()
				if tok == fflib.FFTok_error {
					goto tokerror
				}
				if tok == fflib.FFTok_right_bracket {
					break
				}
				if tok == fflib.FFTok_comma {
					if wantVal == true {
						// TODO(pquerna): this isn't an ideal error message, this handles
						// things like [,,,] as an array value.
						return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
					}
					continue
				} else {
					wantVal = true
				}
				/* handler: k type=string kind=string quoted=false*/
				{
					{
						if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
							return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
						}
					}
					if tok == fflib.FFTok_null {
						// null key: k stays "".
					} else {
						outBuf := fs.Output.Bytes()
						k = string(string(outBuf))
					}
				}
				// Expect ':' after key
				tok = fs.Scan()
				if tok != fflib.FFTok_colon {
					return fs.WrapErr(fmt.Errorf("wanted colon token, but got token: %v", tok))
				}
				tok = fs.Scan()
				/* handler: tmp_uj__Company type=interface {} kind=interface quoted=false*/
				{
					/* Falling back. type=interface {} kind=interface */
					// No generated decoder for interface{}: capture the raw
					// value bytes and hand them to encoding/json.
					tbuf, err := fs.CaptureField(tok)
					if err != nil {
						return fs.WrapErr(err)
					}
					err = json.Unmarshal(tbuf, &tmp_uj__Company)
					if err != nil {
						return fs.WrapErr(err)
					}
				}
				uj.Company[k] = tmp_uj__Company
				wantVal = false
			}
		}
	}
	state = fflib.FFParse_after_value
	goto mainparse

wantedvalue:
	return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
	return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
	if fs.BigError != nil {
		return fs.WrapErr(fs.BigError)
	}
	err = fs.Error.ToError()
	if err != nil {
		return fs.WrapErr(err)
	}
	panic("ffjson-generated: unreachable, please report bug.")
done:
	return nil
}
// MarshalJSON implements json.Marshaler for SmallPayload.
// A nil receiver encodes as the JSON literal "null".
func (mj *SmallPayload) MarshalJSON() ([]byte, error) {
	var buf fflib.Buffer
	if mj == nil {
		buf.WriteString("null")
		return buf.Bytes(), nil
	}
	if err := mj.MarshalJSONBuf(&buf); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// MarshalJSONBuf writes the SmallPayload as a flat JSON object into
// buf, in fixed field order: St, Sid, Tt, Gr, Uuid, Ip, Ua, Tz, V.
// Integers are formatted with FormatBits2, strings with
// WriteJsonString. A nil receiver writes "null". The write order is
// the output format; do not reorder.
func (mj *SmallPayload) MarshalJSONBuf(buf fflib.EncodingBuffer) error {
	if mj == nil {
		buf.WriteString("null")
		return nil
	}
	var err error
	var obj []byte
	// Generated-code scaffolding; kept to match the generator's output.
	_ = obj
	_ = err
	buf.WriteString(`{"St":`)
	fflib.FormatBits2(buf, uint64(mj.St), 10, mj.St < 0)
	buf.WriteString(`,"Sid":`)
	fflib.FormatBits2(buf, uint64(mj.Sid), 10, mj.Sid < 0)
	buf.WriteString(`,"Tt":`)
	fflib.WriteJsonString(buf, string(mj.Tt))
	buf.WriteString(`,"Gr":`)
	fflib.FormatBits2(buf, uint64(mj.Gr), 10, mj.Gr < 0)
	buf.WriteString(`,"Uuid":`)
	fflib.WriteJsonString(buf, string(mj.Uuid))
	buf.WriteString(`,"Ip":`)
	fflib.WriteJsonString(buf, string(mj.Ip))
	buf.WriteString(`,"Ua":`)
	fflib.WriteJsonString(buf, string(mj.Ua))
	buf.WriteString(`,"Tz":`)
	fflib.FormatBits2(buf, uint64(mj.Tz), 10, mj.Tz < 0)
	buf.WriteString(`,"V":`)
	fflib.FormatBits2(buf, uint64(mj.V), 10, mj.V < 0)
	buf.WriteByte('}')
	return nil
}
// Key-dispatch states for SmallPayload parsing; *no_such_key marks an
// unrecognized key whose value will be skipped.
const (
	ffj_t_SmallPayloadbase = iota
	ffj_t_SmallPayloadno_such_key
	ffj_t_SmallPayload_St
	ffj_t_SmallPayload_Sid
	ffj_t_SmallPayload_Tt
	ffj_t_SmallPayload_Gr
	ffj_t_SmallPayload_Uuid
	ffj_t_SmallPayload_Ip
	ffj_t_SmallPayload_Ua
	ffj_t_SmallPayload_Tz
	ffj_t_SmallPayload_V
)

// Expected JSON key bytes for SmallPayload fields.
var ffj_key_SmallPayload_St = []byte("St")
var ffj_key_SmallPayload_Sid = []byte("Sid")
var ffj_key_SmallPayload_Tt = []byte("Tt")
var ffj_key_SmallPayload_Gr = []byte("Gr")
var ffj_key_SmallPayload_Uuid = []byte("Uuid")
var ffj_key_SmallPayload_Ip = []byte("Ip")
var ffj_key_SmallPayload_Ua = []byte("Ua")
var ffj_key_SmallPayload_Tz = []byte("Tz")
var ffj_key_SmallPayload_V = []byte("V")
// UnmarshalJSON implements json.Unmarshaler for SmallPayload by running
// the ffjson lexer over input, starting in the object-open state.
func (uj *SmallPayload) UnmarshalJSON(input []byte) error {
	return uj.UnmarshalJSONFFLexer(fflib.NewFFLexer(input), fflib.FFParse_map_start)
}
// UnmarshalJSONFFLexer is the ffjson-generated decoding state machine for
// SmallPayload. NOTE(review): machine-generated code — prefer regenerating
// with ffjson over hand-editing.
//
// It scans tokens from fs and cycles through parser states: on a key it
// matches the ffj_key_SmallPayload_* byte slices (exact match on the first
// byte first, then case-insensitive fallbacks) and records the matched
// field in currentKey; on the following value token it jumps to that
// field's handle_* label, which parses the value into uj. Unknown keys are
// skipped via fs.SkipField.
func (uj *SmallPayload) UnmarshalJSONFFLexer(fs *fflib.FFLexer, state fflib.FFParseState) error {
var err error = nil
currentKey := ffj_t_SmallPayloadbase
_ = currentKey
tok := fflib.FFTok_init
wantedTok := fflib.FFTok_init
// Main token loop: each iteration consumes one token and dispatches on
// the current parser state.
mainparse:
for {
tok = fs.Scan()
// println(fmt.Sprintf("debug: tok: %v state: %v", tok, state))
if tok == fflib.FFTok_error {
goto tokerror
}
switch state {
case fflib.FFParse_map_start:
if tok != fflib.FFTok_left_bracket {
wantedTok = fflib.FFTok_left_bracket
goto wrongtokenerror
}
state = fflib.FFParse_want_key
continue
case fflib.FFParse_after_value:
if tok == fflib.FFTok_comma {
state = fflib.FFParse_want_key
} else if tok == fflib.FFTok_right_bracket {
goto done
} else {
wantedTok = fflib.FFTok_comma
goto wrongtokenerror
}
case fflib.FFParse_want_key:
// json {} ended. goto exit. woo.
if tok == fflib.FFTok_right_bracket {
goto done
}
if tok != fflib.FFTok_string {
wantedTok = fflib.FFTok_string
goto wrongtokenerror
}
kn := fs.Output.Bytes()
if len(kn) <= 0 {
// "" case. hrm.
currentKey = ffj_t_SmallPayloadno_such_key
state = fflib.FFParse_want_colon
goto mainparse
} else {
// Fast path: exact, case-sensitive key match keyed on the first byte.
switch kn[0] {
case 'G':
if bytes.Equal(ffj_key_SmallPayload_Gr, kn) {
currentKey = ffj_t_SmallPayload_Gr
state = fflib.FFParse_want_colon
goto mainparse
}
case 'I':
if bytes.Equal(ffj_key_SmallPayload_Ip, kn) {
currentKey = ffj_t_SmallPayload_Ip
state = fflib.FFParse_want_colon
goto mainparse
}
case 'S':
if bytes.Equal(ffj_key_SmallPayload_St, kn) {
currentKey = ffj_t_SmallPayload_St
state = fflib.FFParse_want_colon
goto mainparse
} else if bytes.Equal(ffj_key_SmallPayload_Sid, kn) {
currentKey = ffj_t_SmallPayload_Sid
state = fflib.FFParse_want_colon
goto mainparse
}
case 'T':
if bytes.Equal(ffj_key_SmallPayload_Tt, kn) {
currentKey = ffj_t_SmallPayload_Tt
state = fflib.FFParse_want_colon
goto mainparse
} else if bytes.Equal(ffj_key_SmallPayload_Tz, kn) {
currentKey = ffj_t_SmallPayload_Tz
state = fflib.FFParse_want_colon
goto mainparse
}
case 'U':
if bytes.Equal(ffj_key_SmallPayload_Uuid, kn) {
currentKey = ffj_t_SmallPayload_Uuid
state = fflib.FFParse_want_colon
goto mainparse
} else if bytes.Equal(ffj_key_SmallPayload_Ua, kn) {
currentKey = ffj_t_SmallPayload_Ua
state = fflib.FFParse_want_colon
goto mainparse
}
case 'V':
if bytes.Equal(ffj_key_SmallPayload_V, kn) {
currentKey = ffj_t_SmallPayload_V
state = fflib.FFParse_want_colon
goto mainparse
}
}
// Slow path: case-insensitive fallback comparisons, generated in
// reverse field order.
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_V, kn) {
currentKey = ffj_t_SmallPayload_V
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Tz, kn) {
currentKey = ffj_t_SmallPayload_Tz
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Ua, kn) {
currentKey = ffj_t_SmallPayload_Ua
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Ip, kn) {
currentKey = ffj_t_SmallPayload_Ip
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Uuid, kn) {
currentKey = ffj_t_SmallPayload_Uuid
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Gr, kn) {
currentKey = ffj_t_SmallPayload_Gr
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.SimpleLetterEqualFold(ffj_key_SmallPayload_Tt, kn) {
currentKey = ffj_t_SmallPayload_Tt
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.EqualFoldRight(ffj_key_SmallPayload_Sid, kn) {
currentKey = ffj_t_SmallPayload_Sid
state = fflib.FFParse_want_colon
goto mainparse
}
if fflib.EqualFoldRight(ffj_key_SmallPayload_St, kn) {
currentKey = ffj_t_SmallPayload_St
state = fflib.FFParse_want_colon
goto mainparse
}
// No match: remember to skip the upcoming value.
currentKey = ffj_t_SmallPayloadno_such_key
state = fflib.FFParse_want_colon
goto mainparse
}
case fflib.FFParse_want_colon:
if tok != fflib.FFTok_colon {
wantedTok = fflib.FFTok_colon
goto wrongtokenerror
}
state = fflib.FFParse_want_value
continue
case fflib.FFParse_want_value:
if tok == fflib.FFTok_left_brace || tok == fflib.FFTok_left_bracket || tok == fflib.FFTok_integer || tok == fflib.FFTok_double || tok == fflib.FFTok_string || tok == fflib.FFTok_bool || tok == fflib.FFTok_null {
switch currentKey {
case ffj_t_SmallPayload_St:
goto handle_St
case ffj_t_SmallPayload_Sid:
goto handle_Sid
case ffj_t_SmallPayload_Tt:
goto handle_Tt
case ffj_t_SmallPayload_Gr:
goto handle_Gr
case ffj_t_SmallPayload_Uuid:
goto handle_Uuid
case ffj_t_SmallPayload_Ip:
goto handle_Ip
case ffj_t_SmallPayload_Ua:
goto handle_Ua
case ffj_t_SmallPayload_Tz:
goto handle_Tz
case ffj_t_SmallPayload_V:
goto handle_V
case ffj_t_SmallPayloadno_such_key:
err = fs.SkipField(tok)
if err != nil {
return fs.WrapErr(err)
}
state = fflib.FFParse_after_value
goto mainparse
}
} else {
goto wantedvalue
}
}
}
// Per-field value handlers: each validates the token type, parses the
// value into the corresponding field (null leaves it untouched), and
// returns to the main loop.
handle_St:
/* handler: uj.St type=int kind=int quoted=false*/
{
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
}
}
{
if tok == fflib.FFTok_null {
} else {
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
if err != nil {
return fs.WrapErr(err)
}
uj.St = int(tval)
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Sid:
/* handler: uj.Sid type=int kind=int quoted=false*/
{
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
}
}
{
if tok == fflib.FFTok_null {
} else {
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
if err != nil {
return fs.WrapErr(err)
}
uj.Sid = int(tval)
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Tt:
/* handler: uj.Tt type=string kind=string quoted=false*/
{
{
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
}
}
if tok == fflib.FFTok_null {
} else {
outBuf := fs.Output.Bytes()
uj.Tt = string(string(outBuf))
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Gr:
/* handler: uj.Gr type=int kind=int quoted=false*/
{
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
}
}
{
if tok == fflib.FFTok_null {
} else {
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
if err != nil {
return fs.WrapErr(err)
}
uj.Gr = int(tval)
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Uuid:
/* handler: uj.Uuid type=string kind=string quoted=false*/
{
{
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
}
}
if tok == fflib.FFTok_null {
} else {
outBuf := fs.Output.Bytes()
uj.Uuid = string(string(outBuf))
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Ip:
/* handler: uj.Ip type=string kind=string quoted=false*/
{
{
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
}
}
if tok == fflib.FFTok_null {
} else {
outBuf := fs.Output.Bytes()
uj.Ip = string(string(outBuf))
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Ua:
/* handler: uj.Ua type=string kind=string quoted=false*/
{
{
if tok != fflib.FFTok_string && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for string", tok))
}
}
if tok == fflib.FFTok_null {
} else {
outBuf := fs.Output.Bytes()
uj.Ua = string(string(outBuf))
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_Tz:
/* handler: uj.Tz type=int kind=int quoted=false*/
{
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
}
}
{
if tok == fflib.FFTok_null {
} else {
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
if err != nil {
return fs.WrapErr(err)
}
uj.Tz = int(tval)
}
}
state = fflib.FFParse_after_value
goto mainparse
handle_V:
/* handler: uj.V type=int kind=int quoted=false*/
{
if tok != fflib.FFTok_integer && tok != fflib.FFTok_null {
return fs.WrapErr(fmt.Errorf("cannot unmarshal %s into Go value for int", tok))
}
}
{
if tok == fflib.FFTok_null {
} else {
tval, err := fflib.ParseInt(fs.Output.Bytes(), 10, 64)
if err != nil {
return fs.WrapErr(err)
}
uj.V = int(tval)
}
}
state = fflib.FFParse_after_value
goto mainparse
// Error exits and normal completion.
wantedvalue:
return fs.WrapErr(fmt.Errorf("wanted value token, but got token: %v", tok))
wrongtokenerror:
return fs.WrapErr(fmt.Errorf("ffjson: wanted token: %v, but got token: %v output=%s", wantedTok, tok, fs.Output.String()))
tokerror:
if fs.BigError != nil {
return fs.WrapErr(fs.BigError)
}
err = fs.Error.ToError()
if err != nil {
return fs.WrapErr(err)
}
panic("ffjson-generated: unreachable, please report bug.")
done:
return nil
}
jsonparser-1.1.1/benchmark/benchmark_large_payload_test.go 0000664 0000000 0000000 00000005633 13776023740 0024055 0 ustar 00root root 0000000 0000000 /*
Each test should process 24kb json record (based on Discourse API)
It should read 2 arrays, and for each item in array get few fields.
Basically it means processing full JSON file.
*/
package benchmark
import (
"github.com/buger/jsonparser"
"testing"
// "github.com/Jeffail/gabs"
// "github.com/bitly/go-simplejson"
"encoding/json"
"github.com/a8m/djson"
jlexer "github.com/mailru/easyjson/jlexer"
"github.com/pquerna/ffjson/ffjson"
// "github.com/antonholmquist/jason"
// "fmt"
)
/*
github.com/buger/jsonparser
*/
// BenchmarkJsonParserLarge walks both arrays of the 24kb fixture with
// jsonparser, reading a couple of fields per element without decoding the
// whole document into Go values.
func BenchmarkJsonParserLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
jsonparser.ArrayEach(largeFixture, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
jsonparser.Get(value, "username")
nothing()
}, "users")
jsonparser.ArrayEach(largeFixture, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
jsonparser.GetInt(value, "id")
jsonparser.Get(value, "slug")
nothing()
}, "topics", "topics")
}
}
/*
encoding/json
*/
func BenchmarkEncodingJsonStructLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
var data LargePayload
json.Unmarshal(largeFixture, &data)
for _, u := range data.Users {
nothing(u.Username)
}
for _, t := range data.Topics.Topics {
nothing(t.Id, t.Slug)
}
}
}
func BenchmarkEncodingJsonInterfaceLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
var data interface{}
json.Unmarshal(largeFixture, &data)
m := data.(map[string]interface{})
users := m["users"].([]interface{})
for _, u := range users {
nothing(u.(map[string]interface{})["username"].(string))
}
topics := m["topics"].(map[string]interface{})["topics"].([]interface{})
for _, t := range topics {
tI := t.(map[string]interface{})
nothing(tI["id"].(float64), tI["slug"].(string))
}
}
}
/*
github.com/pquerna/ffjson
*/
func BenchmarkFFJsonLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
var data LargePayload
ffjson.Unmarshal(largeFixture, &data)
for _, u := range data.Users {
nothing(u.Username)
}
for _, t := range data.Topics.Topics {
nothing(t.Id, t.Slug)
}
}
}
/*
github.com/mailru/easyjson
*/
func BenchmarkEasyJsonLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
lexer := &jlexer.Lexer{Data: largeFixture}
data := new(LargePayload)
data.UnmarshalEasyJSON(lexer)
for _, u := range data.Users {
nothing(u.Username)
}
for _, t := range data.Topics.Topics {
nothing(t.Id, t.Slug)
}
}
}
/*
github.com/a8m/djson
*/
func BenchmarkDjsonLarge(b *testing.B) {
for i := 0; i < b.N; i++ {
m, _ := djson.DecodeObject(largeFixture)
users := m["users"].([]interface{})
for _, u := range users {
nothing(u.(map[string]interface{})["username"].(string))
}
topics := m["topics"].(map[string]interface{})["topics"].([]interface{})
for _, t := range topics {
tI := t.(map[string]interface{})
nothing(tI["id"].(float64), tI["slug"].(string))
}
}
}
jsonparser-1.1.1/benchmark/benchmark_medium_payload_test.go 0000664 0000000 0000000 00000021524 13776023740 0024240 0 ustar 00root root 0000000 0000000 /*
Each test should process 2.4kb json record (based on Clearbit API)
It should read multiple nested fields and 1 array
*/
package benchmark
import (
"encoding/json"
"testing"
"github.com/Jeffail/gabs"
"github.com/a8m/djson"
"github.com/antonholmquist/jason"
"github.com/bitly/go-simplejson"
"github.com/buger/jsonparser"
jlexer "github.com/mailru/easyjson/jlexer"
"github.com/mreiferson/go-ujson"
"github.com/pquerna/ffjson/ffjson"
"github.com/ugorji/go/codec"
// "fmt"
"bytes"
"errors"
)
/*
github.com/buger/jsonparser
*/
// BenchmarkJsonParserMedium reads three scalar paths plus one nested array
// from the 2.4kb fixture using jsonparser's path API (each call re-scans
// the document from the start).
func BenchmarkJsonParserMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
jsonparser.Get(mediumFixture, "person", "name", "fullName")
jsonparser.GetInt(mediumFixture, "person", "github", "followers")
jsonparser.Get(mediumFixture, "company")
jsonparser.ArrayEach(mediumFixture, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
jsonparser.Get(value, "url")
nothing()
}, "person", "gravatar", "avatars")
}
}
// BenchmarkJsonParserDeleteMedium removes three paths per iteration. The
// fixture is re-copied into a reusable scratch buffer each time so that
// iterations stay independent (NOTE(review): this pattern suggests Delete
// modifies the buffer in place — confirm against jsonparser docs).
func BenchmarkJsonParserDeleteMedium(b *testing.B) {
fixture := make([]byte, 0, len(mediumFixture))
b.ResetTimer()
for i := 0; i < b.N; i++ {
fixture = append(fixture[:0], mediumFixture...)
fixture = jsonparser.Delete(fixture, "person", "name", "fullName")
fixture = jsonparser.Delete(fixture, "person", "github", "followers")
fixture = jsonparser.Delete(fixture, "company")
nothing()
}
}
func BenchmarkJsonParserEachKeyManualMedium(b *testing.B) {
paths := [][]string{
[]string{"person", "name", "fullName"},
[]string{"person", "github", "followers"},
[]string{"company"},
[]string{"person", "gravatar", "avatars"},
}
for i := 0; i < b.N; i++ {
jsonparser.EachKey(mediumFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
switch idx {
case 0:
// jsonparser.ParseString(value)
case 1:
jsonparser.ParseInt(value)
case 2:
// jsonparser.ParseString(value)
case 3:
jsonparser.ArrayEach(value, func(avalue []byte, dataType jsonparser.ValueType, offset int, err error) {
jsonparser.Get(avalue, "url")
})
}
}, paths...)
}
}
func BenchmarkJsonParserEachKeyStructMedium(b *testing.B) {
paths := [][]string{
[]string{"person", "name", "fullName"},
[]string{"person", "github", "followers"},
[]string{"company"},
[]string{"person", "gravatar", "avatars"},
}
for i := 0; i < b.N; i++ {
data := MediumPayload{
Person: &CBPerson{
Name: &CBName{},
Github: &CBGithub{},
Gravatar: &CBGravatar{},
},
}
jsonparser.EachKey(mediumFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
switch idx {
case 0:
data.Person.Name.FullName, _ = jsonparser.ParseString(value)
case 1:
v, _ := jsonparser.ParseInt(value)
data.Person.Github.Followers = int(v)
case 2:
json.Unmarshal(value, &data.Company) // we don't have a JSON -> map[string]interface{} function yet, so use standard encoding/json here
case 3:
var avatars []*CBAvatar
jsonparser.ArrayEach(value, func(avalue []byte, dataType jsonparser.ValueType, offset int, err error) {
url, _ := jsonparser.ParseString(avalue)
avatars = append(avatars, &CBAvatar{Url: url})
})
data.Person.Gravatar.Avatars = avatars
}
}, paths...)
}
}
func BenchmarkJsonParserObjectEachStructMedium(b *testing.B) {
nameKey, githubKey, gravatarKey := []byte("name"), []byte("github"), []byte("gravatar")
errStop := errors.New("stop")
for i := 0; i < b.N; i++ {
data := MediumPayload{
Person: &CBPerson{
Name: &CBName{},
Github: &CBGithub{},
Gravatar: &CBGravatar{},
},
}
missing := 3
jsonparser.ObjectEach(mediumFixture, func(k, v []byte, vt jsonparser.ValueType, o int) error {
switch {
case bytes.Equal(k, nameKey):
data.Person.Name.FullName, _ = jsonparser.GetString(v, "fullName")
missing--
case bytes.Equal(k, githubKey):
x, _ := jsonparser.GetInt(v, "followers")
data.Person.Github.Followers = int(x)
missing--
case bytes.Equal(k, gravatarKey):
var avatars []*CBAvatar
jsonparser.ArrayEach(v, func(avalue []byte, dataType jsonparser.ValueType, offset int, err error) {
url, _ := jsonparser.ParseString(avalue)
avatars = append(avatars, &CBAvatar{Url: url})
}, "avatars")
data.Person.Gravatar.Avatars = avatars
missing--
}
if missing == 0 {
return errStop
} else {
return nil
}
}, "person")
cv, _, _, _ := jsonparser.Get(mediumFixture, "company")
json.Unmarshal(cv, &data.Company)
}
}
/*
encoding/json
*/
func BenchmarkEncodingJsonStructMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
var data MediumPayload
json.Unmarshal(mediumFixture, &data)
nothing(data.Person.Name.FullName, data.Person.Github.Followers, data.Company)
for _, el := range data.Person.Gravatar.Avatars {
nothing(el.Url)
}
}
}
func BenchmarkEncodingJsonInterfaceMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
var data interface{}
json.Unmarshal(mediumFixture, &data)
m := data.(map[string]interface{})
person := m["person"].(map[string]interface{})
name := person["name"].(map[string]interface{})
github := person["github"].(map[string]interface{})
company := m["company"]
gravatar := person["gravatar"].(map[string]interface{})
avatars := gravatar["avatars"].([]interface{})
nothing(name["fullName"].(string), github["followers"].(float64), company)
for _, a := range avatars {
nothing(a.(map[string]interface{})["url"])
}
}
}
/*
github.com/Jeffail/gabs
*/
func BenchmarkGabsMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := gabs.ParseJSON(mediumFixture)
person := json.Path("person")
nothing(
person.Path("name.fullName").Data().(string),
person.Path("github.followers").Data().(float64),
)
json.Path("company").ChildrenMap()
arr, _ := person.Path("gravatar.avatars.url").Children()
for _, el := range arr {
nothing(el.String())
}
}
}
/*
github.com/bitly/go-simplejson
*/
func BenchmarkGoSimpleJsonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := simplejson.NewJson(mediumFixture)
person := json.Get("person")
person.Get("name").Get("fullName").String()
person.Get("github").Get("followers").Float64()
json.Get("company")
arr, _ := person.Get("gravatar").Get("avatars").Array()
for _, el := range arr {
nothing(el.(map[string]interface{})["url"])
}
}
}
/*
github.com/pquerna/ffjson
*/
func BenchmarkFFJsonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
var data MediumPayload
ffjson.Unmarshal(mediumFixture, &data)
nothing(data.Person.Name.FullName, data.Person.Github.Followers, data.Company)
for _, el := range data.Person.Gravatar.Avatars {
nothing(el.Url)
}
}
}
/*
github.com/bitly/go-simplejson
*/
func BenchmarkJasonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := jason.NewObjectFromBytes(mediumFixture)
json.GetString("person.name.fullName")
json.GetFloat64("person.github.followers")
json.GetObject("company")
arr, _ := json.GetObjectArray("person.gravatar.avatars")
for _, el := range arr {
el.GetString("url")
}
nothing()
}
}
/*
github.com/mreiferson/go-ujson
*/
func BenchmarkUjsonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := ujson.NewFromBytes(mediumFixture)
person := json.Get("person")
person.Get("name").Get("fullName").String()
person.Get("github").Get("followers").Float64()
json.Get("company").String()
arr := person.Get("gravatar").Get("avatars").Array()
for _, el := range arr {
el.Get("url").String()
}
nothing()
}
}
/*
github.com/a8m/djson
*/
func BenchmarkDjsonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
m, _ := djson.DecodeObject(mediumFixture)
person := m["person"].(map[string]interface{})
name := person["name"].(map[string]interface{})
github := person["github"].(map[string]interface{})
company := m["company"]
gravatar := person["gravatar"].(map[string]interface{})
avatars := gravatar["avatars"].([]interface{})
nothing(name["fullName"].(string), github["followers"].(float64), company)
for _, a := range avatars {
nothing(a.(map[string]interface{})["url"])
}
}
}
/*
github.com/ugorji/go/codec
*/
func BenchmarkUgirjiMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
decoder := codec.NewDecoderBytes(mediumFixture, new(codec.JsonHandle))
data := new(MediumPayload)
json.Unmarshal(mediumFixture, &data)
data.CodecDecodeSelf(decoder)
nothing(data.Person.Name.FullName, data.Person.Github.Followers, data.Company)
for _, el := range data.Person.Gravatar.Avatars {
nothing(el.Url)
}
}
}
/*
github.com/mailru/easyjson
*/
func BenchmarkEasyJsonMedium(b *testing.B) {
for i := 0; i < b.N; i++ {
lexer := &jlexer.Lexer{Data: mediumFixture}
data := new(MediumPayload)
data.UnmarshalEasyJSON(lexer)
nothing(data.Person.Name.FullName, data.Person.Github.Followers, data.Company)
for _, el := range data.Person.Gravatar.Avatars {
nothing(el.Url)
}
}
}
jsonparser-1.1.1/benchmark/benchmark_set_test.go 0000664 0000000 0000000 00000000551 13776023740 0022037 0 ustar 00root root 0000000 0000000 package benchmark
import (
"github.com/buger/jsonparser"
"strconv"
"testing"
)
// BenchmarkSetLarge measures Set with a pathological 20000-element key
// path; building the path happens before the timer is reset so only the
// Set call itself is measured.
func BenchmarkSetLarge(b *testing.B) {
b.ReportAllocs()
keyPath := make([]string, 20000)
for i := range keyPath {
keyPath[i] = "keyPath" + strconv.Itoa(i)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
_, _ = jsonparser.Set(largeFixture, largeFixture, keyPath...)
}
}
jsonparser-1.1.1/benchmark/benchmark_small_payload_test.go 0000664 0000000 0000000 00000015116 13776023740 0024070 0 ustar 00root root 0000000 0000000 /*
Each test should process 190 byte http log like json record
It should read multiple fields
*/
package benchmark
import (
"encoding/json"
"testing"
"github.com/Jeffail/gabs"
"github.com/a8m/djson"
"github.com/antonholmquist/jason"
"github.com/bitly/go-simplejson"
"github.com/buger/jsonparser"
jlexer "github.com/mailru/easyjson/jlexer"
"github.com/mreiferson/go-ujson"
"github.com/pquerna/ffjson/ffjson"
"github.com/ugorji/go/codec"
// "fmt"
"bytes"
"errors"
)
// nothing swallows its arguments. Benchmarks call it to emulate field
// access, so extracted values are "used" and the compiler does not reject
// them as evaluated-but-not-used.
func nothing(_ ...interface{}) {}
/*
github.com/buger/jsonparser
*/
// BenchmarkJsonParserSmall reads four top-level fields from the 190-byte
// fixture with independent Get calls (each call re-scans the document).
func BenchmarkJsonParserSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
jsonparser.Get(smallFixture, "uuid")
jsonparser.GetInt(smallFixture, "tz")
jsonparser.Get(smallFixture, "ua")
jsonparser.GetInt(smallFixture, "st")
nothing()
}
}
func BenchmarkJsonParserEachKeyManualSmall(b *testing.B) {
paths := [][]string{
[]string{"uuid"},
[]string{"tz"},
[]string{"ua"},
[]string{"st"},
}
for i := 0; i < b.N; i++ {
jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
switch idx {
case 0:
// jsonparser.ParseString(value)
case 1:
jsonparser.ParseInt(value)
case 2:
// jsonparser.ParseString(value)
case 3:
jsonparser.ParseInt(value)
}
}, paths...)
}
}
func BenchmarkJsonParserEachKeyStructSmall(b *testing.B) {
paths := [][]string{
[]string{"uuid"},
[]string{"tz"},
[]string{"ua"},
[]string{"st"},
}
for i := 0; i < b.N; i++ {
var data SmallPayload
jsonparser.EachKey(smallFixture, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
switch idx {
case 0:
data.Uuid, _ = jsonparser.ParseString(value)
case 1:
v, _ := jsonparser.ParseInt(value)
data.Tz = int(v)
case 2:
data.Ua, _ = jsonparser.ParseString(value)
case 3:
v, _ := jsonparser.ParseInt(value)
data.St = int(v)
}
}, paths...)
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
func BenchmarkJsonParserObjectEachStructSmall(b *testing.B) {
uuidKey, tzKey, uaKey, stKey := []byte("uuid"), []byte("tz"), []byte("ua"), []byte("st")
errStop := errors.New("stop")
for i := 0; i < b.N; i++ {
var data SmallPayload
missing := 4
jsonparser.ObjectEach(smallFixture, func(key, value []byte, vt jsonparser.ValueType, off int) error {
switch {
case bytes.Equal(key, uuidKey):
data.Uuid, _ = jsonparser.ParseString(value)
missing--
case bytes.Equal(key, tzKey):
v, _ := jsonparser.ParseInt(value)
data.Tz = int(v)
missing--
case bytes.Equal(key, uaKey):
data.Ua, _ = jsonparser.ParseString(value)
missing--
case bytes.Equal(key, stKey):
v, _ := jsonparser.ParseInt(value)
data.St = int(v)
missing--
}
if missing == 0 {
return errStop
} else {
return nil
}
})
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
func BenchmarkJsonParserSetSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
jsonparser.Set(smallFixture, []byte(`"c90927dd-1588-4fe7-a14f-8a8950cfcbd8"`), "uuid")
jsonparser.Set(smallFixture, []byte("-3"), "tz")
jsonparser.Set(smallFixture, []byte(`"server_agent"`), "ua")
jsonparser.Set(smallFixture, []byte("3"), "st")
nothing()
}
}
// BenchmarkJsonParserDelSmall deletes the four benchmark keys per
// iteration, working on a reusable copy of the fixture so iterations stay
// independent.
func BenchmarkJsonParserDelSmall(b *testing.B) {
	fixture := make([]byte, 0, len(smallFixture))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		fixture = append(fixture[:0], smallFixture...)
		fixture = jsonparser.Delete(fixture, "uuid")
		fixture = jsonparser.Delete(fixture, "tz")
		fixture = jsonparser.Delete(fixture, "ua")
		// Was "stt", a key that is not in the fixture (the sibling Set/Get
		// benchmarks use "st"), so the last Delete measured the missing-key
		// path instead of an actual removal.
		fixture = jsonparser.Delete(fixture, "st")
		nothing()
	}
}
/*
encoding/json
*/
func BenchmarkEncodingJsonStructSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
var data SmallPayload
json.Unmarshal(smallFixture, &data)
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
func BenchmarkEncodingJsonInterfaceSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
var data interface{}
json.Unmarshal(smallFixture, &data)
m := data.(map[string]interface{})
nothing(m["uuid"].(string), m["tz"].(float64), m["ua"].(string), m["st"].(float64))
}
}
/*
github.com/Jeffail/gabs
*/
func BenchmarkGabsSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := gabs.ParseJSON(smallFixture)
nothing(
json.Path("uuid").Data().(string),
json.Path("tz").Data().(float64),
json.Path("ua").Data().(string),
json.Path("st").Data().(float64),
)
}
}
/*
github.com/bitly/go-simplejson
*/
func BenchmarkGoSimplejsonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := simplejson.NewJson(smallFixture)
json.Get("uuid").String()
json.Get("tz").Float64()
json.Get("ua").String()
json.Get("st").Float64()
nothing()
}
}
func BenchmarkGoSimplejsonSetSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := simplejson.NewJson(smallFixture)
json.SetPath([]string{"uuid"}, "c90927dd-1588-4fe7-a14f-8a8950cfcbd8")
json.SetPath([]string{"tz"}, -3)
json.SetPath([]string{"ua"}, "server_agent")
json.SetPath([]string{"st"}, 3)
nothing()
}
}
/*
github.com/pquerna/ffjson
*/
func BenchmarkFFJsonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
var data SmallPayload
ffjson.Unmarshal(smallFixture, &data)
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
/*
github.com/bitly/go-simplejson
*/
func BenchmarkJasonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := jason.NewObjectFromBytes(smallFixture)
json.GetString("uuid")
json.GetFloat64("tz")
json.GetString("ua")
json.GetFloat64("st")
nothing()
}
}
/*
github.com/mreiferson/go-ujson
*/
func BenchmarkUjsonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
json, _ := ujson.NewFromBytes(smallFixture)
json.Get("uuid").String()
json.Get("tz").Float64()
json.Get("ua").String()
json.Get("st").Float64()
nothing()
}
}
/*
github.com/a8m/djson
*/
func BenchmarkDjsonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
m, _ := djson.DecodeObject(smallFixture)
nothing(m["uuid"].(string), m["tz"].(float64), m["ua"].(string), m["st"].(float64))
}
}
/*
github.com/ugorji/go/codec
*/
func BenchmarkUgirjiSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
decoder := codec.NewDecoderBytes(smallFixture, new(codec.JsonHandle))
data := new(SmallPayload)
data.CodecDecodeSelf(decoder)
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
/*
github.com/mailru/easyjson
*/
func BenchmarkEasyJsonSmall(b *testing.B) {
for i := 0; i < b.N; i++ {
lexer := &jlexer.Lexer{Data: smallFixture}
data := new(SmallPayload)
data.UnmarshalEasyJSON(lexer)
nothing(data.Uuid, data.Tz, data.Ua, data.St)
}
}
jsonparser-1.1.1/benchmark/go.mod 0000664 0000000 0000000 00000001131 13776023740 0016745 0 ustar 00root root 0000000 0000000 module benchmarks
require (
github.com/Jeffail/gabs v1.2.0
github.com/a8m/djson v0.0.0-20170509170705-c02c5aef757f
github.com/antonholmquist/jason v1.0.0
github.com/bitly/go-simplejson v0.5.0
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23
github.com/kr/pretty v0.1.0 // indirect
github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983
github.com/mreiferson/go-ujson v0.0.0-20160507014224-e88340868a14
github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7
github.com/ugorji/go v1.1.4
)
jsonparser-1.1.1/benchmark/go.sum 0000664 0000000 0000000 00000004606 13776023740 0017004 0 ustar 00root root 0000000 0000000 github.com/Jeffail/gabs v1.2.0 h1:uFhoIVTtsX7hV2RxNgWad8gMU+8OJdzFbOathJdhD3o=
github.com/Jeffail/gabs v1.2.0/go.mod h1:6xMvQMK4k33lb7GUUpaAPh6nKMmemQeg5d4gn7/bOXc=
github.com/a8m/djson v0.0.0-20170509170705-c02c5aef757f h1:su5fhWd5UCmmRQEFPQPalJ304Qtcgk9ZDDnKnvpsraU=
github.com/a8m/djson v0.0.0-20170509170705-c02c5aef757f/go.mod h1:w3s8fnedJo6LJQ7dUUf1OcetqgS1hGpIDjY5bBowg1Y=
github.com/antonholmquist/jason v1.0.0 h1:Ytg94Bcf1Bfi965K2q0s22mig/n4eGqEij/atENBhA0=
github.com/antonholmquist/jason v1.0.0/go.mod h1:+GxMEKI0Va2U8h3os6oiUAetHAlGMvxjdpAH/9uvUMA=
github.com/bitly/go-simplejson v0.5.0 h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=
github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23 h1:D21IyuvjDCshj1/qq+pCNd3VZOAEI9jy6Bi131YlXgI=
github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983 h1:wL11wNW7dhKIcRCHSm4sHKPWz0tt4mwBsVodG7+Xyqg=
github.com/mailru/easyjson v0.0.0-20190403194419-1ea4449da983/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
github.com/mreiferson/go-ujson v0.0.0-20160507014224-e88340868a14 h1:OtnQzNv3lnjBbnE2rEz9vaoeWtiXL+U5IDUHO29YnJU=
github.com/mreiferson/go-ujson v0.0.0-20160507014224-e88340868a14/go.mod h1:pRizrH03mzcoHZVa3eK2eoMfq4COW0kGOqapG3/ewkE=
github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7 h1:gGBSHPOU7g8YjTbhwn+lvFm2VDEhhA+PwDIlstkgSxE=
github.com/pquerna/ffjson v0.0.0-20181028064349-e517b90714f7/go.mod h1:YARuvh7BUWHNhzDq2OM5tzR2RiCcN2D7sapiKyCel/M=
github.com/ugorji/go v1.1.4 h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
jsonparser-1.1.1/bytes.go 0000664 0000000 0000000 00000001536 13776023740 0015373 0 ustar 00root root 0000000 0000000 package jsonparser
import (
bio "bytes"
)
// minInt64 '-9223372036854775808' is the smallest representable number in int64
const minInt64 = `9223372036854775808`
// About 2x faster then strconv.ParseInt because it only supports base 10, which is enough for JSON
// parseInt converts a base-10 ASCII number to an int64. About 2x faster
// than strconv.ParseInt because it only supports base 10, which is enough
// for JSON.
//
// Returns (value, ok, overflow): ok is false for malformed input (empty
// slice, a lone '-', or any non-digit byte); overflow is true when the
// digits do not fit in an int64.
func parseInt(s []byte) (v int64, ok bool, overflow bool) {
	const maxInt64 = int64(1<<63 - 1)

	if len(s) == 0 {
		return 0, false, false
	}

	neg := s[0] == '-'
	if neg {
		s = s[1:]
		// A lone "-" is not a number (previously it parsed as 0 with ok=true).
		if len(s) == 0 {
			return 0, false, false
		}
	}

	for _, c := range s {
		if c < '0' || c > '9' {
			return 0, false, false
		}
		d := int64(c - '0')
		// Check the bound before accumulating. The old heuristic
		// ("wrapped value is smaller than the previous one") missed some
		// 20-digit inputs, e.g. "25000000000000000000" wrapped to the
		// larger positive 6553255926290448384 and was returned as valid.
		if v > (maxInt64-d)/10 {
			overflow = true
			break
		}
		v = 10*v + d
	}

	if overflow {
		// math.MinInt64 has no positive counterpart, so its digit string
		// "9223372036854775808" trips the bound above; accept it only when
		// it is negated.
		if neg && bio.Equal(s, []byte("9223372036854775808")) {
			return -maxInt64 - 1, true, false
		}
		return 0, false, true
	}

	if neg {
		return -v, true, false
	}
	return v, true, false
}
jsonparser-1.1.1/bytes_safe.go 0000664 0000000 0000000 00000000737 13776023740 0016373 0 ustar 00root root 0000000 0000000 // +build appengine appenginevm
package jsonparser
import (
"strconv"
)
// See fastbytes_unsafe.go for explanation on why *[]byte is used (signatures must be consistent with those in that file)
// equalStr reports whether the byte slice pointed to by b holds exactly
// the contents of s. It takes *[]byte to keep the signature identical to
// the variant in bytes_unsafe.go (see the note there on why pointers are
// used).
func equalStr(b *[]byte, s string) bool {
	return s == string(*b)
}
// parseFloat parses the bytes pointed to by b as a 64-bit float by
// converting to a string and delegating to strconv.ParseFloat.
func parseFloat(b *[]byte) (float64, error) {
	f, err := strconv.ParseFloat(string(*b), 64)
	return f, err
}
// bytesToString copies the pointed-to bytes into a new string. This is the
// safe (appengine) implementation; bytes_unsafe.go provides the same
// function for other builds.
func bytesToString(b *[]byte) string {
	s := string(*b)
	return s
}
// StringToBytes returns a freshly allocated byte slice containing the
// bytes of s. This is the safe (appengine) implementation; bytes_unsafe.go
// provides the same function for other builds.
func StringToBytes(s string) []byte {
	out := []byte(s)
	return out
}
jsonparser-1.1.1/bytes_test.go 0000664 0000000 0000000 00000005246 13776023740 0016434 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"strconv"
"testing"
"unsafe"
)
// ParseIntTest is one table-driven case for parseInt.
type ParseIntTest struct {
in string // input handed to parseInt (as bytes)
out int64 // expected value when the parse succeeds
isErr bool // expect ok == false
isOverflow bool // expect the overflow flag to be set
}
// parseIntTests covers plain values, both int64 extremes, overflows just
// past the extremes, and malformed inputs.
var parseIntTests = []ParseIntTest{
{
in: "0",
out: 0,
},
{
in: "1",
out: 1,
},
{
in: "-1",
out: -1,
},
{
in: "12345",
out: 12345,
},
{
in: "-12345",
out: -12345,
},
{
in: "9223372036854775807", // = math.MaxInt64
out: 9223372036854775807,
},
{
in: "-9223372036854775808", // = math.MinInt64
out: -9223372036854775808,
},
{
in: "-92233720368547758081", // 20 digits, far below math.MinInt64
out: 0,
isErr: true,
isOverflow: true,
},
{
in: "18446744073709551616", // = 2^64
out: 0,
isErr: true,
isOverflow: true,
},
{
in: "9223372036854775808", // = math.MaxInt64 + 1
out: 0,
isErr: true,
isOverflow: true,
},
{
in: "-9223372036854775809", // = math.MinInt64 - 1
out: 0,
isErr: true,
isOverflow: true,
},
{
in: "",
isErr: true,
},
{
in: "abc",
isErr: true,
},
{
in: "12345x",
isErr: true,
},
{
in: "123e5", // exponents are not supported by parseInt
isErr: true,
},
{
in: "9223372036854775807x",
isErr: true,
},
}
// TestBytesParseInt runs parseInt over the shared table: the overflow flag
// is checked first, then the ok flag, and the parsed value only when the
// parse was expected to succeed.
func TestBytesParseInt(t *testing.T) {
	for _, test := range parseIntTests {
		out, ok, overflow := parseInt([]byte(test.in))
		if overflow != test.isOverflow {
			t.Errorf("Test '%s' error return did not overflow expectation (obtained %t, expected %t)", test.in, overflow, test.isOverflow)
		}
		if ok != !test.isErr {
			t.Errorf("Test '%s' error return did not match expectation (obtained %t, expected %t)", test.in, !ok, test.isErr)
		} else if ok && out != test.out {
			t.Errorf("Test '%s' did not return the expected value (obtained %d, expected %d)", test.in, out, test.out)
		}
	}
}
func BenchmarkParseInt(b *testing.B) {
bytes := []byte("123")
for i := 0; i < b.N; i++ {
parseInt(bytes)
}
}
// Alternative implementation using unsafe and delegating to strconv.ParseInt
func BenchmarkParseIntUnsafeSlower(b *testing.B) {
bytes := []byte("123")
for i := 0; i < b.N; i++ {
strconv.ParseInt(*(*string)(unsafe.Pointer(&bytes)), 10, 64)
}
}
// Old implementation that did not check for overflows.
func BenchmarkParseIntOverflows(b *testing.B) {
bytes := []byte("123")
for i := 0; i < b.N; i++ {
parseIntOverflows(bytes)
}
}
// parseIntOverflows is the legacy decimal parser kept only for benchmark
// comparison: it performs NO overflow detection, so values outside int64
// silently wrap. Returns ok=false on empty input or any non-digit byte.
func parseIntOverflows(bytes []byte) (v int64, ok bool) {
	if len(bytes) == 0 {
		return 0, false
	}
	negative := bytes[0] == '-'
	if negative {
		bytes = bytes[1:]
	}
	for _, digit := range bytes {
		if digit < '0' || digit > '9' {
			return 0, false
		}
		v = v*10 + int64(digit-'0')
	}
	if negative {
		v = -v
	}
	return v, true
}
jsonparser-1.1.1/bytes_unsafe.go 0000664 0000000 0000000 00000002350 13776023740 0016727 0 ustar 00root root 0000000 0000000 // +build !appengine,!appenginevm
package jsonparser
import (
"reflect"
"strconv"
"unsafe"
"runtime"
)
//
// The reason for using *[]byte rather than []byte in parameters is an optimization. As of Go 1.6,
// the compiler cannot perfectly inline the function when using a non-pointer slice. That is,
// the non-pointer []byte parameter version is slower than if its function body is manually
// inlined, whereas the pointer []byte version is equally fast to the manually inlined
// version. Instruction count in assembly taken from "go tool compile" confirms this difference.
//
// TODO: Remove hack after Go 1.7 release
//
// equalStr compares the bytes pointed to by b against s without allocating:
// the slice header is reinterpreted as a string header via unsafe.Pointer
// (only the Data and Len fields are read by string comparison).
func equalStr(b *[]byte, s string) bool {
	return *(*string)(unsafe.Pointer(b)) == s
}
// parseFloat parses the bytes pointed to by b as a float64, using the same
// zero-copy []byte→string reinterpretation as equalStr to avoid allocating.
func parseFloat(b *[]byte) (float64, error) {
	return strconv.ParseFloat(*(*string)(unsafe.Pointer(b)), 64)
}
// A hack until issue golang/go#2632 is fixed.
// See: https://github.com/golang/go/issues/2632
// bytesToString reinterprets *b as a string without copying. The result
// aliases the slice's backing array, so *b must not be mutated while the
// returned string is in use.
func bytesToString(b *[]byte) string {
	return *(*string)(unsafe.Pointer(b))
}
// StringToBytes converts s to []byte without copying by pointing a slice
// header at the string's backing data. The result MUST be treated as
// read-only: writing to it would mutate an immutable string.
// NOTE(review): reflect.SliceHeader/StringHeader are deprecated in newer Go
// releases; unsafe.Slice(unsafe.StringData(s), len(s)) is the modern form.
func StringToBytes(s string) []byte {
	b := make([]byte, 0, 0)
	bh := (*reflect.SliceHeader)(unsafe.Pointer(&b))
	sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
	bh.Data = sh.Data
	bh.Cap = sh.Len
	bh.Len = sh.Len
	// Keep s reachable until its header fields have been copied into bh.
	runtime.KeepAlive(s)
	return b
}
jsonparser-1.1.1/bytes_unsafe_test.go 0000664 0000000 0000000 00000003314 13776023740 0017767 0 ustar 00root root 0000000 0000000 // +build !appengine,!appenginevm
package jsonparser
import (
"reflect"
"strings"
"testing"
"unsafe"
)
var (
// short string/[]byte sequences, as the difference between these
// three methods is a constant overhead
benchmarkString = "0123456789x"
benchmarkBytes = []byte("0123456789y")
)
// bytesEqualStrSafe is the plain allocating comparison used as a benchmark
// baseline against the unsafe equalStr implementation.
func bytesEqualStrSafe(abytes []byte, bstr string) bool {
	return string(abytes) == bstr
}
// bytesEqualStrUnsafeSlower builds a StringHeader from the slice header by
// hand before comparing; kept only as a benchmark baseline that measures
// slower than the production equalStr cast.
func bytesEqualStrUnsafeSlower(abytes *[]byte, bstr string) bool {
	aslicehdr := (*reflect.SliceHeader)(unsafe.Pointer(abytes))
	astrhdr := reflect.StringHeader{Data: aslicehdr.Data, Len: aslicehdr.Len}
	return *(*string)(unsafe.Pointer(&astrhdr)) == bstr
}
// TestEqual exercises equalStr on the empty string and on 1000 pairs of
// strings that share an i-byte prefix and differ only in the final byte,
// covering both the equal and almost-equal paths at many lengths.
func TestEqual(t *testing.T) {
	if !equalStr(&[]byte{}, "") {
		t.Errorf(`equalStr("", ""): expected true, obtained false`)
		return
	}
	longstr := strings.Repeat("a", 1000)
	for i := 0; i < len(longstr); i++ {
		// s1 and s2 agree on the first i bytes and differ in the last one
		s1, s2 := longstr[:i]+"1", longstr[:i]+"2"
		b1 := []byte(s1)
		if !equalStr(&b1, s1) {
			t.Errorf(`equalStr("a"*%d + "1", "a"*%d + "1"): expected true, obtained false`, i, i)
			break
		}
		if equalStr(&b1, s2) {
			t.Errorf(`equalStr("a"*%d + "1", "a"*%d + "2"): expected false, obtained true`, i, i)
			break
		}
	}
}
func BenchmarkEqualStr(b *testing.B) {
for i := 0; i < b.N; i++ {
equalStr(&benchmarkBytes, benchmarkString)
}
}
// Alternative implementation without using unsafe
func BenchmarkBytesEqualStrSafe(b *testing.B) {
for i := 0; i < b.N; i++ {
bytesEqualStrSafe(benchmarkBytes, benchmarkString)
}
}
// Alternative implementation using unsafe, but that is slower than the current implementation
func BenchmarkBytesEqualStrUnsafeSlower(b *testing.B) {
for i := 0; i < b.N; i++ {
bytesEqualStrUnsafeSlower(&benchmarkBytes, benchmarkString)
}
}
jsonparser-1.1.1/escape.go 0000664 0000000 0000000 00000012174 13776023740 0015505 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"bytes"
"unicode/utf8"
)
// JSON Unicode stuff: see https://tools.ietf.org/html/rfc7159#section-7
const supplementalPlanesOffset = 0x10000
const highSurrogateOffset = 0xD800
const lowSurrogateOffset = 0xDC00
const basicMultilingualPlaneReservedOffset = 0xDFFF
const basicMultilingualPlaneOffset = 0xFFFF
func combineUTF16Surrogates(high, low rune) rune {
return supplementalPlanesOffset + (high-highSurrogateOffset)<<10 + (low - lowSurrogateOffset)
}
const badHex = -1
// h2I converts a single hex digit (either case) to its numeric value,
// or badHex for any non-hex byte.
func h2I(c byte) int {
	if c >= '0' && c <= '9' {
		return int(c - '0')
	}
	if c >= 'A' && c <= 'F' {
		return int(c-'A') + 10
	}
	if c >= 'a' && c <= 'f' {
		return int(c-'a') + 10
	}
	return badHex
}
// decodeSingleUnicodeEscape decodes a single \uXXXX escape sequence. The prefix \u is assumed to be present and
// is not checked.
// In JSON, these escapes can either come alone or as part of "UTF16 surrogate pairs" that must be handled together.
// This function only handles one; decodeUnicodeEscape handles this more complex case.
// decodeSingleUnicodeEscape decodes one \uXXXX escape sequence. The leading
// "\u" is assumed present and is not validated; only the four hex digits
// are checked. Returns (utf8.RuneError, false) on short input or a bad digit.
// Surrogate-pair handling lives in decodeUnicodeEscape.
func decodeSingleUnicodeEscape(in []byte) (rune, bool) {
	// \uXXXX needs 6 bytes in total
	if len(in) < 6 {
		return utf8.RuneError, false
	}
	// Fold the four hex digits into a code point, most significant first
	var code rune
	for _, hc := range in[2:6] {
		d := h2I(hc)
		if d == badHex {
			return utf8.RuneError, false
		}
		code = code<<4 | rune(d)
	}
	return code, true
}
// isUTF16EncodedRune checks if a rune is in the range for non-BMP characters,
// which is used to describe UTF16 chars.
// Source: https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane
// isUTF16EncodedRune reports whether r falls in the UTF-16 surrogate range
// (U+D800..U+DFFF), i.e. it is half of a surrogate pair rather than a
// directly-representable BMP character.
func isUTF16EncodedRune(r rune) bool {
	if r < highSurrogateOffset {
		return false
	}
	return r <= basicMultilingualPlaneReservedOffset
}
// decodeUnicodeEscape decodes the \uXXXX escape(s) starting at in, handling
// UTF-16 surrogate pairs (two consecutive escapes) for characters outside
// the Basic Multilingual Plane.
// Returns the decoded rune and the number of input bytes consumed (6 for a
// single escape, 12 for a surrogate pair), or (utf8.RuneError, -1) on any
// malformed escape.
func decodeUnicodeEscape(in []byte) (rune, int) {
	if r, ok := decodeSingleUnicodeEscape(in); !ok {
		// Invalid Unicode escape
		return utf8.RuneError, -1
	} else if r <= basicMultilingualPlaneOffset && !isUTF16EncodedRune(r) {
		// Valid Unicode escape in Basic Multilingual Plane
		return r, 6
	} else if r >= lowSurrogateOffset {
		// A lone low surrogate (U+DC00..U+DFFF) cannot start a pair;
		// previously this fell through and was treated as a high surrogate.
		return utf8.RuneError, -1
	} else if r2, ok := decodeSingleUnicodeEscape(in[6:]); !ok { // Note: previous decodeSingleUnicodeEscape success guarantees at least 6 bytes remain
		// UTF16 "high surrogate" without mandatory valid following Unicode escape for the "low surrogate"
		return utf8.RuneError, -1
	} else if r2 < lowSurrogateOffset || r2 > basicMultilingualPlaneReservedOffset {
		// Invalid UTF16 "low surrogate": must lie in U+DC00..U+DFFF.
		// The upper-bound check was previously missing, so e.g. \uD800\uE000
		// was wrongly accepted as a surrogate pair.
		return utf8.RuneError, -1
	} else {
		// Valid UTF16 surrogate pair
		return combineUTF16Surrogates(r, r2), 12
	}
}
// backslashCharEscapeTable: when '\X' is found for some byte X, it is to be replaced with backslashCharEscapeTable[X]
var backslashCharEscapeTable = [...]byte{
'"': '"',
'\\': '\\',
'/': '/',
'b': '\b',
'f': '\f',
'n': '\n',
'r': '\r',
't': '\t',
}
// unescapeToUTF8 unescapes the single escape sequence starting at 'in' into 'out' and returns
// how many characters were consumed from 'in' and emitted into 'out'.
// If a valid escape sequence does not appear as a prefix of 'in', (-1, -1) to signal the error.
func unescapeToUTF8(in, out []byte) (inLen int, outLen int) {
	if len(in) < 2 || in[0] != '\\' {
		// Invalid escape due to insufficient characters for any escape or no initial backslash
		return -1, -1
	}
	// https://tools.ietf.org/html/rfc7159#section-7
	switch e := in[1]; e {
	case '"', '\\', '/', 'b', 'f', 'n', 'r', 't':
		// Valid basic 2-character escapes (use lookup table)
		out[0] = backslashCharEscapeTable[e]
		return 2, 1
	case 'u':
		// Unicode escape
		if r, inLen := decodeUnicodeEscape(in); inLen == -1 {
			// Invalid Unicode escape
			return -1, -1
		} else {
			// Valid Unicode escape; re-encode as UTF8
			// NOTE: assumes out has room for up to utf8.UTFMax (4) bytes;
			// Unescape sizes out to len(in), and a \uXXXX escape consumes
			// at least 6 input bytes, so the invariant holds there.
			outLen := utf8.EncodeRune(out, r)
			return inLen, outLen
		}
	}
	// Unrecognized character after the backslash
	return -1, -1
}
// unescape unescapes the string contained in 'in' and returns it as a slice.
// If 'in' contains no escaped characters:
// Returns 'in'.
// Else, if 'out' is of sufficient capacity (guaranteed if cap(out) >= len(in)):
// 'out' is used to build the unescaped string and is returned with no extra allocation
// Else:
// A new slice is allocated and returned.
func Unescape(in, out []byte) ([]byte, error) {
	firstBackslash := bytes.IndexByte(in, '\\')
	if firstBackslash == -1 {
		// Fast path: no escapes at all, return the input slice untouched
		return in, nil
	}
	// Get a buffer of sufficient size (allocate if needed)
	// len(in) always suffices because every escape sequence shrinks when
	// decoded (2 bytes -> 1 for simple escapes, 6 -> <=3 / 12 -> 4 for \u).
	if cap(out) < len(in) {
		out = make([]byte, len(in))
	} else {
		out = out[0:len(in)]
	}
	// Copy the first sequence of unescaped bytes to the output and obtain a buffer pointer (subslice)
	copy(out, in[:firstBackslash])
	in = in[firstBackslash:]
	buf := out[firstBackslash:]
	// Invariant: 'in' starts at a backslash; 'buf' is the unwritten tail of 'out'
	for len(in) > 0 {
		// Unescape the next escaped character
		inLen, bufLen := unescapeToUTF8(in, buf)
		if inLen == -1 {
			return nil, MalformedStringEscapeError
		}
		in = in[inLen:]
		buf = buf[bufLen:]
		// Copy everything up until the next backslash
		nextBackslash := bytes.IndexByte(in, '\\')
		if nextBackslash == -1 {
			copy(buf, in)
			buf = buf[len(in):]
			break
		} else {
			copy(buf, in[:nextBackslash])
			buf = buf[nextBackslash:]
			in = in[nextBackslash:]
		}
	}
	// Trim the out buffer to the amount that was actually emitted
	return out[:len(out)-len(buf)], nil
}
jsonparser-1.1.1/escape_test.go 0000664 0000000 0000000 00000014110 13776023740 0016534 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"bytes"
"testing"
)
func TestH2I(t *testing.T) {
hexChars := []byte{'0', '9', 'A', 'F', 'a', 'f', 'x', '\000'}
hexValues := []int{0, 9, 10, 15, 10, 15, -1, -1}
for i, c := range hexChars {
if v := h2I(c); v != hexValues[i] {
t.Errorf("h2I('%c') returned wrong value (obtained %d, expected %d)", c, v, hexValues[i])
}
}
}
type escapedUnicodeRuneTest struct {
in string
isErr bool
out rune
len int
}
var commonUnicodeEscapeTests = []escapedUnicodeRuneTest{
{in: `\u0041`, out: 'A', len: 6},
{in: `\u0000`, out: 0, len: 6},
{in: `\u00b0`, out: '°', len: 6},
{in: `\u00B0`, out: '°', len: 6},
{in: `\x1234`, out: 0x1234, len: 6}, // These functions do not check the \u prefix
{in: ``, isErr: true},
{in: `\`, isErr: true},
{in: `\u`, isErr: true},
{in: `\u1`, isErr: true},
{in: `\u11`, isErr: true},
{in: `\u111`, isErr: true},
{in: `\u123X`, isErr: true},
}
var singleUnicodeEscapeTests = append([]escapedUnicodeRuneTest{
{in: `\uD83D`, out: 0xD83D, len: 6},
{in: `\uDE03`, out: 0xDE03, len: 6},
{in: `\uFFFF`, out: 0xFFFF, len: 6},
{in: `\uFF11`, out: '1', len: 6},
}, commonUnicodeEscapeTests...)
var multiUnicodeEscapeTests = append([]escapedUnicodeRuneTest{
{in: `\uD83D`, isErr: true},
{in: `\uDE03`, isErr: true},
{in: `\uFFFF`, out: '\uFFFF', len: 6},
{in: `\uFF11`, out: '1', len: 6},
{in: `\uD83D\uDE03`, out: '\U0001F603', len: 12},
{in: `\uD800\uDC00`, out: '\U00010000', len: 12},
{in: `\uD800\`, isErr: true},
{in: `\uD800\u`, isErr: true},
{in: `\uD800\uD`, isErr: true},
{in: `\uD800\uDC`, isErr: true},
{in: `\uD800\uDC0`, isErr: true},
{in: `\uD800\uDBFF`, isErr: true}, // invalid low surrogate
}, commonUnicodeEscapeTests...)
func TestDecodeSingleUnicodeEscape(t *testing.T) {
for _, test := range singleUnicodeEscapeTests {
r, ok := decodeSingleUnicodeEscape([]byte(test.in))
isErr := !ok
if isErr != test.isErr {
t.Errorf("decodeSingleUnicodeEscape(%s) returned isErr mismatch: expected %t, obtained %t", test.in, test.isErr, isErr)
} else if isErr {
continue
} else if r != test.out {
t.Errorf("decodeSingleUnicodeEscape(%s) returned rune mismatch: expected %x (%c), obtained %x (%c)", test.in, test.out, test.out, r, r)
}
}
}
func TestDecodeUnicodeEscape(t *testing.T) {
for _, test := range multiUnicodeEscapeTests {
r, len := decodeUnicodeEscape([]byte(test.in))
isErr := (len == -1)
if isErr != test.isErr {
t.Errorf("decodeUnicodeEscape(%s) returned isErr mismatch: expected %t, obtained %t", test.in, test.isErr, isErr)
} else if isErr {
continue
} else if len != test.len {
t.Errorf("decodeUnicodeEscape(%s) returned length mismatch: expected %d, obtained %d", test.in, test.len, len)
} else if r != test.out {
t.Errorf("decodeUnicodeEscape(%s) returned rune mismatch: expected %x (%c), obtained %x (%c)", test.in, test.out, test.out, r, r)
}
}
}
type unescapeTest struct {
in string // escaped string
out string // expected unescaped string
canAlloc bool // can unescape cause an allocation (depending on buffer size)? true iff 'in' contains escape sequence(s)
isErr bool // should this operation result in an error
}
var unescapeTests = []unescapeTest{
{in: ``, out: ``, canAlloc: false},
{in: `a`, out: `a`, canAlloc: false},
{in: `abcde`, out: `abcde`, canAlloc: false},
{in: `ab\\de`, out: `ab\de`, canAlloc: true},
{in: `ab\"de`, out: `ab"de`, canAlloc: true},
{in: `ab \u00B0 de`, out: `ab ° de`, canAlloc: true},
{in: `ab \uFF11 de`, out: `ab 1 de`, canAlloc: true},
{in: `\uFFFF`, out: "\uFFFF", canAlloc: true},
{in: `ab \uD83D\uDE03 de`, out: "ab \U0001F603 de", canAlloc: true},
{in: `\u0000\u0000\u0000\u0000\u0000`, out: "\u0000\u0000\u0000\u0000\u0000", canAlloc: true},
{in: `\u0000 \u0000 \u0000 \u0000 \u0000`, out: "\u0000 \u0000 \u0000 \u0000 \u0000", canAlloc: true},
{in: ` \u0000 \u0000 \u0000 \u0000 \u0000 `, out: " \u0000 \u0000 \u0000 \u0000 \u0000 ", canAlloc: true},
{in: `\uD800`, isErr: true},
{in: `abcde\`, isErr: true},
{in: `abcde\x`, isErr: true},
{in: `abcde\u`, isErr: true},
{in: `abcde\u1`, isErr: true},
{in: `abcde\u12`, isErr: true},
{in: `abcde\u123`, isErr: true},
{in: `abcde\uD800`, isErr: true},
{in: `ab\uD800de`, isErr: true},
{in: `\uD800abcde`, isErr: true},
}
// isSameMemory checks if two slices contain the same memory pointer (meaning one is a
// subslice of the other, with possibly differing lengths/capacities).
// isSameMemory reports whether a and b share the same backing array (one is
// a subslice of the other). Detection works by bumping the first byte of a
// and checking whether b observes the change; the byte is restored before
// returning.
func isSameMemory(a, b []byte) bool {
	if cap(a) == 0 || cap(b) == 0 {
		return cap(a) == cap(b)
	}
	a, b = a[:1], b[:1]
	if a[0] != b[0] {
		return false
	}
	a[0]++
	shared := a[0] == b[0]
	a[0]--
	return shared
}
func TestUnescape(t *testing.T) {
for _, test := range unescapeTests {
type bufferTestCase struct {
buf []byte
isTooSmall bool
}
var bufs []bufferTestCase
if len(test.in) == 0 {
// If the input string is length 0, only a buffer of size 0 is a meaningful test
bufs = []bufferTestCase{{nil, false}}
} else {
// For non-empty input strings, we can try several buffer sizes (0, len-1, len)
bufs = []bufferTestCase{
{nil, true},
{make([]byte, 0, len(test.in)-1), true},
{make([]byte, 0, len(test.in)), false},
}
}
for _, buftest := range bufs {
in := []byte(test.in)
buf := buftest.buf
out, err := Unescape(in, buf)
isErr := (err != nil)
isAlloc := !isSameMemory(out, in) && !isSameMemory(out, buf)
if isErr != test.isErr {
t.Errorf("Unescape(`%s`, bufsize=%d) returned isErr mismatch: expected %t, obtained %t", test.in, cap(buf), test.isErr, isErr)
break
} else if isErr {
continue
} else if !bytes.Equal(out, []byte(test.out)) {
t.Errorf("Unescape(`%s`, bufsize=%d) returned unescaped mismatch: expected `%s` (%v, len %d), obtained `%s` (%v, len %d)", test.in, cap(buf), test.out, []byte(test.out), len(test.out), string(out), out, len(out))
break
} else if isAlloc != (test.canAlloc && buftest.isTooSmall) {
t.Errorf("Unescape(`%s`, bufsize=%d) returned isAlloc mismatch: expected %t, obtained %t", test.in, cap(buf), buftest.isTooSmall, isAlloc)
break
}
}
}
}
jsonparser-1.1.1/fuzz.go 0000664 0000000 0000000 00000003517 13776023740 0015244 0 ustar 00root root 0000000 0000000 package jsonparser
func FuzzParseString(data []byte) int {
r, err := ParseString(data)
if err != nil || r == "" {
return 0
}
return 1
}
func FuzzEachKey(data []byte) int {
paths := [][]string{
{"name"},
{"order"},
{"nested", "a"},
{"nested", "b"},
{"nested2", "a"},
{"nested", "nested3", "b"},
{"arr", "[1]", "b"},
{"arrInt", "[3]"},
{"arrInt", "[5]"},
{"nested"},
{"arr", "["},
{"a\n", "b\n"},
}
EachKey(data, func(idx int, value []byte, vt ValueType, err error) {}, paths...)
return 1
}
func FuzzDelete(data []byte) int {
Delete(data, "test")
return 1
}
func FuzzSet(data []byte) int {
_, err := Set(data, []byte(`"new value"`), "test")
if err != nil {
return 0
}
return 1
}
func FuzzObjectEach(data []byte) int {
_ = ObjectEach(data, func(key, value []byte, valueType ValueType, off int) error {
return nil
})
return 1
}
func FuzzParseFloat(data []byte) int {
_, err := ParseFloat(data)
if err != nil {
return 0
}
return 1
}
func FuzzParseInt(data []byte) int {
_, err := ParseInt(data)
if err != nil {
return 0
}
return 1
}
func FuzzParseBool(data []byte) int {
_, err := ParseBoolean(data)
if err != nil {
return 0
}
return 1
}
func FuzzTokenStart(data []byte) int {
_ = tokenStart(data)
return 1
}
func FuzzGetString(data []byte) int {
_, err := GetString(data, "test")
if err != nil {
return 0
}
return 1
}
func FuzzGetFloat(data []byte) int {
_, err := GetFloat(data, "test")
if err != nil {
return 0
}
return 1
}
func FuzzGetInt(data []byte) int {
_, err := GetInt(data, "test")
if err != nil {
return 0
}
return 1
}
func FuzzGetBoolean(data []byte) int {
_, err := GetBoolean(data, "test")
if err != nil {
return 0
}
return 1
}
func FuzzGetUnsafeString(data []byte) int {
_, err := GetUnsafeString(data, "test")
if err != nil {
return 0
}
return 1
}
jsonparser-1.1.1/go.mod 0000664 0000000 0000000 00000000055 13776023740 0015017 0 ustar 00root root 0000000 0000000 module github.com/buger/jsonparser
go 1.13
jsonparser-1.1.1/go.sum 0000664 0000000 0000000 00000000000 13776023740 0015032 0 ustar 00root root 0000000 0000000 jsonparser-1.1.1/oss-fuzz-build.sh 0000775 0000000 0000000 00000003456 13776023740 0017155 0 ustar 00root root 0000000 0000000 #!/bin/bash -eu
git clone https://github.com/dvyukov/go-fuzz-corpus
zip corpus.zip go-fuzz-corpus/json/corpus/*
cp corpus.zip $OUT/fuzzparsestring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseString fuzzparsestring
cp corpus.zip $OUT/fuzzeachkey_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzEachKey fuzzeachkey
cp corpus.zip $OUT/fuzzdelete_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzDelete fuzzdelete
cp corpus.zip $OUT/fuzzset_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzSet fuzzset
cp corpus.zip $OUT/fuzzobjecteach_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzObjectEach fuzzobjecteach
cp corpus.zip $OUT/fuzzparsefloat_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseFloat fuzzparsefloat
cp corpus.zip $OUT/fuzzparseint_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseInt fuzzparseint
cp corpus.zip $OUT/fuzzparsebool_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzParseBool fuzzparsebool
cp corpus.zip $OUT/fuzztokenstart_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzTokenStart fuzztokenstart
cp corpus.zip $OUT/fuzzgetstring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetString fuzzgetstring
cp corpus.zip $OUT/fuzzgetfloat_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetFloat fuzzgetfloat
cp corpus.zip $OUT/fuzzgetint_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetInt fuzzgetint
cp corpus.zip $OUT/fuzzgetboolean_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetBoolean fuzzgetboolean
cp corpus.zip $OUT/fuzzgetunsafestring_seed_corpus.zip
compile_go_fuzzer github.com/buger/jsonparser FuzzGetUnsafeString fuzzgetunsafestring
jsonparser-1.1.1/parser.go 0000664 0000000 0000000 00000072612 13776023740 0015544 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"bytes"
"errors"
"fmt"
"strconv"
)
// Errors
var (
KeyPathNotFoundError = errors.New("Key path not found")
UnknownValueTypeError = errors.New("Unknown value type")
MalformedJsonError = errors.New("Malformed JSON error")
MalformedStringError = errors.New("Value is string, but can't find closing '\"' symbol")
MalformedArrayError = errors.New("Value is array, but can't find closing ']' symbol")
MalformedObjectError = errors.New("Value looks like object, but can't find closing '}' symbol")
MalformedValueError = errors.New("Value looks like Number/Boolean/None, but can't find its end: ',' or '}' symbol")
OverflowIntegerError = errors.New("Value is number, but overflowed while parsing")
MalformedStringEscapeError = errors.New("Encountered an invalid escape sequence in a string")
)
// How much stack space to allocate for unescaping JSON strings; if a string longer
// than this needs to be escaped, it will result in a heap allocation
const unescapeStackBufSize = 64
// tokenEnd returns the index of the first byte that terminates a JSON value
// token (whitespace, ',', '}' or ']'), or len(data) when the token runs to
// the end of the input.
func tokenEnd(data []byte) int {
	for idx := 0; idx < len(data); idx++ {
		switch data[idx] {
		case ' ', '\n', '\r', '\t', ',', '}', ']':
			return idx
		}
	}
	return len(data)
}
// findTokenStart scans backwards from the end of data for the given token
// byte and returns its index; the search stops (returning 0) when an
// opening '[' or '{' is reached first, or when nothing matches.
func findTokenStart(data []byte, token byte) int {
	for i := len(data); i > 0; {
		i--
		switch data[i] {
		case token:
			return i
		case '[', '{':
			return 0
		}
	}
	return 0
}
// findKeyStart scans one object level of data for the given key and returns
// the offset of the key's opening '"', or KeyPathNotFoundError if absent.
// Nested objects/arrays are skipped wholesale; used by Delete to locate the
// byte range of a key/value pair.
func findKeyStart(data []byte, key string) (int, error) {
	i := 0
	ln := len(data)
	if ln > 0 && (data[0] == '{' || data[0] == '[') {
		// Step inside the enclosing container
		i = 1
	}
	var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
	// Normalize the needle itself so escaped and unescaped spellings match
	if ku, err := Unescape(StringToBytes(key), stackbuf[:]); err == nil {
		key = bytesToString(&ku)
	}
	for i < ln {
		switch data[i] {
		case '"':
			i++
			keyBegin := i
			strEnd, keyEscaped := stringEnd(data[i:])
			if strEnd == -1 {
				// NOTE: this break exits the switch only; the outer loop
				// then advances i by one and keeps scanning
				break
			}
			i += strEnd
			keyEnd := i - 1
			valueOffset := nextToken(data[i:])
			if valueOffset == -1 {
				break // unterminated input; see note above
			}
			i += valueOffset
			// if string is a key, and key level match
			k := data[keyBegin:keyEnd]
			// for unescape: if there are no escape sequences, this is cheap; if there are, it is a
			// bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize
			if keyEscaped {
				if ku, err := Unescape(k, stackbuf[:]); err != nil {
					break
				} else {
					k = ku
				}
			}
			// A string is a key only if the next token is ':'
			if data[i] == ':' && len(key) == len(k) && bytesToString(&k) == key {
				// Return the offset of the opening quote
				return keyBegin - 1, nil
			}
		case '[':
			// Skip a nested array; keys are only matched at this level
			end := blockEnd(data[i:], data[i], ']')
			if end != -1 {
				i = i + end
			}
		case '{':
			// Skip a nested object
			end := blockEnd(data[i:], data[i], '}')
			if end != -1 {
				i = i + end
			}
		}
		i++
	}
	return -1, KeyPathNotFoundError
}
// tokenStart scans backwards for the byte that precedes the final token in
// data: a structural opener ('{' or '['), a comma, or line whitespace.
// Returns 0 when none is found. Note that a plain space is deliberately not
// in the set, matching the original behaviour.
func tokenStart(data []byte) int {
	for i := len(data) - 1; i >= 0; i-- {
		c := data[i]
		if c == '\n' || c == '\r' || c == '\t' || c == ',' || c == '{' || c == '[' {
			return i
		}
	}
	return 0
}
// Find position of next character which is not whitespace
// nextToken returns the index of the first byte that is not JSON whitespace
// (space, newline, carriage return, tab), or -1 when data is empty or all
// whitespace.
func nextToken(data []byte) int {
	for idx := 0; idx < len(data); idx++ {
		c := data[idx]
		if c != ' ' && c != '\n' && c != '\r' && c != '\t' {
			return idx
		}
	}
	return -1
}
// Find position of last character which is not whitespace
// lastToken returns the index of the last byte that is not JSON whitespace,
// or -1 when data is empty or all whitespace.
func lastToken(data []byte) int {
	for i := len(data); i > 0; {
		i--
		c := data[i]
		if c != ' ' && c != '\n' && c != '\r' && c != '\t' {
			return i
		}
	}
	return -1
}
// Tries to find the end of string
// Support if string contains escaped quote symbols.
// stringEnd scans data — positioned just past an opening '"' — for the
// closing quote. Returns (index one past the closing quote, whether any
// backslash escape was seen), or (-1, escapeSeen) if the string never ends.
func stringEnd(data []byte) (int, bool) {
	escaped := false // set once any backslash is seen; never reset
	for i, c := range data {
		if c == '"' {
			if !escaped {
				return i + 1, false
			} else {
				// A backslash occurred somewhere earlier, so count the run of
				// backslashes immediately before this quote: an even count
				// means the quote is real, an odd count means it is escaped.
				j := i - 1
				for {
					if j < 0 || data[j] != '\\' {
						return i + 1, true // even number of backslashes
					}
					j--
					if j < 0 || data[j] != '\\' {
						break // odd number of backslashes
					}
					j--
				}
			}
		} else if c == '\\' {
			escaped = true
		}
	}
	return -1, escaped
}
// Find end of the data structure, array or object.
// For array openSym and closeSym will be '[' and ']', for object '{' and '}'
// blockEnd returns the length of the balanced block starting at data[0]
// (index one past the matching closeSym), or -1 if the input ends first.
// For arrays openSym/closeSym are '[' and ']', for objects '{' and '}'.
// String contents are skipped so structural bytes inside strings don't count.
func blockEnd(data []byte, openSym byte, closeSym byte) int {
	level := 0
	i := 0
	ln := len(data)
	for i < ln {
		switch data[i] {
		case '"': // If inside string, skip it
			se, _ := stringEnd(data[i+1:])
			if se == -1 {
				return -1
			}
			i += se
		case openSym: // If open symbol, increase level
			level++
		case closeSym: // If close symbol, decrease level
			level--
			// If we have returned to the original level, we're done
			if level == 0 {
				return i + 1
			}
		}
		i++
	}
	return -1
}
func searchKeys(data []byte, keys ...string) int {
keyLevel := 0
level := 0
i := 0
ln := len(data)
lk := len(keys)
lastMatched := true
if lk == 0 {
return 0
}
var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
for i < ln {
switch data[i] {
case '"':
i++
keyBegin := i
strEnd, keyEscaped := stringEnd(data[i:])
if strEnd == -1 {
return -1
}
i += strEnd
keyEnd := i - 1
valueOffset := nextToken(data[i:])
if valueOffset == -1 {
return -1
}
i += valueOffset
// if string is a key
if data[i] == ':' {
if level < 1 {
return -1
}
key := data[keyBegin:keyEnd]
// for unescape: if there are no escape sequences, this is cheap; if there are, it is a
// bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize
var keyUnesc []byte
if !keyEscaped {
keyUnesc = key
} else if ku, err := Unescape(key, stackbuf[:]); err != nil {
return -1
} else {
keyUnesc = ku
}
if level <= len(keys) {
if equalStr(&keyUnesc, keys[level-1]) {
lastMatched = true
// if key level match
if keyLevel == level-1 {
keyLevel++
// If we found all keys in path
if keyLevel == lk {
return i + 1
}
}
} else {
lastMatched = false
}
} else {
return -1
}
} else {
i--
}
case '{':
// in case parent key is matched then only we will increase the level otherwise can directly
// can move to the end of this block
if !lastMatched {
end := blockEnd(data[i:], '{', '}')
if end == -1 {
return -1
}
i += end - 1
} else {
level++
}
case '}':
level--
if level == keyLevel {
keyLevel--
}
case '[':
// If we want to get array element by index
if keyLevel == level && keys[level][0] == '[' {
var keyLen = len(keys[level])
if keyLen < 3 || keys[level][0] != '[' || keys[level][keyLen-1] != ']' {
return -1
}
aIdx, err := strconv.Atoi(keys[level][1 : keyLen-1])
if err != nil {
return -1
}
var curIdx int
var valueFound []byte
var valueOffset int
var curI = i
ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) {
if curIdx == aIdx {
valueFound = value
valueOffset = offset
if dataType == String {
valueOffset = valueOffset - 2
valueFound = data[curI+valueOffset : curI+valueOffset+len(value)+2]
}
}
curIdx += 1
})
if valueFound == nil {
return -1
} else {
subIndex := searchKeys(valueFound, keys[level+1:]...)
if subIndex < 0 {
return -1
}
return i + valueOffset + subIndex
}
} else {
// Do not search for keys inside arrays
if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 {
return -1
} else {
i += arraySkip - 1
}
}
case ':': // If encountered, JSON data is malformed
return -1
}
i++
}
return -1
}
func sameTree(p1, p2 []string) bool {
minLen := len(p1)
if len(p2) < minLen {
minLen = len(p2)
}
for pi_1, p_1 := range p1[:minLen] {
if p2[pi_1] != p_1 {
return false
}
}
return true
}
func EachKey(data []byte, cb func(int, []byte, ValueType, error), paths ...[]string) int {
var x struct{}
pathFlags := make([]bool, len(paths))
var level, pathsMatched, i int
ln := len(data)
var maxPath int
for _, p := range paths {
if len(p) > maxPath {
maxPath = len(p)
}
}
pathsBuf := make([]string, maxPath)
for i < ln {
switch data[i] {
case '"':
i++
keyBegin := i
strEnd, keyEscaped := stringEnd(data[i:])
if strEnd == -1 {
return -1
}
i += strEnd
keyEnd := i - 1
valueOffset := nextToken(data[i:])
if valueOffset == -1 {
return -1
}
i += valueOffset
// if string is a key, and key level match
if data[i] == ':' {
match := -1
key := data[keyBegin:keyEnd]
// for unescape: if there are no escape sequences, this is cheap; if there are, it is a
// bit more expensive, but causes no allocations unless len(key) > unescapeStackBufSize
var keyUnesc []byte
if !keyEscaped {
keyUnesc = key
} else {
var stackbuf [unescapeStackBufSize]byte
if ku, err := Unescape(key, stackbuf[:]); err != nil {
return -1
} else {
keyUnesc = ku
}
}
if maxPath >= level {
if level < 1 {
cb(-1, nil, Unknown, MalformedJsonError)
return -1
}
pathsBuf[level-1] = bytesToString(&keyUnesc)
for pi, p := range paths {
if len(p) != level || pathFlags[pi] || !equalStr(&keyUnesc, p[level-1]) || !sameTree(p, pathsBuf[:level]) {
continue
}
match = pi
pathsMatched++
pathFlags[pi] = true
v, dt, _, e := Get(data[i+1:])
cb(pi, v, dt, e)
if pathsMatched == len(paths) {
break
}
}
if pathsMatched == len(paths) {
return i
}
}
if match == -1 {
tokenOffset := nextToken(data[i+1:])
i += tokenOffset
if data[i] == '{' {
blockSkip := blockEnd(data[i:], '{', '}')
i += blockSkip + 1
}
}
if i < ln {
switch data[i] {
case '{', '}', '[', '"':
i--
}
}
} else {
i--
}
case '{':
level++
case '}':
level--
case '[':
var ok bool
arrIdxFlags := make(map[int]struct{})
pIdxFlags := make([]bool, len(paths))
if level < 0 {
cb(-1, nil, Unknown, MalformedJsonError)
return -1
}
for pi, p := range paths {
if len(p) < level+1 || pathFlags[pi] || p[level][0] != '[' || !sameTree(p, pathsBuf[:level]) {
continue
}
if len(p[level]) >= 2 {
aIdx, _ := strconv.Atoi(p[level][1 : len(p[level])-1])
arrIdxFlags[aIdx] = x
pIdxFlags[pi] = true
}
}
if len(arrIdxFlags) > 0 {
level++
var curIdx int
arrOff, _ := ArrayEach(data[i:], func(value []byte, dataType ValueType, offset int, err error) {
if _, ok = arrIdxFlags[curIdx]; ok {
for pi, p := range paths {
if pIdxFlags[pi] {
aIdx, _ := strconv.Atoi(p[level-1][1 : len(p[level-1])-1])
if curIdx == aIdx {
of := searchKeys(value, p[level:]...)
pathsMatched++
pathFlags[pi] = true
if of != -1 {
v, dt, _, e := Get(value[of:])
cb(pi, v, dt, e)
}
}
}
}
}
curIdx += 1
})
if pathsMatched == len(paths) {
return i
}
i += arrOff - 1
} else {
// Do not search for keys inside arrays
if arraySkip := blockEnd(data[i:], '[', ']'); arraySkip == -1 {
return -1
} else {
i += arraySkip - 1
}
}
case ']':
level--
}
i++
}
return -1
}
// Data types available in valid JSON data.
// ValueType identifies the JSON type of a parsed value.
type ValueType int

const (
	NotExist = ValueType(iota)
	String
	Number
	Object
	Array
	Boolean
	Null
	Unknown
)

// String returns the lower-case human-readable name of the value type;
// anything outside the named constants (including Unknown) reads "unknown".
func (vt ValueType) String() string {
	names := [...]string{
		NotExist: "non-existent",
		String:   "string",
		Number:   "number",
		Object:   "object",
		Array:    "array",
		Boolean:  "boolean",
		Null:     "null",
	}
	if vt >= 0 && int(vt) < len(names) {
		return names[vt]
	}
	return "unknown"
}
var (
trueLiteral = []byte("true")
falseLiteral = []byte("false")
nullLiteral = []byte("null")
)
// createInsertComponent builds the JSON fragment spliced in when a path does
// not yet exist: one nested object/array per path segment, the value at the
// innermost level, plus the leading comma / enclosing delimiters requested
// by the caller. The buffer is sized exactly by calcAllocateSpace, so the
// two functions must stay in sync.
func createInsertComponent(keys []string, setValue []byte, comma, object bool) []byte {
	isIndex := string(keys[0][0]) == "[" // array-index segments look like "[N]"
	offset := 0
	lk := calcAllocateSpace(keys, setValue, comma, object)
	buffer := make([]byte, lk, lk)
	if comma {
		offset += WriteToBuffer(buffer[offset:], ",")
	}
	if isIndex && !comma {
		offset += WriteToBuffer(buffer[offset:], "[")
	} else {
		if object {
			offset += WriteToBuffer(buffer[offset:], "{")
		}
		if !isIndex {
			// Emit `"key":`
			offset += WriteToBuffer(buffer[offset:], "\"")
			offset += WriteToBuffer(buffer[offset:], keys[0])
			offset += WriteToBuffer(buffer[offset:], "\":")
		}
	}
	// Open one nested container per remaining path segment
	for i := 1; i < len(keys); i++ {
		if string(keys[i][0]) == "[" {
			offset += WriteToBuffer(buffer[offset:], "[")
		} else {
			offset += WriteToBuffer(buffer[offset:], "{\"")
			offset += WriteToBuffer(buffer[offset:], keys[i])
			offset += WriteToBuffer(buffer[offset:], "\":")
		}
	}
	offset += WriteToBuffer(buffer[offset:], string(setValue))
	// Close the nested containers in reverse order
	for i := len(keys) - 1; i > 0; i-- {
		if string(keys[i][0]) == "[" {
			offset += WriteToBuffer(buffer[offset:], "]")
		} else {
			offset += WriteToBuffer(buffer[offset:], "}")
		}
	}
	if isIndex && !comma {
		offset += WriteToBuffer(buffer[offset:], "]")
	}
	if object && !isIndex {
		offset += WriteToBuffer(buffer[offset:], "}")
	}
	return buffer
}
// calcAllocateSpace returns the exact number of bytes createInsertComponent
// will emit for the same arguments, so the output buffer can be allocated
// in one shot. The two functions must be kept in sync.
func calcAllocateSpace(keys []string, setValue []byte, comma, object bool) int {
	isIndex := keys[0][0] == '['
	size := 0
	if comma {
		size++ // leading ","
	}
	if isIndex && !comma {
		size += 2 // surrounding "[" and "]"
	} else {
		if object {
			size++ // opening "{"
		}
		if !isIndex {
			size += len(keys[0]) + 3 // `"key":`
		}
	}
	size += len(setValue)
	for _, k := range keys[1:] {
		if k[0] == '[' {
			size += 2 // "[" and "]"
		} else {
			size += len(k) + 5 // `{"key":` plus closing `}`
		}
	}
	if object && !isIndex {
		size++ // closing "}"
	}
	return size
}
// WriteToBuffer copies str into buffer and returns the number of bytes str
// occupies. Note the return value is len(str) regardless of how much actually
// fit; callers pre-size buffers via calcAllocateSpace so no truncation occurs.
func WriteToBuffer(buffer []byte, str string) int {
	n := len(str)
	copy(buffer, str)
	return n
}
/*
Delete - Receives existing data structure, path to delete.

Returns:
`data` - return modified data
*/
func Delete(data []byte, keys ...string) []byte {
	lk := len(keys)
	if lk == 0 {
		return data[:0]
	}

	// A final path element like "[0]" means the leaf to delete is an array element.
	array := false
	if len(keys[lk-1]) > 0 && string(keys[lk-1][0]) == "[" {
		array = true
	}

	var startOffset, keyOffset int
	endOffset := len(data)
	var err error
	if !array {
		if len(keys) > 1 {
			// Narrow the search window to the parent container of the leaf key.
			_, _, startOffset, endOffset, err = internalGet(data, keys[:lk-1]...)
			if err == KeyPathNotFoundError {
				// problem parsing the data
				return data
			}
		}

		keyOffset, err = findKeyStart(data[startOffset:endOffset], keys[lk-1])
		if err == KeyPathNotFoundError {
			// problem parsing the data
			return data
		}
		keyOffset += startOffset
		// Resolve where the leaf value ends so [keyOffset, endOffset) spans key+value.
		_, _, _, subEndOffset, _ := internalGet(data[startOffset:endOffset], keys[lk-1])
		endOffset = startOffset + subEndOffset
		tokEnd := tokenEnd(data[endOffset:])
		tokStart := findTokenStart(data[:keyOffset], ","[0])

		// Extend the deleted span over a following comma (with optional space),
		// or, when deleting the last entry, back over the preceding comma.
		if data[endOffset+tokEnd] == ","[0] {
			endOffset += tokEnd + 1
		} else if data[endOffset+tokEnd] == " "[0] && len(data) > endOffset+tokEnd+1 && data[endOffset+tokEnd+1] == ","[0] {
			endOffset += tokEnd + 2
		} else if data[endOffset+tokEnd] == "}"[0] && data[tokStart] == ","[0] {
			keyOffset = tokStart
		}
	} else {
		// Deleting an array element: the full path resolves it directly.
		_, _, keyOffset, endOffset, err = internalGet(data, keys...)
		if err == KeyPathNotFoundError {
			// problem parsing the data
			return data
		}

		tokEnd := tokenEnd(data[endOffset:])
		tokStart := findTokenStart(data[:keyOffset], ","[0])

		if data[endOffset+tokEnd] == ","[0] {
			endOffset += tokEnd + 1
		} else if data[endOffset+tokEnd] == "]"[0] && data[tokStart] == ","[0] {
			keyOffset = tokStart
		}
	}

	// We need to remove remaining trailing comma if we delete last element in the object
	prevTok := lastToken(data[:keyOffset])
	remainedValue := data[endOffset:]

	var newOffset int
	if nextToken(remainedValue) > -1 && remainedValue[nextToken(remainedValue)] == '}' && data[prevTok] == ',' {
		newOffset = prevTok
	} else {
		newOffset = prevTok + 1
	}

	// We have to make a copy here if we don't want to mangle the original data, because byte slices are
	// accessed by reference and not by value
	dataCopy := make([]byte, len(data))
	copy(dataCopy, data)
	data = append(dataCopy[:newOffset], dataCopy[endOffset:]...)

	return data
}
/*
Set - Receives existing data structure, path to set, and data to set at that key.

Returns:
`value` - modified byte array
`err` - On any parsing error
*/
func Set(data []byte, setValue []byte, keys ...string) (value []byte, err error) {
	// ensure keys are set
	if len(keys) == 0 {
		return nil, KeyPathNotFoundError
	}

	_, _, startOffset, endOffset, err := internalGet(data, keys...)
	if err != nil {
		if err != KeyPathNotFoundError {
			// problem parsing the data
			return nil, err
		}
		// full path doesnt exist
		// does any subpath exist? Walk prefixes of the path to find the
		// deepest container that is already present.
		var depth int
		for i := range keys {
			_, _, start, end, sErr := internalGet(data, keys[:i+1]...)
			if sErr != nil {
				break
			} else {
				endOffset = end
				startOffset = start
				depth++
			}
		}
		comma := true
		object := false
		if endOffset == -1 {
			// No part of the path exists: insert at the document's top level.
			firstToken := nextToken(data)
			// We can't set a top-level key if data isn't an object
			if firstToken < 0 || data[firstToken] != '{' {
				return nil, KeyPathNotFoundError
			}
			// Don't need a comma if the input is an empty object
			secondToken := firstToken + 1 + nextToken(data[firstToken+1:])
			if data[secondToken] == '}' {
				comma = false
			}
			// Set the top level key at the end (accounting for any trailing whitespace)
			// This assumes last token is valid like '}', could check and return error
			endOffset = lastToken(data)
		}
		depthOffset := endOffset
		if depth != 0 {
			// if subpath is a non-empty object, add to it
			// or if subpath is a non-empty array, add to it
			if (data[startOffset] == '{' && data[startOffset+1+nextToken(data[startOffset+1:])] != '}') ||
				(data[startOffset] == '[' && data[startOffset+1+nextToken(data[startOffset+1:])] == '{') && keys[depth:][0][0] == 91 { // 91 == '['
				// Insert just before the container's closing bracket.
				depthOffset--
				startOffset = depthOffset
				// otherwise, over-write it with a new object
			} else {
				comma = false
				object = true
			}
		} else {
			startOffset = depthOffset
		}
		// Splice: prefix + generated fragment for the missing tail + suffix.
		value = append(data[:startOffset], append(createInsertComponent(keys[depth:], setValue, comma, object), data[depthOffset:]...)...)
	} else {
		// path currently exists: replace the value bytes in place (new slice).
		startComponent := data[:startOffset]
		endComponent := data[endOffset:]

		value = make([]byte, len(startComponent)+len(endComponent)+len(setValue))
		newEndOffset := startOffset + len(setValue)
		copy(value[0:startOffset], startComponent)
		copy(value[startOffset:newEndOffset], setValue)
		copy(value[newEndOffset:], endComponent)
	}
	return value, nil
}
// getType classifies the JSON value starting at data[offset] and returns the
// raw value bytes, its ValueType, and the offset just past the value.
// For strings the returned slice still includes the surrounding quotes
// (internalGet strips them later).
func getType(data []byte, offset int) ([]byte, ValueType, int, error) {
	var dataType ValueType
	endOffset := offset

	// if string value
	if data[offset] == '"' {
		dataType = String
		if idx, _ := stringEnd(data[offset+1:]); idx != -1 {
			endOffset += idx + 1
		} else {
			return nil, dataType, offset, MalformedStringError
		}
	} else if data[offset] == '[' { // if array value
		dataType = Array
		// break label, for stopping nested loops
		endOffset = blockEnd(data[offset:], '[', ']')

		if endOffset == -1 {
			return nil, dataType, offset, MalformedArrayError
		}

		endOffset += offset
	} else if data[offset] == '{' { // if object value
		dataType = Object
		// break label, for stopping nested loops
		endOffset = blockEnd(data[offset:], '{', '}')

		if endOffset == -1 {
			return nil, dataType, offset, MalformedObjectError
		}

		endOffset += offset
	} else {
		// Number, Boolean or None
		end := tokenEnd(data[endOffset:])

		if end == -1 {
			return nil, dataType, offset, MalformedValueError
		}

		value := data[offset : endOffset+end]

		// Classify by first byte, then validate the full token where needed.
		switch data[offset] {
		case 't', 'f': // true or false
			if bytes.Equal(value, trueLiteral) || bytes.Equal(value, falseLiteral) {
				dataType = Boolean
			} else {
				return nil, Unknown, offset, UnknownValueTypeError
			}
		case 'u', 'n': // undefined or null
			if bytes.Equal(value, nullLiteral) {
				dataType = Null
			} else {
				return nil, Unknown, offset, UnknownValueTypeError
			}
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-':
			dataType = Number
		default:
			return nil, Unknown, offset, UnknownValueTypeError
		}

		endOffset += end
	}
	return data[offset:endOffset], dataType, endOffset, nil
}
/*
Get - Receives data structure, and key path to extract value from.

Returns:
`value` - Pointer to original data structure containing key value, or just empty slice if nothing found or error
`dataType` - Can be: `NotExist`, `String`, `Number`, `Object`, `Array`, `Boolean` or `Null`
`offset` - Offset from provided data structure where key value ends. Used mostly internally, for example for `ArrayEach` helper.
`err` - If key not found or any other parsing issue it should return error. If key not found it also sets `dataType` to `NotExist`

Accept multiple keys to specify path to JSON value (in case of quering nested structures).
If no keys provided it will try to extract closest JSON value (simple ones or object/array), useful for reading streams or arrays, see `ArrayEach` implementation.
*/
func Get(data []byte, keys ...string) (value []byte, dataType ValueType, offset int, err error) {
	// Thin wrapper over internalGet: discard the value's start offset and
	// surface its end offset as `offset`.
	value, dataType, _, offset, err = internalGet(data, keys...)
	return value, dataType, offset, err
}
// internalGet is the workhorse behind Get: it resolves the key path, skips to
// the closest value, classifies it via getType, and returns both the start
// and end offsets of the value within data. String values are returned
// without their surrounding quotes.
func internalGet(data []byte, keys ...string) (value []byte, dataType ValueType, offset, endOffset int, err error) {
	if len(keys) > 0 {
		if offset = searchKeys(data, keys...); offset == -1 {
			return nil, NotExist, -1, -1, KeyPathNotFoundError
		}
	}

	// Go to closest value
	nO := nextToken(data[offset:])
	if nO == -1 {
		return nil, NotExist, offset, -1, MalformedJsonError
	}

	offset += nO
	value, dataType, endOffset, err = getType(data, offset)
	if err != nil {
		return value, dataType, offset, endOffset, err
	}

	// Strip quotes from string values
	if dataType == String {
		value = value[1 : len(value)-1]
	}

	// Full slice expression caps capacity so callers appending to the result
	// cannot clobber the original data buffer.
	return value[:len(value):len(value)], dataType, offset, endOffset, nil
}
// ArrayEach is used when iterating arrays, accepts a callback function with the same return arguments as `Get`.
// keys (optional) selects the array to iterate inside data; with no keys, data itself
// must start with an array. The returned offset is where iteration stopped.
// Changes vs. original: idiomatic `for` instead of `for true`, and removal of a
// second `if e != nil` check that was unreachable (a non-nil error already
// returns at the top of the loop).
func ArrayEach(data []byte, cb func(value []byte, dataType ValueType, offset int, err error), keys ...string) (offset int, err error) {
	if len(data) == 0 {
		return -1, MalformedObjectError
	}

	nT := nextToken(data)
	if nT == -1 {
		return -1, MalformedJsonError
	}

	// Assume data[nT] opens the array; start scanning just past it.
	offset = nT + 1

	if len(keys) > 0 {
		if offset = searchKeys(data, keys...); offset == -1 {
			return offset, KeyPathNotFoundError
		}

		// Go to closest value
		nO := nextToken(data[offset:])
		if nO == -1 {
			return offset, MalformedJsonError
		}

		offset += nO

		if data[offset] != '[' {
			return offset, MalformedArrayError
		}

		offset++
	}

	nO := nextToken(data[offset:])
	if nO == -1 {
		return offset, MalformedJsonError
	}

	offset += nO

	// Empty array: nothing to iterate.
	if data[offset] == ']' {
		return offset, nil
	}

	for {
		v, t, o, e := Get(data[offset:])

		if e != nil {
			return offset, e
		}

		if o == 0 {
			break
		}

		if t != NotExist {
			// Report the element's start offset (Get returns its end offset).
			cb(v, t, offset+o-len(v), e)
		}

		offset += o

		skipToToken := nextToken(data[offset:])
		if skipToToken == -1 {
			return offset, MalformedArrayError
		}
		offset += skipToToken

		if data[offset] == ']' {
			break
		}

		if data[offset] != ',' {
			return offset, MalformedArrayError
		}

		offset++
	}

	return offset, nil
}
// ObjectEach iterates over the key-value pairs of a JSON object, invoking a given callback for each such entry.
// keys (optional) selects the object to iterate inside data. Iteration stops
// early if the callback returns a non-nil error, which is propagated.
func ObjectEach(data []byte, callback func(key []byte, value []byte, dataType ValueType, offset int) error, keys ...string) (err error) {
	offset := 0

	// Descend to the desired key, if requested
	if len(keys) > 0 {
		if off := searchKeys(data, keys...); off == -1 {
			return KeyPathNotFoundError
		} else {
			offset = off
		}
	}

	// Validate and skip past opening brace
	if off := nextToken(data[offset:]); off == -1 {
		return MalformedObjectError
	} else if offset += off; data[offset] != '{' {
		return MalformedObjectError
	} else {
		offset++
	}

	// Skip to the first token inside the object, or stop if we find the ending brace
	if off := nextToken(data[offset:]); off == -1 {
		return MalformedJsonError
	} else if offset += off; data[offset] == '}' {
		return nil
	}

	// Loop pre-condition: data[offset] points to what should be either the next entry's key, or the closing brace (if it's anything else, the JSON is malformed)
	for offset < len(data) {
		// Step 1: find the next key
		var key []byte

		// Check what the next token is: start of string, end of object, or something else (error)
		switch data[offset] {
		case '"':
			offset++ // accept as string and skip opening quote
		case '}':
			return nil // we found the end of the object; stop and return success
		default:
			return MalformedObjectError
		}

		// Find the end of the key string
		var keyEscaped bool
		if off, esc := stringEnd(data[offset:]); off == -1 {
			return MalformedJsonError
		} else {
			key, keyEscaped = data[offset:offset+off-1], esc
			offset += off
		}

		// Unescape the string if needed
		if keyEscaped {
			var stackbuf [unescapeStackBufSize]byte // stack-allocated array for allocation-free unescaping of small strings
			if keyUnescaped, err := Unescape(key, stackbuf[:]); err != nil {
				return MalformedStringEscapeError
			} else {
				key = keyUnescaped
			}
		}

		// Step 2: skip the colon
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedJsonError
		} else if offset += off; data[offset] != ':' {
			return MalformedJsonError
		} else {
			offset++
		}

		// Step 3: find the associated value, then invoke the callback
		if value, valueType, off, err := Get(data[offset:]); err != nil {
			return err
		} else if err := callback(key, value, valueType, offset+off); err != nil { // Invoke the callback here!
			return err
		} else {
			offset += off
		}

		// Step 4: skip over the next comma to the following token, or stop if we hit the ending brace
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedArrayError
		} else {
			offset += off
			switch data[offset] {
			case '}':
				return nil // Stop if we hit the close brace
			case ',':
				offset++ // Ignore the comma
			default:
				return MalformedObjectError
			}
		}

		// Skip to the next token after the comma
		if off := nextToken(data[offset:]); off == -1 {
			return MalformedArrayError
		} else {
			offset += off
		}
	}

	return MalformedObjectError // we shouldn't get here; it's expected that we will return via finding the ending brace
}
// GetUnsafeString returns the value retrieved by `Get` as a string backed by
// the original byte slice (no memory allocation). It does not handle escape symbols.
func GetUnsafeString(data []byte, keys ...string) (val string, err error) {
	raw, _, _, getErr := Get(data, keys...)
	if getErr != nil {
		return "", getErr
	}

	// Zero-copy conversion: the returned string aliases the input buffer.
	return bytesToString(&raw), nil
}
// GetString returns the value retrieved by `Get`, cast to a string if possible, trying to properly handle escape and utf8 symbols
// If key data type do not match, it will return an error.
func GetString(data []byte, keys ...string) (val string, err error) {
	raw, dataType, _, getErr := Get(data, keys...)
	if getErr != nil {
		return "", getErr
	}

	if dataType != String {
		return "", fmt.Errorf("Value is not a string: %s", string(raw))
	}

	// Fast path: no escape sequences present, return the raw bytes as-is.
	if bytes.IndexByte(raw, '\\') == -1 {
		return string(raw), nil
	}

	return ParseString(raw)
}
// GetFloat returns the value retrieved by `Get`, cast to a float64 if possible.
// The offset is the same as in `Get`.
// If key data type do not match, it will return an error.
func GetFloat(data []byte, keys ...string) (val float64, err error) {
	raw, dataType, _, getErr := Get(data, keys...)
	if getErr != nil {
		return 0, getErr
	}

	if dataType != Number {
		return 0, fmt.Errorf("Value is not a number: %s", string(raw))
	}

	return ParseFloat(raw)
}
// GetInt returns the value retrieved by `Get`, cast to a int64 if possible.
// If key data type do not match, it will return an error.
func GetInt(data []byte, keys ...string) (val int64, err error) {
	raw, dataType, _, getErr := Get(data, keys...)
	if getErr != nil {
		return 0, getErr
	}

	if dataType != Number {
		return 0, fmt.Errorf("Value is not a number: %s", string(raw))
	}

	return ParseInt(raw)
}
// GetBoolean returns the value retrieved by `Get`, cast to a bool if possible.
// The offset is the same as in `Get`.
// If key data type do not match, it will return error.
func GetBoolean(data []byte, keys ...string) (val bool, err error) {
	raw, dataType, _, getErr := Get(data, keys...)
	if getErr != nil {
		return false, getErr
	}

	if dataType != Boolean {
		return false, fmt.Errorf("Value is not a boolean: %s", string(raw))
	}

	return ParseBoolean(raw)
}
// ParseBoolean parses a Boolean ValueType into a Go bool (not particularly useful, but here for completeness)
func ParseBoolean(b []byte) (bool, error) {
	if bytes.Equal(b, trueLiteral) {
		return true, nil
	}
	if bytes.Equal(b, falseLiteral) {
		return false, nil
	}
	return false, MalformedValueError
}
// ParseString parses a String ValueType into a Go string (the main parsing work is unescaping the JSON string)
func ParseString(b []byte) (string, error) {
	// stack-allocated scratch space for allocation-free unescaping of small strings
	var stackbuf [unescapeStackBufSize]byte
	unescaped, unescapeErr := Unescape(b, stackbuf[:])
	if unescapeErr != nil {
		return "", MalformedValueError
	}
	return string(unescaped), nil
}
// ParseFloat parses a Number ValueType into a Go float64
func ParseFloat(b []byte) (float64, error) {
	v, parseErr := parseFloat(&b)
	if parseErr != nil {
		return 0, MalformedValueError
	}
	return v, nil
}
// ParseInt parses a Number ValueType into a Go int64
func ParseInt(b []byte) (int64, error) {
	v, ok, overflow := parseInt(b)
	switch {
	case ok:
		return v, nil
	case overflow:
		return 0, OverflowIntegerError
	default:
		return 0, MalformedValueError
	}
}
jsonparser-1.1.1/parser_error_test.go 0000664 0000000 0000000 00000007672 13776023740 0020020 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"fmt"
"strings"
"testing"
)
// testPaths is the fixed set of key paths fed to EachKey by testIter; the
// paths need not exist in the input being tested.
var testPaths = [][]string{
	[]string{"test"},
	[]string{"these"},
	[]string{"keys"},
	[]string{"please"},
}
// testIter runs EachKey over data using testPaths and surfaces any iteration
// error reported to the callback as the function's return value (the last
// error seen wins).
func testIter(data []byte) (err error) {
	EachKey(data, func(idx int, value []byte, vt ValueType, iterErr error) {
		if iterErr != nil {
			err = fmt.Errorf("Error parsing json: %s", iterErr.Error())
		}
	}, testPaths...)

	return err
}
// TestPanickingErrors feeds truncated/garbage JSON to EachKey (via testIter)
// and Get, expecting errors rather than panics.
func TestPanickingErrors(t *testing.T) {
	for _, input := range []string{
		`{"test":`,
		`{"test":0}some":[{"these":[{"keys":"some"}]}]}some"}]}],"please":"some"}`,
	} {
		if err := testIter([]byte(input)); err == nil {
			t.Error("Expected error...")
		}
	}

	if _, _, _, err := Get([]byte(`{"test":`), "test"); err == nil {
		t.Error("Expected error...")
	}

	if _, _, _, err := Get([]byte(`{"some":0}some":[{"some":[{"some":"some"}]}]}some"}]}],"some":"some"}`), "x"); err == nil {
		t.Error("Expected error...")
	}
}
// check having a very deep key depth
func TestKeyDepth(t *testing.T) {
	var b strings.Builder
	keys := make([]string, 0, 128)

	// Build a (deliberately malformed) object: values like `0x` and a
	// trailing comma exercise the error paths without panicking.
	b.WriteString("{")
	for i := 0; i < 128; i++ {
		fmt.Fprintf(&b, `"key%d": %dx,`, i, i)
		keys = append(keys, fmt.Sprintf("key%d", i))
	}
	b.WriteString("}")

	// Note: `keys` is passed as ONE 128-element path, not 128 paths.
	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, keys)
}
// check having a bunch of keys in a call to EachKey
func TestKeyCount(t *testing.T) {
	var b strings.Builder
	keys := make([][]string, 0, 128)

	// Build a valid 128-key object and one single-element path per key.
	b.WriteString("{")
	for i := 0; i < 128; i++ {
		fmt.Fprintf(&b, `"key%d":"%d"`, i, i)
		if i != 127 {
			b.WriteString(",")
		}
		keys = append(keys, []string{fmt.Sprintf("key%d", i)})
	}
	b.WriteString("}")

	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, keys...)
}
// try pulling lots of keys out of a big array
func TestKeyDepthArray(t *testing.T) {
	var b strings.Builder
	keys := make([]string, 0, 128)

	// Build an array of objects (with a trailing comma before the `]`).
	b.WriteString("[")
	for i := 0; i < 128; i++ {
		fmt.Fprintf(&b, `{"key": %d},`, i)
		keys = append(keys, fmt.Sprintf("[%d].key", i))
	}
	b.WriteString("]")

	// Note: `keys` is passed as ONE deep path, not many paths.
	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, keys)
}
// check having a bunch of keys
func TestKeyCountArray(t *testing.T) {
	var b strings.Builder
	keys := make([][]string, 0, 128)

	// Build a valid 128-element array of objects, one indexed path per element.
	b.WriteString("[")
	for i := 0; i < 128; i++ {
		fmt.Fprintf(&b, `{"key":"%d"}`, i)
		if i != 127 {
			b.WriteString(",")
		}
		keys = append(keys, []string{fmt.Sprintf("[%d].key", i)})
	}
	b.WriteString("]")

	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, keys...)
}
// check having a bunch of keys in a super deep array
func TestEachKeyArray(t *testing.T) {
	var b strings.Builder
	keys := make([][]string, 0, 32)

	// Build `[0,1,...,126,]` — the original appended a comma after every
	// element (its `i < 127` guard was always true), so the trailing comma
	// is preserved here for identical input.
	b.WriteString(`[`)
	for i := 0; i < 127; i++ {
		fmt.Fprintf(&b, `%d,`, i)
		if i < 32 {
			// Paths deliberately index past the end of the array.
			keys = append(keys, []string{fmt.Sprintf("[%d]", 128+i)})
		}
	}
	b.WriteString(`]`)

	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, keys...)
}
// TestLargeArray pulls a single index out of a large numeric array
// (which ends with a trailing comma, as in the original fixture).
func TestLargeArray(t *testing.T) {
	var b strings.Builder

	b.WriteString(`[`)
	for i := 0; i < 127; i++ {
		// comma after every element: the original `i < 127` guard always held
		fmt.Fprintf(&b, `%d,`, i)
	}
	b.WriteString(`]`)

	paths := [][]string{{`[1]`}}
	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, paths...)
}
// TestArrayOutOfBounds requests an index far past the end of a short array;
// EachKey must not panic or loop forever.
func TestArrayOutOfBounds(t *testing.T) {
	var b strings.Builder

	b.WriteString(`[`)
	for i := 0; i < 61; i++ {
		// comma after every element: the original `i < 61` guard always held
		fmt.Fprintf(&b, `%d,`, i)
	}
	b.WriteString(`]`)

	paths := [][]string{{`[128]`}}
	EachKey([]byte(b.String()), func(offset int, value []byte, dt ValueType, err error) {
	}, paths...)
}
jsonparser-1.1.1/parser_test.go 0000664 0000000 0000000 00000137161 13776023740 0016604 0 ustar 00root root 0000000 0000000 package jsonparser
import (
"bytes"
"fmt"
_ "fmt"
"reflect"
"testing"
)
// Set it to a non-empty value to run only a specific test.
var activeTest = ""
// toArray collects every element of a JSON array into a slice of raw byte
// slices via ArrayEach; iteration errors are ignored (test helper).
func toArray(data []byte) (result [][]byte) {
	ArrayEach(data, func(value []byte, dataType ValueType, offset int, err error) {
		result = append(result, value)
	})

	return
}
// toStringArray collects every element of a JSON array as a Go string via
// ArrayEach; iteration errors are ignored (test helper).
func toStringArray(data []byte) (result []string) {
	ArrayEach(data, func(value []byte, dataType ValueType, offset int, err error) {
		result = append(result, string(value))
	})

	return
}
// GetTest describes a single Get/GetString/GetInt/... test case.
type GetTest struct {
	desc string // human-readable description of the case
	json string // input JSON document
	path []string // key path passed to Get

	isErr   bool // expect an error other than "key not found"
	isFound bool // expect the key path to resolve
	data    interface{} // expected value when found
}
// SetTest describes a single Set test case.
type SetTest struct {
	desc    string // human-readable description of the case
	json    string // input JSON document
	setData string // raw JSON value to write at path

	path []string // key path passed to Set

	isErr   bool // expect a parsing error
	isFound bool // expect Set to succeed (path found or created)
	data    interface{} // expected resulting document
}
// DeleteTest describes a single Delete test case.
type DeleteTest struct {
	desc string // human-readable description of the case
	json string // input JSON document
	path []string // key path passed to Delete
	data interface{} // expected resulting document
}
// deleteTests is the fixture table for TestDelete. Several entries feed
// deliberately malformed JSON to document current (non-ideal) behavior; see
// the inline comments on those cases.
var deleteTests = []DeleteTest{
	{
		desc: "Delete test key",
		json: `{"test":"input"}`,
		path: []string{"test"},
		data: `{}`,
	},
	{
		desc: "Delete object",
		json: `{"test":"input"}`,
		path: []string{},
		data: ``,
	},
	{
		desc: "Delete a nested object",
		json: `{"test":"input","new.field":{"key": "new object"}}`,
		path: []string{"new.field", "key"},
		data: `{"test":"input","new.field":{}}`,
	},
	{
		desc: "Deleting a key that doesn't exist should return the same object",
		json: `{"test":"input"}`,
		path: []string{"test2"},
		data: `{"test":"input"}`,
	},
	{
		desc: "Delete object in an array",
		json: `{"test":[{"key":"val-obj1"}]}`,
		path: []string{"test", "[0]"},
		data: `{"test":[]}`,
	},
	{
		desc: "Deleting a object in an array that doesn't exists should return the same object",
		json: `{"test":[{"key":"val-obj1"}]}`,
		path: []string{"test", "[1]"},
		data: `{"test":[{"key":"val-obj1"}]}`,
	},
	{
		desc: "Delete a complex object in a nested array",
		json: `{"test":[{"key":[{"innerKey":"innerKeyValue"}]}]}`,
		path: []string{"test", "[0]", "key", "[0]"},
		data: `{"test":[{"key":[]}]}`,
	},
	{
		desc: "Delete known key (simple type within nested array)",
		json: `{"test":[{"key":["innerKey"]}]}`,
		path: []string{"test", "[0]", "key", "[0]"},
		data: `{"test":[{"key":[]}]}`,
	},
	{
		desc: "Delete in empty json",
		json: `{}`,
		path: []string{},
		data: ``,
	},
	{
		desc: "Delete empty array",
		json: `[]`,
		path: []string{},
		data: ``,
	},
	{
		desc: "Deleting non json should return the same value",
		json: `1.323`,
		path: []string{"foo"},
		data: `1.323`,
	},
	{
		desc: "Delete known key (top level array)",
		json: `[{"key":"val-obj1"}]`,
		path: []string{"[0]"},
		data: `[]`,
	},
	{ // This test deletes the key instead of returning a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
		desc: `malformed with trailing whitespace`,
		json: `{"a":1 `,
		path: []string{"a"},
		data: `{ `,
	},
	{ // This test dels the key instead of returning a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
		desc: "malformed 'colon chain', delete b",
		json: `{"a":"b":"c"}`,
		path: []string{"b"},
		data: `{"a":}`,
	},
	{
		desc: "Delete object without inner array",
		json: `{"a": {"b": 1}, "b": 2}`,
		path: []string{"b"},
		data: `{"a": {"b": 1}}`,
	},
	{
		desc: "Delete object without inner array",
		json: `{"a": [{"b": 1}], "b": 2}`,
		path: []string{"b"},
		data: `{"a": [{"b": 1}]}`,
	},
	{
		desc: "Delete object without inner array",
		json: `{"a": {"c": {"b": 3}, "b": 1}, "b": 2}`,
		path: []string{"a", "b"},
		data: `{"a": {"c": {"b": 3}}, "b": 2}`,
	},
	{
		desc: "Delete object without inner array",
		json: `{"a": [{"c": {"b": 3}, "b": 1}], "b": 2}`,
		path: []string{"a", "[0]", "b"},
		data: `{"a": [{"c": {"b": 3}}], "b": 2}`,
	},
	{
		desc: "Remove trailing comma if last object is deleted",
		json: `{"a": "1", "b": "2"}`,
		path: []string{"b"},
		data: `{"a": "1"}`,
	},
	{
		desc: "Correctly delete first element with space-comma",
		json: `{"a": "1" ,"b": "2" }`,
		path: []string{"a"},
		data: `{"b": "2" }`,
	},
	{
		desc: "Correctly delete middle element with space-comma",
		json: `{"a": "1" ,"b": "2" , "c": 3}`,
		path: []string{"b"},
		data: `{"a": "1" , "c": 3}`,
	},
	{
		desc: "Delete non-last key",
		json: `{"test":"input","test1":"input1"}`,
		path: []string{"test"},
		data: `{"test1":"input1"}`,
	},
	{
		desc: "Delete non-exist key",
		json: `{"test:":"input"}`,
		path: []string{"test", "test1"},
		data: `{"test:":"input"}`,
	},
	{
		desc: "Delete non-last object in an array",
		json: `[{"key":"val-obj1"},{"key2":"val-obj2"}]`,
		path: []string{"[0]"},
		data: `[{"key2":"val-obj2"}]`,
	},
	{
		desc: "Delete non-first object in an array",
		json: `[{"key":"val-obj1"},{"key2":"val-obj2"}]`,
		path: []string{"[1]"},
		data: `[{"key":"val-obj1"}]`,
	},
	{
		desc: "Issue #188: infinite loop in Delete",
		json: `^_�^C^A^@[`,
		path: []string{""},
		data: `^_�^C^A^@[`,
	},
	{
		desc: "Issue #188: infinite loop in Delete",
		json: `^_�^C^A^@{`,
		path: []string{""},
		data: `^_�^C^A^@{`,
	},
}
// setTests is the fixture table for TestSet. Cases cover replacing existing
// values, creating missing subtrees (including array-index paths), and
// documented-but-non-ideal behavior on malformed input (see inline comments).
var setTests = []SetTest{
	{
		desc:    "set unknown key (string)",
		json:    `{"test":"input"}`,
		isFound: true,
		path:    []string{"new.field"},
		setData: `"new value"`,
		data:    `{"test":"input","new.field":"new value"}`,
	},
	{
		desc:    "set known key (string)",
		json:    `{"test":"input"}`,
		isFound: true,
		path:    []string{"test"},
		setData: `"new value"`,
		data:    `{"test":"new value"}`,
	},
	{
		desc:    "set unknown key (object)",
		json:    `{"test":"input"}`,
		isFound: true,
		path:    []string{"new.field"},
		setData: `{"key": "new object"}`,
		data:    `{"test":"input","new.field":{"key": "new object"}}`,
	},
	{
		desc:    "set known key (object)",
		json:    `{"test":"input"}`,
		isFound: true,
		path:    []string{"test"},
		setData: `{"key": "new object"}`,
		data:    `{"test":{"key": "new object"}}`,
	},
	{
		desc:    "set known key (object within array)",
		json:    `{"test":[{"key":"val-obj1"}]}`,
		isFound: true,
		path:    []string{"test", "[0]"},
		setData: `{"key":"new object"}`,
		data:    `{"test":[{"key":"new object"}]}`,
	},
	{
		desc:    "set unknown key (replace object)",
		json:    `{"test":[{"key":"val-obj1"}]}`,
		isFound: true,
		path:    []string{"test", "newKey"},
		setData: `"new object"`,
		data:    `{"test":{"newKey":"new object"}}`,
	},
	{
		desc:    "set unknown key (complex object within nested array)",
		json:    `{"test":[{"key":[{"innerKey":"innerKeyValue"}]}]}`,
		isFound: true,
		path:    []string{"test", "[0]", "key", "[0]", "newInnerKey"},
		setData: `{"key":"new object"}`,
		data:    `{"test":[{"key":[{"innerKey":"innerKeyValue","newInnerKey":{"key":"new object"}}]}]}`,
	},
	{
		desc:    "set known key (complex object within nested array)",
		json:    `{"test":[{"key":[{"innerKey":"innerKeyValue"}]}]}`,
		isFound: true,
		path:    []string{"test", "[0]", "key", "[0]", "innerKey"},
		setData: `{"key":"new object"}`,
		data:    `{"test":[{"key":[{"innerKey":{"key":"new object"}}]}]}`,
	},
	{
		desc:    "set unknown key (object, partial subtree exists)",
		json:    `{"test":{"input":"output"}}`,
		isFound: true,
		path:    []string{"test", "new.field"},
		setData: `{"key":"new object"}`,
		data:    `{"test":{"input":"output","new.field":{"key":"new object"}}}`,
	},
	{
		desc:    "set unknown key (object, empty partial subtree exists)",
		json:    `{"test":{}}`,
		isFound: true,
		path:    []string{"test", "new.field"},
		setData: `{"key":"new object"}`,
		data:    `{"test":{"new.field":{"key":"new object"}}}`,
	},
	{
		desc:    "set unknown key (object, no subtree exists)",
		json:    `{"test":"input"}`,
		isFound: true,
		path:    []string{"new.field", "nested", "value"},
		setData: `{"key": "new object"}`,
		data:    `{"test":"input","new.field":{"nested":{"value":{"key": "new object"}}}}`,
	},
	{
		desc:    "set in empty json",
		json:    `{}`,
		isFound: true,
		path:    []string{"foo"},
		setData: `"null"`,
		data:    `{"foo":"null"}`,
	},
	{
		desc:    "set subtree in empty json",
		json:    `{}`,
		isFound: true,
		path:    []string{"foo", "bar"},
		setData: `"null"`,
		data:    `{"foo":{"bar":"null"}}`,
	},
	{
		desc:    "set in empty string - not found",
		json:    ``,
		isFound: false,
		path:    []string{"foo"},
		setData: `"null"`,
		data:    ``,
	},
	{
		desc:    "set in Number - not found",
		json:    `1.323`,
		isFound: false,
		path:    []string{"foo"},
		setData: `"null"`,
		data:    `1.323`,
	},
	{
		desc:    "set known key (top level array)",
		json:    `[{"key":"val-obj1"}]`,
		isFound: true,
		path:    []string{"[0]", "key"},
		setData: `"new object"`,
		data:    `[{"key":"new object"}]`,
	},
	{
		desc:    "set unknown key (trailing whitespace)",
		json:    `{"key":"val-obj1"} `,
		isFound: true,
		path:    []string{"alt-key"},
		setData: `"new object"`,
		data:    `{"key":"val-obj1","alt-key":"new object"} `,
	},
	{ // This test sets the key instead of returning a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
		desc:    `malformed with trailing whitespace`,
		json:    `{"a":1 `,
		path:    []string{"a"},
		setData: `2`,
		isFound: true,
		data:    `{"a":2 `,
	},
	{ // This test sets the key instead of returning a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
		desc:    "malformed 'colon chain', set second string",
		json:    `{"a":"b":"c"}`,
		path:    []string{"b"},
		setData: `"d"`,
		isFound: true,
		data:    `{"a":"b":"d"}`,
	},
	{
		desc:    "set indexed path to object on empty JSON",
		json:    `{}`,
		path:    []string{"top", "[0]", "middle", "[0]", "bottom"},
		setData: `"value"`,
		isFound: true,
		data:    `{"top":[{"middle":[{"bottom":"value"}]}]}`,
	},
	{
		desc:    "set indexed path on existing object with object",
		json:    `{"top":[{"middle":[]}]}`,
		path:    []string{"top", "[0]", "middle", "[0]", "bottom"},
		setData: `"value"`,
		isFound: true,
		data:    `{"top":[{"middle":[{"bottom":"value"}]}]}`,
	},
	{
		desc:    "set indexed path on existing object with value",
		json:    `{"top":[{"middle":[]}]}`,
		path:    []string{"top", "[0]", "middle", "[0]"},
		setData: `"value"`,
		isFound: true,
		data:    `{"top":[{"middle":["value"]}]}`,
	},
	{
		desc:    "set indexed path on empty object with value",
		json:    `{}`,
		path:    []string{"top", "[0]", "middle", "[0]"},
		setData: `"value"`,
		isFound: true,
		data:    `{"top":[{"middle":["value"]}]}`,
	},
	{
		desc:    "set indexed path on object with existing array",
		json:    `{"top":["one", "two", "three"]}`,
		path:    []string{"top", "[2]"},
		setData: `"value"`,
		isFound: true,
		data:    `{"top":["one", "two", "value"]}`,
	},
	{
		desc:    "set non-exist key",
		json:    `{"test":"input"}`,
		setData: `"new value"`,
		isFound: false,
	},
	{
		desc:    "set key in invalid json",
		json:    `{"test"::"input"}`,
		path:    []string{"test"},
		setData: "new value",
		isErr:   true,
	},
	{
		desc:    "set unknown key (simple object within nested array)",
		json:    `{"test":{"key":[{"innerKey":"innerKeyValue", "innerKey2":"innerKeyValue2"}]}}`,
		isFound: true,
		path:    []string{"test", "key", "[1]", "newInnerKey"},
		setData: `"new object"`,
		data:    `{"test":{"key":[{"innerKey":"innerKeyValue", "innerKey2":"innerKeyValue2"},{"newInnerKey":"new object"}]}}`,
	},
}
var getTests = []GetTest{
// Trivial tests
{
desc: "read string",
json: `""`,
isFound: true,
data: ``,
},
{
desc: "read number",
json: `0`,
isFound: true,
data: `0`,
},
{
desc: "read object",
json: `{}`,
isFound: true,
data: `{}`,
},
{
desc: "read array",
json: `[]`,
isFound: true,
data: `[]`,
},
{
desc: "read boolean",
json: `true`,
isFound: true,
data: `true`,
},
// Found key tests
{
desc: "handling multiple nested keys with same name",
json: `{"a":[{"b":1},{"b":2},3],"c":{"c":[1,2]}} }`,
path: []string{"c", "c"},
isFound: true,
data: `[1,2]`,
},
{
desc: "read basic key",
json: `{"a":"b"}`,
path: []string{"a"},
isFound: true,
data: `b`,
},
{
desc: "read basic key with space",
json: `{"a": "b"}`,
path: []string{"a"},
isFound: true,
data: `b`,
},
{
desc: "read composite key",
json: `{"a": { "b":{"c":"d" }}}`,
path: []string{"a", "b", "c"},
isFound: true,
data: `d`,
},
{
desc: `read numberic value as string`,
json: `{"a": "b", "c": 1}`,
path: []string{"c"},
isFound: true,
data: `1`,
},
{
desc: `handle multiple nested keys with same name`,
json: `{"a":[{"b":1},{"b":2},3],"c":{"c":[1,2]}} }`,
path: []string{"c", "c"},
isFound: true,
data: `[1,2]`,
},
{
desc: `read string values with quotes`,
json: `{"a": "string\"with\"quotes"}`,
path: []string{"a"},
isFound: true,
data: `string\"with\"quotes`,
},
{
desc: `read object`,
json: `{"a": { "b":{"c":"d" }}}`,
path: []string{"a", "b"},
isFound: true,
data: `{"c":"d" }`,
},
{
desc: `empty path`,
json: `{"c":"d" }`,
path: []string{},
isFound: true,
data: `{"c":"d" }`,
},
{
desc: `formatted JSON value`,
json: "{\n \"a\": \"b\"\n}",
path: []string{"a"},
isFound: true,
data: `b`,
},
{
desc: `formatted JSON value 2`,
json: "{\n \"a\":\n {\n\"b\":\n {\"c\":\"d\",\n\"e\": \"f\"}\n}\n}",
path: []string{"a", "b"},
isFound: true,
data: "{\"c\":\"d\",\n\"e\": \"f\"}",
},
{
desc: `whitespace`,
json: " \n\r\t{ \n\r\t\"whitespace\" \n\r\t: \n\r\t333 \n\r\t} \n\r\t",
path: []string{"whitespace"},
isFound: true,
data: "333",
},
{
desc: `escaped backslash quote`,
json: `{"a": "\\\""}`,
path: []string{"a"},
isFound: true,
data: `\\\"`,
},
{
desc: `unescaped backslash quote`,
json: `{"a": "\\"}`,
path: []string{"a"},
isFound: true,
data: `\\`,
},
{
desc: `unicode in JSON`,
json: `{"a": "15°C"}`,
path: []string{"a"},
isFound: true,
data: `15°C`,
},
{
desc: `no padding + nested`,
json: `{"a":{"a":"1"},"b":2}`,
path: []string{"b"},
isFound: true,
data: `2`,
},
{
desc: `no padding + nested + array`,
json: `{"a":{"b":[1,2]},"c":3}`,
path: []string{"c"},
isFound: true,
data: `3`,
},
{
desc: `empty key`,
json: `{"":{"":{"":true}}}`,
path: []string{"", "", ""},
isFound: true,
data: `true`,
},
// Escaped key tests
{
desc: `key with simple escape`,
json: `{"a\\b":1}`,
path: []string{"a\\b"},
isFound: true,
data: `1`,
},
{
desc: `key and value with whitespace escapes`,
json: `{"key\b\f\n\r\tkey":"value\b\f\n\r\tvalue"}`,
path: []string{"key\b\f\n\r\tkey"},
isFound: true,
data: `value\b\f\n\r\tvalue`, // value is not unescaped since this is Get(), but the key should work correctly
},
{
desc: `key with Unicode escape`,
json: `{"a\u00B0b":1}`,
path: []string{"a\u00B0b"},
isFound: true,
data: `1`,
},
{
desc: `key with complex escape`,
json: `{"a\uD83D\uDE03b":1}`,
path: []string{"a\U0001F603b"},
isFound: true,
data: `1`,
},
{ // This test returns a match instead of a parse error, as checking for the malformed JSON would reduce performance
desc: `malformed with trailing whitespace`,
json: `{"a":1 `,
path: []string{"a"},
isFound: true,
data: `1`,
},
{ // This test returns a match instead of a parse error, as checking for the malformed JSON would reduce performance
desc: `malformed with wrong closing bracket`,
json: `{"a":1]`,
path: []string{"a"},
isFound: true,
data: `1`,
},
// Not found key tests
{
desc: `empty input`,
json: ``,
path: []string{"a"},
isFound: false,
},
{
desc: "non-existent key 1",
json: `{"a":"b"}`,
path: []string{"c"},
isFound: false,
},
{
desc: "non-existent key 2",
json: `{"a":"b"}`,
path: []string{"b"},
isFound: false,
},
{
desc: "non-existent key 3",
json: `{"aa":"b"}`,
path: []string{"a"},
isFound: false,
},
{
desc: "apply scope of parent when search for nested key",
json: `{"a": { "b": 1}, "c": 2 }`,
path: []string{"a", "b", "c"},
isFound: false,
},
{
desc: `apply scope to key level`,
json: `{"a": { "b": 1}, "c": 2 }`,
path: []string{"b"},
isFound: false,
},
{
desc: `handle escaped quote in key name in JSON`,
json: `{"key\"key": 1}`,
path: []string{"key"},
isFound: false,
},
{
desc: "handling multiple keys with different name",
json: `{"a":{"a":1},"b":{"a":3,"c":[1,2]}}`,
path: []string{"a", "c"},
isFound: false,
},
{
desc: "handling nested json",
json: `{"a":{"b":{"c":1},"d":4}}`,
path: []string{"a", "d"},
isFound: true,
data: `4`,
},
{ // Issue #148
desc: `missing key in different key same level`,
json: `{"s":"s","ic":2,"r":{"o":"invalid"}}`,
path: []string{"ic", "o"},
isFound: false,
},
// Error/invalid tests
{
desc: `handle escaped quote in key name in JSON`,
json: `{"key\"key": 1}`,
path: []string{"key"},
isFound: false,
},
{
desc: `missing closing brace, but can still find key`,
json: `{"a":"b"`,
path: []string{"a"},
isFound: true,
data: `b`,
},
{
desc: `missing value closing quote`,
json: `{"a":"b`,
path: []string{"a"},
isErr: true,
},
{
desc: `missing value closing curly brace`,
json: `{"a": { "b": "c"`,
path: []string{"a"},
isErr: true,
},
{
desc: `missing value closing square bracket`,
json: `{"a": [1, 2, 3 }`,
path: []string{"a"},
isErr: true,
},
{
desc: `missing value 1`,
json: `{"a":`,
path: []string{"a"},
isErr: true,
},
{
desc: `missing value 2`,
json: `{"a": `,
path: []string{"a"},
isErr: true,
},
{
desc: `missing value 3`,
json: `{"a":}`,
path: []string{"a"},
isErr: true,
},
{
desc: `malformed array (no closing brace)`,
json: `{"a":[, "b":123}`,
path: []string{"b"},
isFound: false,
},
{ // Issue #81
desc: `missing key in object in array`,
json: `{"p":{"a":[{"u":"abc","t":"th"}]}}`,
path: []string{"p", "a", "[0]", "x"},
isFound: false,
},
{ // Issue #81 counter test
desc: `existing key in object in array`,
json: `{"p":{"a":[{"u":"abc","t":"th"}]}}`,
path: []string{"p", "a", "[0]", "u"},
isFound: true,
data: "abc",
},
{ // This test returns not found instead of a parse error, as checking for the malformed JSON would reduce performance
desc: "malformed key (followed by comma followed by colon)",
json: `{"a",:1}`,
path: []string{"a"},
isFound: false,
},
{ // This test returns a match instead of a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
desc: "malformed 'colon chain', lookup first string",
json: `{"a":"b":"c"}`,
path: []string{"a"},
isFound: true,
data: "b",
},
{ // This test returns a match instead of a parse error, as checking for the malformed JSON would reduce performance (this is not ideal)
desc: "malformed 'colon chain', lookup second string",
json: `{"a":"b":"c"}`,
path: []string{"b"},
isFound: true,
data: "c",
},
// Array index paths
{
desc: "last key in path is index",
json: `{"a":[{"b":1},{"b":"2"}, 3],"c":{"c":[1,2]}}`,
path: []string{"a", "[1]"},
isFound: true,
data: `{"b":"2"}`,
},
{
desc: "get string from array",
json: `{"a":[{"b":1},"foo", 3],"c":{"c":[1,2]}}`,
path: []string{"a", "[1]"},
isFound: true,
data: "foo",
},
{
desc: "key in path is index",
json: `{"a":[{"b":"1"},{"b":"2"},3],"c":{"c":[1,2]}}`,
path: []string{"a", "[0]", "b"},
isFound: true,
data: `1`,
},
{
desc: "last key in path is an index to value in array (formatted json)",
json: `{
"a": [
{
"b": 1
},
{"b":"2"},
3
],
"c": {
"c": [
1,
2
]
}
}`,
path: []string{"a", "[1]"},
isFound: true,
data: `{"b":"2"}`,
},
{
desc: "key in path is index (formatted json)",
json: `{
"a": [
{"b": 1},
{"b": "2"},
3
],
"c": {
"c": [
1,
2
]
}
}`,
path: []string{"a", "[0]", "b"},
isFound: true,
data: `1`,
},
{
// Issue #178: Crash in searchKeys
desc: `invalid json`,
json: `{{{"":`,
path: []string{"a", "b"},
isFound: false,
},
{
desc: `opening brace instead of closing and without key`,
json: `{"a":1{`,
path: []string{"b"},
isFound: false,
},
}
// getIntTests drives TestGetInt: plain and formatted numeric values,
// int64 overflow/underflow rejection (issue #138), and non-numeric input.
var getIntTests = []GetTest{
	{
		desc:    `read numeric value as number`,
		json:    `{"a": "b", "c": 1}`,
		path:    []string{"c"},
		isFound: true,
		data:    int64(1),
	},
	{
		desc:    `read numeric value as number in formatted JSON`,
		json:    "{\"a\": \"b\", \"c\": 1 \n}",
		path:    []string{"c"},
		isFound: true,
		data:    int64(1),
	},
	{ // Issue #138: overflow detection
		desc:  `Fails because of overflow`,
		json:  `{"p":9223372036854775808}`, // math.MaxInt64 + 1
		path:  []string{"p"},
		isErr: true,
	},
	{ // Issue #138: overflow detection
		desc:  `Fails because of underflow`,
		json:  `{"p":-9223372036854775809}`, // math.MinInt64 - 1
		path:  []string{"p"},
		isErr: true,
	},
	{
		desc:  `read non-numeric value as integer`,
		json:  `{"a": "b", "c": "d"}`,
		path:  []string{"c"},
		isErr: true,
	},
}
// getFloatTests drives TestGetFloat: numeric values in compact and formatted
// JSON, plus a type error for a string value.
var getFloatTests = []GetTest{
	{
		desc:    `read numeric value as number`,
		json:    `{"a": "b", "c": 1.123}`,
		path:    []string{"c"},
		isFound: true,
		data:    float64(1.123),
	},
	{
		desc:    `read numeric value as number in formatted JSON`,
		json:    "{\"a\": \"b\", \"c\": 23.41323 \n}",
		path:    []string{"c"},
		isFound: true,
		data:    float64(23.41323),
	},
	{
		desc:  `read non-numeric value as float`,
		json:  `{"a": "b", "c": "d"}`,
		path:  []string{"c"},
		isErr: true,
	},
}
// getStringTests drives TestGetString. Unlike Get(), GetString() unescapes
// the value, so expected data here is the decoded Go string (Unicode escapes
// translated, backslash escapes resolved). Also covers malformed-JSON cases
// that must terminate rather than loop, and invalid array-index paths.
var getStringTests = []GetTest{
	{
		desc:    `Translate Unicode symbols`,
		json:    `{"c": "test"}`, // no escapes: passes through unchanged
		path:    []string{"c"},
		isFound: true,
		data:    `test`,
	},
	{
		desc:    `Translate Unicode symbols`,
		json:    `{"c": "15\u00b0C"}`, // \u00b0 is the degree sign
		path:    []string{"c"},
		isFound: true,
		data:    `15°C`,
	},
	{
		desc:    `Translate supplementary Unicode symbols`,
		json:    `{"c": "\uD83D\uDE03"}`, // Smiley face (UTF16 surrogate pair)
		path:    []string{"c"},
		isFound: true,
		data:    "\U0001F603", // Smiley face
	},
	{
		desc:    `Translate escape symbols`,
		json:    `{"c": "\\\""}`,
		path:    []string{"c"},
		isFound: true,
		data:    `\"`,
	},
	{
		desc:    `key and value with whitespace escapes`,
		json:    `{"key\b\f\n\r\tkey":"value\b\f\n\r\tvalue"}`,
		path:    []string{"key\b\f\n\r\tkey"},
		isFound: true,
		data:    "value\b\f\n\r\tvalue", // value is unescaped since this is GetString()
	},
	{ // This test checks we avoid an infinite loop for certain malformed JSON. We don't check for all malformed JSON as it would reduce performance.
		desc:    `malformed with double quotes`,
		json:    `{"a"":1}`,
		path:    []string{"a"},
		isFound: false,
		data:    ``,
	},
	{ // More malformed JSON testing, to be sure we avoid an infinite loop.
		desc:    `malformed with double quotes, and path does not exist`,
		json:    `{"z":123,"y":{"x":7,"w":0},"v":{"u":"t","s":"r","q":0,"p":1558051800},"a":"b","c":"2016-11-02T20:10:11Z","d":"e","f":"g","h":{"i":"j""},"k":{"l":"m"}}`,
		path:    []string{"o"},
		isFound: false,
		data:    ``,
	},
	{
		desc:  `read non-string as string`,
		json:  `{"c": true}`,
		path:  []string{"c"},
		isErr: true,
	},
	{
		desc:    `empty array index`,
		json:    `[""]`,
		path:    []string{"[]"}, // "[]" is not a valid index expression
		isFound: false,
	},
	{
		desc:    `malformed array index`,
		json:    `[""]`,
		path:    []string{"["}, // unterminated index expression
		isFound: false,
	},
}
// getUnsafeStringTests drives TestGetUnsafeString. GetUnsafeString() returns
// the raw bytes without unescaping, so expected values keep their JSON escape
// sequences verbatim (contrast with getStringTests).
var getUnsafeStringTests = []GetTest{
	{
		desc:    `Do not translate Unicode symbols`,
		json:    `{"c": "test"}`,
		path:    []string{"c"},
		isFound: true,
		data:    `test`,
	},
	{
		desc:    `Do not translate Unicode symbols`,
		json:    `{"c": "15\u00b0C"}`,
		path:    []string{"c"},
		isFound: true,
		data:    `15\u00b0C`, // escape sequence preserved as-is
	},
	{
		desc:    `Do not translate supplementary Unicode symbols`,
		json:    `{"c": "\uD83D\uDE03"}`, // Smiley face (UTF16 surrogate pair)
		path:    []string{"c"},
		isFound: true,
		data:    `\uD83D\uDE03`, // Smiley face
	},
	{
		desc:    `Do not translate escape symbols`,
		json:    `{"c": "\\\""}`,
		path:    []string{"c"},
		isFound: true,
		data:    `\\\"`,
	},
}
// getBoolTests drives TestGetBoolean: true/false in compact and formatted
// JSON, bogus literals starting with 't'/'f', and whitespace tolerance.
var getBoolTests = []GetTest{
	{
		desc:    `read boolean true as boolean`,
		json:    `{"a": "b", "c": true}`,
		path:    []string{"c"},
		isFound: true,
		data:    true,
	},
	{
		desc:    `boolean true in formatted JSON`,
		json:    "{\"a\": \"b\", \"c\": true \n}",
		path:    []string{"c"},
		isFound: true,
		data:    true,
	},
	{
		desc:    `read boolean false as boolean`,
		json:    `{"a": "b", "c": false}`,
		path:    []string{"c"},
		isFound: true,
		data:    false,
	},
	{
		// NOTE(review): desc looks copy-pasted from the `true` case above;
		// this case actually exercises false in formatted JSON.
		desc:    `boolean true in formatted JSON`,
		json:    "{\"a\": \"b\", \"c\": false \n}",
		path:    []string{"c"},
		isFound: true,
		data:    false,
	},
	{
		desc:  `read fake boolean true`,
		json:  `{"a": txyz}`, // starts like "true" but is not
		path:  []string{"a"},
		isErr: true,
	},
	{
		desc:  `read fake boolean false`,
		json:  `{"a": fwxyz}`, // starts like "false" but is not
		path:  []string{"a"},
		isErr: true,
	},
	{
		desc:    `read boolean true with whitespace and another key`,
		json:    "{\r\t\n \"a\"\r\t\n :\r\t\n true\r\t\n ,\r\t\n \"b\": 1}",
		path:    []string{"a"},
		isFound: true,
		data:    true,
	},
}
// getArrayTests drives TestGetSlice. Expected data is the []string produced
// by splitting the returned array bytes into per-element raw values.
var getArrayTests = []GetTest{
	{
		desc:    `read array of simple values`,
		json:    `{"a": { "b":[1,2,3,4]}}`,
		path:    []string{"a", "b"},
		isFound: true,
		data:    []string{`1`, `2`, `3`, `4`},
	},
	{
		desc:    `read array via empty path`, // empty path returns the whole document
		json:    `[1,2,3,4]`,
		path:    []string{},
		isFound: true,
		data:    []string{`1`, `2`, `3`, `4`},
	},
	{
		desc:    `read array of objects`,
		json:    `{"a": { "b":[{"x":1},{"x":2},{"x":3},{"x":4}]}}`,
		path:    []string{"a", "b"},
		isFound: true,
		data:    []string{`{"x":1}`, `{"x":2}`, `{"x":3}`, `{"x":4}`},
	},
	{
		desc:    `read nested array`, // elements are themselves arrays, kept raw
		json:    `{"a": [[[1]],[[2]]]}`,
		path:    []string{"a"},
		isFound: true,
		data:    []string{`[[1]]`, `[[2]]`},
	},
}
// getTestCheckFoundAndNoError checks the found/error outcome of a Get*() call
// against the test case expectations.
// Returns true if the test should proceed to checking the actual data
// returned from Get*(), or false if the test is finished.
func getTestCheckFoundAndNoError(t *testing.T, testKind string, test GetTest, jtype ValueType, value interface{}, err error) bool {
	gotFound := err != KeyPathNotFoundError
	gotErr := err != nil && err != KeyPathNotFoundError

	switch {
	case test.isErr != gotErr:
		// Error expectation mismatch: report and stop this case.
		t.Errorf("%s test '%s' isErr mismatch: expected %t, obtained %t (err %v). Value: %v", testKind, test.desc, test.isErr, gotErr, err, value)
		return false
	case gotErr:
		// The expected error occurred; nothing further to verify.
		return false
	case test.isFound != gotFound:
		// Found/not-found expectation mismatch: report and stop this case.
		t.Errorf("%s test '%s' isFound mismatch: expected %t, obtained %t", testKind, test.desc, test.isFound, gotFound)
		return false
	case !gotFound:
		// Key legitimately absent; nothing further to verify.
		return false
	default:
		// No error and a value was found: caller should verify the value.
		return true
	}
}
// runGetTests executes each GetTest in tests via runner and validates the
// outcome: first the found/error expectations (getTestCheckFoundAndNoError),
// then the returned value via resultChecker. Setting the package-level
// activeTest narrows the run to a single case by description.
func runGetTests(t *testing.T, testKind string, tests []GetTest, runner func(GetTest) (interface{}, ValueType, error), resultChecker func(GetTest, interface{}) (bool, interface{})) {
	for _, test := range tests {
		if activeTest != "" && test.desc != activeTest {
			continue
		}

		fmt.Println("Running:", test.desc)

		value, dataType, err := runner(test)
		if getTestCheckFoundAndNoError(t, testKind, test, dataType, value, err) {
			if test.data == nil {
				t.Errorf("MALFORMED TEST: %v", test)
				continue
			}

			if ok, expected := resultChecker(test, value); !ok {
				// Render []byte operands as strings for readable failure output.
				if expectedBytes, ok := expected.([]byte); ok {
					expected = string(expectedBytes)
				}
				if valueBytes, ok := value.([]byte); ok {
					value = string(valueBytes)
				}
				// Fixed failure-message grammar ("but did returned" -> "but returned").
				t.Errorf("%s test '%s' expected to return value %v, but returned %v instead", testKind, test.desc, expected, value)
			}
		}
	}
}
// setTestCheckFoundAndNoError checks the found/error outcome of a Set() call
// against the test case expectations.
// Returns true if the test should proceed to comparing the produced JSON,
// or false if the test is finished.
func setTestCheckFoundAndNoError(t *testing.T, testKind string, test SetTest, value interface{}, err error) bool {
	gotFound := err != KeyPathNotFoundError
	gotErr := err != nil && err != KeyPathNotFoundError

	switch {
	case test.isErr != gotErr:
		// Error expectation mismatch: report and stop this case.
		t.Errorf("%s test '%s' isErr mismatch: expected %t, obtained %t (err %v). Value: %v", testKind, test.desc, test.isErr, gotErr, err, value)
		return false
	case gotErr:
		// The expected error occurred; nothing further to verify.
		return false
	case test.isFound != gotFound:
		// Found/not-found expectation mismatch: report and stop this case.
		t.Errorf("%s test '%s' isFound mismatch: expected %t, obtained %t", testKind, test.desc, test.isFound, gotFound)
		return false
	case !gotFound:
		// Key legitimately absent; nothing further to verify.
		return false
	default:
		// No error and a value was found: caller should verify the value.
		return true
	}
}
// runSetTests executes each SetTest via runner, then verifies both the
// found/error expectations and the produced JSON bytes against test.data.
// The resultChecker parameter is kept for signature parity with runGetTests
// but the byte comparison is done inline, as in the original.
func runSetTests(t *testing.T, testKind string, tests []SetTest, runner func(SetTest) (interface{}, ValueType, error), resultChecker func(SetTest, interface{}) (bool, interface{})) {
	for _, tc := range tests {
		if activeTest != "" && tc.desc != activeTest {
			continue // a single case was selected via activeTest
		}

		fmt.Println("Running:", tc.desc)

		result, _, err := runner(tc)
		if !setTestCheckFoundAndNoError(t, testKind, tc, result, err) {
			continue
		}
		if tc.data == nil {
			t.Errorf("MALFORMED TEST: %v", tc)
			continue
		}
		if got := string(result.([]byte)); got != tc.data {
			t.Errorf("Unexpected result on %s test '%s'", testKind, tc.desc)
			t.Log("Got: ", got)
			t.Log("Expected:", tc.data)
			t.Log("Error: ", err)
		}
	}
}
// runDeleteTests executes each DeleteTest via runner and validates two things:
//  1. the buffer the runner handed to Delete still matches a copy taken
//     before the call (guards against input corruption),
//  2. the returned document matches test.data via resultChecker.
func runDeleteTests(t *testing.T, testKind string, tests []DeleteTest, runner func(DeleteTest) (interface{}, []byte), resultChecker func(DeleteTest, interface{}) (bool, interface{})) {
	for _, test := range tests {
		if activeTest != "" && test.desc != activeTest {
			continue
		}

		// Snapshot the input so in-place mutation by Delete can be detected.
		original := make([]byte, len(test.json))
		copy(original, test.json)

		fmt.Println("Running:", test.desc)

		value, buf := runner(test) // renamed from `bytes` to avoid shadowing the stdlib package name

		if string(original) != string(buf) {
			t.Errorf("ORIGINAL DATA MALFORMED: %v, %v", string(original), string(buf))
			continue
		}

		if test.data == nil {
			t.Errorf("MALFORMED TEST: %v", test)
			continue
		}

		if ok, expected := resultChecker(test, value); !ok {
			// Render []byte operands as strings for readable failure output.
			if expectedBytes, ok := expected.([]byte); ok {
				expected = string(expectedBytes)
			}
			if valueBytes, ok := value.([]byte); ok {
				value = string(valueBytes)
			}
			// Fixed failure-message grammar ("but did returned" -> "but returned").
			t.Errorf("%s test '%s' expected to return value %v, but returned %v instead", testKind, test.desc, expected, value)
		}
	}
}
func TestSet(t *testing.T) {
runSetTests(t, "Set()", setTests,
func(test SetTest) (value interface{}, dataType ValueType, err error) {
value, err = Set([]byte(test.json), []byte(test.setData), test.path...)
return
},
func(test SetTest, value interface{}) (bool, interface{}) {
expected := []byte(test.data.(string))
return bytes.Equal(expected, value.([]byte)), expected
},
)
}
func TestDelete(t *testing.T) {
runDeleteTests(t, "Delete()", deleteTests,
func(test DeleteTest) (interface{}, []byte) {
ba := []byte(test.json)
return Delete(ba, test.path...), ba
},
func(test DeleteTest, value interface{}) (bool, interface{}) {
expected := []byte(test.data.(string))
return bytes.Equal(expected, value.([]byte)), expected
},
)
}
func TestGet(t *testing.T) {
runGetTests(t, "Get()", getTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, dataType, _, err = Get([]byte(test.json), test.path...)
return
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := []byte(test.data.(string))
return bytes.Equal(expected, value.([]byte)), expected
},
)
}
func TestGetString(t *testing.T) {
runGetTests(t, "GetString()", getStringTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, err = GetString([]byte(test.json), test.path...)
return value, String, err
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.(string)
return expected == value.(string), expected
},
)
}
func TestGetUnsafeString(t *testing.T) {
runGetTests(t, "GetUnsafeString()", getUnsafeStringTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, err = GetUnsafeString([]byte(test.json), test.path...)
return value, String, err
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.(string)
return expected == value.(string), expected
},
)
}
func TestGetInt(t *testing.T) {
runGetTests(t, "GetInt()", getIntTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, err = GetInt([]byte(test.json), test.path...)
return value, Number, err
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.(int64)
return expected == value.(int64), expected
},
)
}
func TestGetFloat(t *testing.T) {
runGetTests(t, "GetFloat()", getFloatTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, err = GetFloat([]byte(test.json), test.path...)
return value, Number, err
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.(float64)
return expected == value.(float64), expected
},
)
}
func TestGetBoolean(t *testing.T) {
runGetTests(t, "GetBoolean()", getBoolTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, err = GetBoolean([]byte(test.json), test.path...)
return value, Boolean, err
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.(bool)
return expected == value.(bool), expected
},
)
}
func TestGetSlice(t *testing.T) {
runGetTests(t, "Get()-for-arrays", getArrayTests,
func(test GetTest) (value interface{}, dataType ValueType, err error) {
value, dataType, _, err = Get([]byte(test.json), test.path...)
return
},
func(test GetTest, value interface{}) (bool, interface{}) {
expected := test.data.([]string)
return reflect.DeepEqual(expected, toStringArray(value.([]byte))), expected
},
)
}
// TestArrayEach verifies that ArrayEach visits exactly the four elements of a
// nested array, in order, preserving each element's raw bytes (including any
// interior whitespace).
func TestArrayEach(t *testing.T) {
	mock := []byte(`{"a": { "b":[{"x": 1} ,{"x":2},{ "x":3}, {"x":4} ]}}`)

	// Expected raw element bytes paired with the original failure messages.
	steps := []struct {
		want string
		msg  string
	}{
		{`{"x": 1}`, "Wrong first item: %s"},
		{`{"x":2}`, "Wrong second item: %s"},
		{`{ "x":3}`, "Wrong third item: %s"},
		{`{"x":4}`, "Wrong forth item: %s"},
	}

	count := 0
	ArrayEach(mock, func(value []byte, dataType ValueType, offset int, err error) {
		count++
		if count > len(steps) {
			t.Errorf("Should process only 4 items")
			return
		}
		step := steps[count-1]
		if string(value) != step.want {
			t.Errorf(step.msg, string(value))
		}
	}, "a", "b")
}
// TestArrayEachWithWhiteSpace covers issue #159: leading whitespace before a
// top-level array must be tolerated, while whitespace followed by a non-array
// value (or nothing) must produce an error without invoking the callback.
func TestArrayEachWithWhiteSpace(t *testing.T) {
	//Issue #159
	count := 0
	funcError := func([]byte, ValueType, int, error) { t.Errorf("Run func not allow") }

	// Expected items paired with the original failure messages.
	steps := []struct {
		want string
		msg  string
	}{
		{`AAA`, "Wrong first item: %s"},
		{`BBB`, "Wrong second item: %s"},
		{`CCC`, "Wrong third item: %s"},
	}
	funcSuccess := func(value []byte, dataType ValueType, index int, err error) {
		count++
		if count > len(steps) {
			t.Errorf("Should process only 3 items")
			return
		}
		step := steps[count-1]
		if string(value) != step.want {
			t.Errorf(step.msg, string(value))
		}
	}

	type args struct {
		data []byte
		cb   func(value []byte, dataType ValueType, offset int, err error)
		keys []string
	}
	cases := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{"Array with white space", args{[]byte(` ["AAA", "BBB", "CCC"]`), funcSuccess, []string{}}, false},
		{"Array with only one character after white space", args{[]byte(` 1`), funcError, []string{}}, true},
		{"Only white space", args{[]byte(` `), funcError, []string{}}, true},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			if _, err := ArrayEach(tc.args.data, tc.args.cb, tc.args.keys...); (err != nil) != tc.wantErr {
				t.Errorf("ArrayEach() error = %v, wantErr %v", err, tc.wantErr)
			}
		})
	}
}
// TestArrayEachEmpty checks that empty arrays (bare or behind a key, with
// optional internal whitespace) invoke no callback and report the expected
// end offset.
func TestArrayEachEmpty(t *testing.T) {
	funcError := func([]byte, ValueType, int, error) { t.Errorf("Run func not allow") }

	type args struct {
		data []byte
		cb   func(value []byte, dataType ValueType, offset int, err error)
		keys []string
	}
	cases := []struct {
		name       string
		args       args
		wantOffset int
		wantErr    bool
	}{
		{"Empty array", args{[]byte("[]"), funcError, []string{}}, 1, false},
		{"Empty array with space", args{[]byte("[ ]"), funcError, []string{}}, 2, false},
		{"Empty array with \n", args{[]byte("[\n]"), funcError, []string{}}, 2, false},
		{"Empty field array", args{[]byte("{\"data\": []}"), funcError, []string{"data"}}, 10, false},
		{"Empty field array with space", args{[]byte("{\"data\": [ ]}"), funcError, []string{"data"}}, 11, false},
		{"Empty field array with \n", args{[]byte("{\"data\": [\n]}"), funcError, []string{"data"}}, 11, false},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			gotOffset, err := ArrayEach(tc.args.data, tc.args.cb, tc.args.keys...)
			if (err != nil) != tc.wantErr {
				t.Errorf("ArrayEach() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			if gotOffset != tc.wantOffset {
				t.Errorf("ArrayEach() = %v, want %v", gotOffset, tc.wantOffset)
			}
		})
	}
}
// keyValueEntry records one key/value pair visited by ObjectEach, so the
// visit order and contents can be compared against expectations.
type keyValueEntry struct {
	key       string    // object key (as delivered by ObjectEach)
	value     string    // raw value bytes converted to a string
	valueType ValueType // value type reported by ObjectEach
}
// String renders the entry as "[key: value (type)]" for readable test output.
func (e keyValueEntry) String() string {
	return fmt.Sprintf("[%s: %s (%s)]", e.key, e.value, e.valueType)
}
// ObjectEachTest describes one ObjectEach case: the JSON to iterate, whether
// iteration is expected to fail, and the exact entries expected in order.
type ObjectEachTest struct {
	desc    string          // human-readable case name (also used by activeTest filtering)
	json    string          // input document
	isErr   bool            // true if ObjectEach should return an error
	entries []keyValueEntry // expected key/value/type triples, in visit order
}
// objectEachTests drives TestObjectEach: well-formed objects of varying
// shapes (including escaped keys), followed by malformed inputs that must be
// rejected with an error.
var objectEachTests = []ObjectEachTest{
	{
		desc:    "empty object",
		json:    `{}`,
		entries: []keyValueEntry{},
	},
	{
		desc: "single key-value object",
		json: `{"key": "value"}`,
		entries: []keyValueEntry{
			{"key", "value", String},
		},
	},
	{
		desc: "multiple key-value object with many value types",
		json: `{
		"key1": null,
		"key2": true,
		"key3": 1.23,
		"key4": "string value",
		"key5": [1,2,3],
		"key6": {"a":"b"}
	}`,
		entries: []keyValueEntry{
			{"key1", "null", Null},
			{"key2", "true", Boolean},
			{"key3", "1.23", Number},
			{"key4", "string value", String},
			{"key5", "[1,2,3]", Array},
			{"key6", `{"a":"b"}`, Object},
		},
	},
	{
		desc: "escaped key", // keys are unescaped before being handed to the callback
		json: `{"key\"\\\/\b\f\n\r\t\u00B0": "value"}`,
		entries: []keyValueEntry{
			{"key\"\\/\b\f\n\r\t\u00B0", "value", String},
		},
	},
	// Error cases
	{
		desc:  "no object present",
		json:  ` \t\n\r`, // raw backslash escapes, not actual whitespace
		isErr: true,
	},
	{
		desc:  "unmatched braces 1",
		json:  `{`,
		isErr: true,
	},
	{
		desc:  "unmatched braces 2",
		json:  `}`,
		isErr: true,
	},
	{
		desc:  "unmatched braces 3",
		json:  `}{}`,
		isErr: true,
	},
	{
		desc:  "bad key (number)",
		json:  `{123: "value"}`,
		isErr: true,
	},
	{
		desc:  "bad key (unclosed quote)",
		json:  `{"key: 123}`,
		isErr: true,
	},
	{
		desc:  "bad value (no value)",
		json:  `{"key":}`,
		isErr: true,
	},
	{
		desc:  "bad value (bogus value)",
		json:  `{"key": notavalue}`,
		isErr: true,
	},
	{
		desc:  "bad entry (missing colon)",
		json:  `{"key" "value"}`,
		isErr: true,
	},
	{
		desc:  "bad entry (no trailing comma)",
		json:  `{"key": "value" "key2": "value2"}`,
		isErr: true,
	},
	{
		desc:  "bad entry (two commas)",
		json:  `{"key": "value",, "key2": "value2"}`,
		isErr: true,
	},
}
// TestObjectEach runs every objectEachTests case: it records all entries
// visited by ObjectEach and compares them, in order, against the expected
// entries (or checks that the expected error occurred).
func TestObjectEach(t *testing.T) {
	for _, test := range objectEachTests {
		if activeTest != "" && test.desc != activeTest {
			continue
		}
		// Execute ObjectEach and capture all of the entries visited, in order
		var entries []keyValueEntry
		err := ObjectEach([]byte(test.json), func(key, value []byte, valueType ValueType, off int) error {
			entries = append(entries, keyValueEntry{
				key:       string(key),
				value:     string(value),
				valueType: valueType,
			})
			return nil
		})
		// Check the correctness of the result
		isErr := (err != nil)
		if test.isErr != isErr {
			// If the call didn't match the error expectation, fail
			t.Errorf("ObjectEach test '%s' isErr mismatch: expected %t, obtained %t (err %v)", test.desc, test.isErr, isErr, err)
		} else if isErr {
			// Else, if there was an expected error, don't fail and don't check anything further
		} else if len(test.entries) != len(entries) {
			// Entry count mismatch: report the entries actually collected
			t.Errorf("ObjectEach test '%s' mismatch in number of key-value entries: expected %d, obtained %d (entries found: %s)", test.desc, len(test.entries), len(entries), entries)
		} else {
			// Same length: compare each entry field-by-field, stopping at the first difference
			for i, entry := range entries {
				expectedEntry := test.entries[i]
				if expectedEntry.key != entry.key {
					t.Errorf("ObjectEach test '%s' key mismatch at entry %d: expected %s, obtained %s", test.desc, i, expectedEntry.key, entry.key)
					break
				} else if expectedEntry.value != entry.value {
					t.Errorf("ObjectEach test '%s' value mismatch at entry %d: expected %s, obtained %s", test.desc, i, expectedEntry.value, entry.value)
					break
				} else if expectedEntry.valueType != entry.valueType {
					t.Errorf("ObjectEach test '%s' value type mismatch at entry %d: expected %s, obtained %s", test.desc, i, expectedEntry.valueType, entry.valueType)
					break
				} else {
					// Success for this entry
				}
			}
		}
	}
}
// testJson is the shared fixture for TestEachKey: nested objects, an array of
// objects, an int array, and keys containing an escaped newline ("a\n",
// issue #165). Whitespace inside the literal is JSON-insignificant.
var testJson = []byte(`{
	"name": "Name",
	"order": "Order",
	"sum": 100,
	"len": 12,
	"isPaid": true,
	"nested": {"a":"test", "b":2, "nested3":{"a":"test3","b":4}, "c": "unknown"},
	"nested2": {
		"a":"test2",
		"b":3
	},
	"arr": [
		{
			"a":"zxc",
			"b": 1
		},
		{
			"a":"123",
			"b":2
		}
	],
	"arrInt": [1,2,3,4],
	"intPtr": 10,
	"a\n":{
		"b\n":99
	}
}`)
// TestEachKey verifies EachKey against testJson: 13 paths are requested, of
// which two (indices 8 and 10) must NOT be found, so exactly 11 callbacks are
// expected. Each switch case below corresponds by index to an entry in paths.
// NOTE(review): several message strings use stale ordinal numbers (e.g.
// "Should find only 10 keys" where 11 are expected) — messages only, the
// assertions themselves are consistent.
func TestEachKey(t *testing.T) {
	paths := [][]string{
		{"name"},                // idx 0
		{"order"},               // idx 1
		{"nested", "a"},         // idx 2
		{"nested", "b"},         // idx 3
		{"nested2", "a"},        // idx 4
		{"nested", "nested3", "b"}, // idx 5
		{"arr", "[1]", "b"},     // idx 6
		{"arrInt", "[3]"},       // idx 7
		{"arrInt", "[5]"}, // Should not find last key
		{"nested"},              // idx 9
		{"arr", "["}, // issue#177 Invalid arguments
		{"a\n", "b\n"}, // issue#165
		{"nested", "b"}, // Should find repeated key
	}
	keysFound := 0
	EachKey(testJson, func(idx int, value []byte, vt ValueType, err error) {
		keysFound++
		switch idx {
		case 0:
			if string(value) != "Name" {
				t.Error("Should find 1 key", string(value))
			}
		case 1:
			if string(value) != "Order" {
				t.Errorf("Should find 2 key")
			}
		case 2:
			if string(value) != "test" {
				t.Errorf("Should find 3 key")
			}
		case 3:
			if string(value) != "2" {
				t.Errorf("Should find 4 key")
			}
		case 4:
			if string(value) != "test2" {
				t.Error("Should find 5 key", string(value))
			}
		case 5:
			if string(value) != "4" {
				t.Errorf("Should find 6 key")
			}
		case 6:
			if string(value) != "2" {
				t.Errorf("Should find 7 key")
			}
		case 7:
			if string(value) != "4" {
				t.Error("Should find 8 key", string(value))
			}
		case 8:
			// Path {"arrInt", "[5]"} is out of range and must not match
			t.Errorf("Found key #8 that should not be found")
		case 9:
			if string(value) != `{"a":"test", "b":2, "nested3":{"a":"test3","b":4}, "c": "unknown"}` {
				t.Error("Should find 9 key", string(value))
			}
		case 10:
			// Path {"arr", "["} is malformed and must not match (issue #177)
			t.Errorf("Found key #10 that should not be found")
		case 11:
			if string(value) != "99" {
				t.Error("Should find 10 key", string(value))
			}
		case 12:
			if string(value) != "2" {
				t.Errorf("Should find 11 key")
			}
		default:
			t.Errorf("Should find only 10 keys, got %v key", idx)
		}
	}, paths...)
	if keysFound != 11 {
		t.Errorf("Should find 11 keys: %d", keysFound)
	}
}
// ParseTest describes one case for the Parse* value-conversion helpers.
type ParseTest struct {
	in     string      // raw input handed to the Parse* function
	intype ValueType   // nominal type of the input (documentation only; not asserted)
	out    interface{} // expected parsed value when no error is expected
	isErr  bool        // true if parsing is expected to fail
}
// parseBoolTests drives TestParseBoolean: exact "true"/"false" parse, while
// anything else — including prefixes, suffixed forms, and empty input — fails.
var parseBoolTests = []ParseTest{
	{
		in:     "true",
		intype: Boolean,
		out:    true,
	},
	{
		in:     "false",
		intype: Boolean,
		out:    false,
	},
	{
		in:     "foo",
		intype: Boolean,
		isErr:  true,
	},
	{
		in:     "trux", // shares a prefix with "true" but is not it
		intype: Boolean,
		isErr:  true,
	},
	{
		in:     "truex", // "true" plus trailing garbage
		intype: Boolean,
		isErr:  true,
	},
	{
		in:     "",
		intype: Boolean,
		isErr:  true,
	},
}
// parseFloatTest drives TestParseFloat: valid decimal and exponent forms,
// a leading '+' (accepted via strconv.ParseFloat though RFC 7159 forbids it),
// and malformed numbers that must fail.
var parseFloatTest = []ParseTest{
	{
		in:     "0",
		intype: Number,
		out:    float64(0),
	},
	{
		in:     "0.0",
		intype: Number,
		out:    float64(0.0),
	},
	{
		in:     "1",
		intype: Number,
		out:    float64(1),
	},
	{
		in:     "1.234",
		intype: Number,
		out:    float64(1.234),
	},
	{
		in:     "1.234e5",
		intype: Number,
		out:    float64(1.234e5),
	},
	{
		in:     "-1.234e5",
		intype: Number,
		out:    float64(-1.234e5),
	},
	{
		in:     "+1.234e5", // Note: + sign not allowed under RFC7159, but our parser accepts it since it uses strconv.ParseFloat
		intype: Number,
		out:    float64(1.234e5),
	},
	{
		in:     "1.2.3", // two decimal points
		intype: Number,
		isErr:  true,
	},
	{
		in:     "1..1", // consecutive decimal points
		intype: Number,
		isErr:  true,
	},
	{
		in:     "1a", // trailing non-numeric character
		intype: Number,
		isErr:  true,
	},
	{
		in:     "",
		intype: Number,
		isErr:  true,
	},
}
// parseTestCheckNoError checks the error return from Parse*() against the
// test case expectations.
// Returns true if the test should proceed to checking the actual data
// returned from Parse*(), or false if the test is finished.
func parseTestCheckNoError(t *testing.T, testKind string, test ParseTest, value interface{}, err error) bool {
	gotErr := err != nil
	if test.isErr != gotErr {
		// Error expectation mismatch: report and stop this case.
		t.Errorf("%s test '%s' isErr mismatch: expected %t, obtained %t (err %v). Obtained value: %v", testKind, test.in, test.isErr, gotErr, err, value)
		return false
	}
	// An expected error means there is no value to verify; otherwise proceed.
	return !gotErr
}
// runParseTests executes each ParseTest via runner and, when no error was
// expected, validates the parsed value via resultChecker.
func runParseTests(t *testing.T, testKind string, tests []ParseTest, runner func(ParseTest) (interface{}, error), resultChecker func(ParseTest, interface{}) (bool, interface{})) {
	for _, test := range tests {
		value, err := runner(test)
		if parseTestCheckNoError(t, testKind, test, value, err) {
			if test.out == nil {
				t.Errorf("MALFORMED TEST: %v", test)
				continue
			}

			if ok, expected := resultChecker(test, value); !ok {
				// Render []byte operands as strings for readable failure output.
				if expectedBytes, ok := expected.([]byte); ok {
					expected = string(expectedBytes)
				}
				if valueBytes, ok := value.([]byte); ok {
					value = string(valueBytes)
				}
				// Fixed failure-message grammar ("but did returned" -> "but returned").
				t.Errorf("%s test '%s' expected to return value %v, but returned %v instead", testKind, test.in, expected, value)
			}
		}
	}
}
func TestParseBoolean(t *testing.T) {
runParseTests(t, "ParseBoolean()", parseBoolTests,
func(test ParseTest) (value interface{}, err error) {
return ParseBoolean([]byte(test.in))
},
func(test ParseTest, obtained interface{}) (bool, interface{}) {
expected := test.out.(bool)
return obtained.(bool) == expected, expected
},
)
}
func TestParseFloat(t *testing.T) {
runParseTests(t, "ParseFloat()", parseFloatTest,
func(test ParseTest) (value interface{}, err error) {
return ParseFloat([]byte(test.in))
},
func(test ParseTest, obtained interface{}) (bool, interface{}) {
expected := test.out.(float64)
return obtained.(float64) == expected, expected
},
)
}
// parseStringTest drives TestParseString: Unicode escapes must decode to the
// corresponding runes, and a lone low surrogate (\uDF00) must be rejected.
var parseStringTest = []ParseTest{
	{
		in:     `\uFF11`, // fullwidth digit one
		intype: String,
		out:    "\uFF11",
	},
	{
		in:     `\uFFFF`, // highest BMP code point
		intype: String,
		out:    "\uFFFF",
	},
	{
		in:     `\uDF00`, // unpaired low surrogate: invalid
		intype: String,
		isErr:  true,
	},
}
func TestParseString(t *testing.T) {
runParseTests(t, "ParseString()", parseStringTest,
func(test ParseTest) (value interface{}, err error) {
return ParseString([]byte(test.in))
},
func(test ParseTest, obtained interface{}) (bool, interface{}) {
expected := test.out.(string)
return obtained.(string) == expected, expected
},
)
}