
Would you please add goccy/go-json as a benchmark target? #17

Closed
goccy opened this issue Jun 11, 2021 · 16 comments

@goccy (Contributor) commented Jun 11, 2021

I develop goccy/go-json.
Since this library shares the same goals as yours, namely compatibility with encoding/json and high performance,
I would be grateful if you could add it as a benchmark target.

@AsterDY (Collaborator) commented Jun 21, 2021

Here are the benchmark results for our data:

  • small
goos: darwin
goarch: amd64
pkg: code.byted.org/middleware/json_perf
cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
BenchmarkEncodeSmallMap_StdJson-16             	  500000	     10590 ns/op	  34.47 MB/s	    3771 B/op	      77 allocs/op
BenchmarkEncodeSmallMap_GoJson-16              	  500000	      4825 ns/op	  75.65 MB/s	     866 B/op	       6 allocs/op
BenchmarkEncodeSmallMap_JsonIterator-16        	  500000	      9414 ns/op	  38.77 MB/s	    3556 B/op	      57 allocs/op
BenchmarkEncodeSmallMap_Sonic-16               	  500000	      2267 ns/op	 161.00 MB/s	    1128 B/op	       9 allocs/op
BenchmarkMarshalSmallStruct_StdJson-16         	  500000	      1614 ns/op	 227.39 MB/s	     384 B/op	       1 allocs/op
BenchmarkMarshalSmallStruct_GoJson-16          	  500000	       967.7 ns/op	 379.27 MB/s	     384 B/op	       1 allocs/op
BenchmarkMarshalSmallStruct_JsonIterator-16    	  500000	      1682 ns/op	 218.21 MB/s	     392 B/op	       2 allocs/op
BenchmarkMarshalSmallStruct_Easyjson-16        	  500000	      1258 ns/op	 291.70 MB/s	    1072 B/op	       5 allocs/op
BenchmarkMarshalSmallStruct_Sonic-16           	  500000	       622.9 ns/op	 589.20 MB/s	     614 B/op	       4 allocs/op
BenchmarkParseSmallMap_StdJson-16              	  500000	      7496 ns/op	  48.69 MB/s	    3491 B/op	      90 allocs/op
BenchmarkParseSmallMap_GoJson-16               	  500000	      7744 ns/op	  47.14 MB/s	    4123 B/op	      98 allocs/op
BenchmarkParseSmallMap_JsonIterator-16         	  500000	      8112 ns/op	  45.00 MB/s	    3734 B/op	     111 allocs/op
BenchmarkParseSmallMap_Gjson-16                	  500000	      5510 ns/op	  66.24 MB/s	    3026 B/op	      45 allocs/op
BenchmarkParseSmallMap_Simdjson-16             	  500000	     13087 ns/op	  27.89 MB/s	    3440 B/op	      81 allocs/op
BenchmarkParseSmallMap_Fastjson-16             	  500000	      5208 ns/op	  70.08 MB/s	    2938 B/op	      76 allocs/op
BenchmarkParseSmallMap_Sonic-16                	  500000	      3550 ns/op	 102.81 MB/s	    3311 B/op	      37 allocs/op
BenchmarkBindSmallStruct_StdJson-16            	  500000	      6697 ns/op	  54.50 MB/s	     440 B/op	      24 allocs/op
BenchmarkBindSmallStruct_GoJson-16             	  500000	      1341 ns/op	 272.15 MB/s	     386 B/op	       1 allocs/op
BenchmarkBindSmallStruct_JsonIterator-16       	  500000	      1796 ns/op	 203.27 MB/s	     128 B/op	      18 allocs/op
BenchmarkBindSmallStruct_Easyjson-16           	  500000	      1960 ns/op	 186.20 MB/s	      64 B/op	       7 allocs/op
BenchmarkBindSmallStruct_Gjson-16              	  500000	      4567 ns/op	  79.93 MB/s	    5005 B/op	      21 allocs/op
BenchmarkBindSmallStruct_Simdjson-16           	  500000	      9533 ns/op	  38.29 MB/s	     856 B/op	      39 allocs/op
BenchmarkBindSmallStruct_Fastjson-16           	  500000	      1902 ns/op	 191.90 MB/s	     693 B/op	       9 allocs/op
BenchmarkBindSmallStruct_Sonic-16              	  500000	      1986 ns/op	 183.75 MB/s	     447 B/op	       1 allocs/op
BenchmarkGetIntFromSmall_JsonIterator-16       	  500000	      2485 ns/op	 146.85 MB/s	     584 B/op	      41 allocs/op
BenchmarkGetIntFromSmall_Gjson-16              	  500000	       746.3 ns/op	 489.05 MB/s	      48 B/op	       4 allocs/op
BenchmarkGetIntFromSmall_Fastjson-16           	  500000	       885.3 ns/op	 412.27 MB/s	       0 B/op	       0 allocs/op
BenchmarkGetIntFromSmall_Sonic-16              	  500000	      1512 ns/op	 241.33 MB/s	       0 B/op	       0 allocs/op
  • medium
goos: darwin
goarch: amd64
pkg: code.byted.org/middleware/json_perf
cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
BenchmarkEncodeMediumMap_StdJson-16             	   10000	    280746 ns/op	 404.67 MB/s	  130925 B/op	     339 allocs/op
BenchmarkEncodeMediumMap_GoJson-16              	   10000	    196889 ns/op	 577.03 MB/s	  121545 B/op	       9 allocs/op
BenchmarkEncodeMediumMap_JsonIterator-16        	   10000	    227396 ns/op	 499.61 MB/s	  142481 B/op	     209 allocs/op
BenchmarkEncodeMediumMap_Sonic-16               	   10000	    201627 ns/op	 563.47 MB/s	  146863 B/op	      13 allocs/op
BenchmarkMarshalMediumStruct_StdJson-16         	   10000	    263802 ns/op	 430.66 MB/s	  127194 B/op	     239 allocs/op
BenchmarkMarshalMediumStruct_GoJson-16          	   10000	    187351 ns/op	 606.40 MB/s	  117247 B/op	       5 allocs/op
BenchmarkMarshalMediumStruct_JsonIterator-16    	   10000	    216246 ns/op	 525.37 MB/s	  153472 B/op	     137 allocs/op
BenchmarkMarshalMediumStruct_Easyjson-16        	   10000	    205750 ns/op	 552.18 MB/s	  118211 B/op	      15 allocs/op
BenchmarkMarshalMediumStruct_Sonic-16           	   10000	    202093 ns/op	 562.17 MB/s	  156560 B/op	       9 allocs/op
BenchmarkParseMediumMap_StdJson-16              	   10000	    644084 ns/op	 176.39 MB/s	  245140 B/op	     638 allocs/op
BenchmarkParseMediumMap_GoJson-16               	   10000	   4213246 ns/op	  26.96 MB/s	  146354 B/op	     772 allocs/op
BenchmarkParseMediumMap_JsonIterator-16         	   10000	    630752 ns/op	 180.12 MB/s	  532486 B/op	     958 allocs/op
BenchmarkParseMediumMap_Gjson-16                	   10000	    526684 ns/op	 215.71 MB/s	  355261 B/op	     393 allocs/op
BenchmarkParseMediumMap_Simdjson-16             	   10000	    263135 ns/op	 431.76 MB/s	  132462 B/op	     632 allocs/op
BenchmarkParseMediumMap_Fastjson-16             	   10000	   1399188 ns/op	  81.20 MB/s	  260763 B/op	     714 allocs/op
BenchmarkParseMediumMap_Sonic-16                	   10000	    133630 ns/op	 850.19 MB/s	  162181 B/op	     370 allocs/op
BenchmarkBindMediumStruct_StdJson-16            	   10000	    822807 ns/op	 138.08 MB/s	  233507 B/op	     662 allocs/op
BenchmarkBindMediumStruct_GoJson-16             	   10000	   3658608 ns/op	  31.05 MB/s	  122823 B/op	     431 allocs/op
BenchmarkBindMediumStruct_JsonIterator-16       	   10000	    533058 ns/op	 213.13 MB/s	  509237 B/op	     643 allocs/op
BenchmarkBindMediumStruct_Easyjson-16           	   10000	    294931 ns/op	 385.21 MB/s	  141230 B/op	     334 allocs/op
BenchmarkBindMediumStruct_Sonic-16              	   10000	     86638 ns/op	1311.32 MB/s	  113493 B/op	       9 allocs/op
BenchmarkGetIntFromMiddle_JsonIterator-16       	   10000	    503236 ns/op	 225.76 MB/s	  499197 B/op	     237 allocs/op
BenchmarkGetIntFromMiddle_Gjson-16              	   10000	    188409 ns/op	 603.00 MB/s	      48 B/op	       4 allocs/op
BenchmarkGetIntFromMiddle_Fastjson-16           	   10000	     48891 ns/op	2323.75 MB/s	       0 B/op	       0 allocs/op
BenchmarkGetIntFromMiddle_Sonic-16              	   10000	     35046 ns/op	3241.70 MB/s	       0 B/op	       0 allocs/op
  • large
goos: darwin
goarch: amd64
pkg: code.byted.org/middleware/json_perf
cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
BenchmarkEncodeLargeMap_StdJson-16          	    1000	   6300175 ns/op	  89.27 MB/s	 2065477 B/op	   33584 allocs/op
BenchmarkEncodeLargeMap_GoJson-16           	    1000	  20461298 ns/op	  27.49 MB/s	  673169 B/op	    1269 allocs/op
BenchmarkEncodeLargeMap_JsonIterator-16     	    1000	   5360517 ns/op	 104.92 MB/s	 2618777 B/op	   21302 allocs/op
BenchmarkEncodeLargeMap_Sonic-16            	    1000	   1092647 ns/op	 514.72 MB/s	  672902 B/op	    1268 allocs/op
BenchmarkMarhsalLargeStruct_Stdjson-16      	    1000	    813470 ns/op	 691.37 MB/s	  335194 B/op	    1631 allocs/op
BenchmarkMarhsalLargeStruct_GoJson-16       	    1000	   1796631 ns/op	 313.03 MB/s	  274117 B/op	     106 allocs/op
BenchmarkMarhsalLargeStruct_Jsoniter-16     	    1000	    893555 ns/op	 629.41 MB/s	  486490 B/op	    1220 allocs/op
BenchmarkMarhsalLargeStruct_Easyjson-16     	    1000	    750665 ns/op	 749.21 MB/s	  366868 B/op	    3067 allocs/op
BenchmarkMarhsalLargeStruct_Sonic-16        	    1000	    195884 ns/op	2871.13 MB/s	  336710 B/op	     109 allocs/op
BenchmarkParseLargeMap_StdJson-16           	    1000	   6334496 ns/op	  88.78 MB/s	 2330350 B/op	   31756 allocs/op
BenchmarkParseLargeMap_GoJson-16            	    1000	 224372703 ns/op	   2.51 MB/s	 2659144 B/op	   38648 allocs/op
BenchmarkParseLargeMap_JsonIterator-16      	    1000	   5714802 ns/op	  98.41 MB/s	 2609167 B/op	   47203 allocs/op
BenchmarkParseLargeMap_Gjson-16             	    1000	   6508536 ns/op	  86.41 MB/s	 2568583 B/op	   13186 allocs/op
BenchmarkParseLargeMap_Simdjson-16          	    1000	   4251712 ns/op	 132.28 MB/s	 2632829 B/op	   29890 allocs/op
BenchmarkParseLargeMap_Fastjson-16          	    1000	   5966901 ns/op	  94.25 MB/s	 2232564 B/op	   32861 allocs/op
BenchmarkParseLargeMap_Sonic-16             	    1000	   2763576 ns/op	 203.51 MB/s	 2140303 B/op	   12133 allocs/op
BenchmarkBindLargeStruct_Stdjson-16         	    1000	   5575405 ns/op	 100.87 MB/s	  312637 B/op	    5880 allocs/op
BenchmarkBindLargeStruct_GoJson-16          	    1000	 112113959 ns/op	   5.02 MB/s	  639628 B/op	    2294 allocs/op
BenchmarkBindLargeStruct_Jsoniter-16        	    1000	   2916025 ns/op	 192.87 MB/s	  730169 B/op	   20939 allocs/op
BenchmarkBindLargeStruct_Easyjson-16        	    1000	   3140324 ns/op	 179.09 MB/s	  235821 B/op	    4338 allocs/op
BenchmarkBindLargeStruct_Sonic-16           	    1000	   1098470 ns/op	 511.99 MB/s	  193234 B/op	    1489 allocs/op
BenchmarkGetIntFromLarge_JsonIterator-16    	    1000	   2480708 ns/op	 226.71 MB/s	 1217005 B/op	   18604 allocs/op
BenchmarkGetIntFromLarge_Gjson-16           	    1000	     98282 ns/op	5722.42 MB/s	      74 B/op	       4 allocs/op
BenchmarkGetIntFromLarge_Fastjson-16        	    1000	    423637 ns/op	1327.57 MB/s	    5000 B/op	       4 allocs/op
BenchmarkGetIntFromLarge_Sonic-16           	    1000	    141302 ns/op	3980.18 MB/s	       0 B/op	       0 allocs/op

It seems your optimizations work great on small JSON but have some scalability problems. You are welcome to use Sonic in your benchmarks, too.
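As the benchmark code later in this thread shows, the Encode/Parse cases above operate on generic values (interface{} / map[string]interface{}), while Marshal/Bind operate on concrete structs, and GetInt fetches a single integer by path. A minimal sketch of the struct-versus-generic marshal pair, using only encoding/json and a stand-in Book type (not the actual test data), looks roughly like this:

package jsonperf // hypothetical package name, not the one used above

import (
	"encoding/json"
	"testing"
)

// Stand-in type; the real Book used in the results has more fields.
type Book struct {
	Title string  `json:"title"`
	Price float64 `json:"price"`
}

// "Marshal" case: serialize a concrete struct.
func BenchmarkMarshalStruct_StdJson(b *testing.B) {
	book := Book{Title: "future", Price: 40.8}
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		if _, err := json.Marshal(&book); err != nil {
			b.Fatal(err)
		}
	}
}

// "Encode" case: serialize a generic map[string]interface{} value.
func BenchmarkEncodeGeneric_StdJson(b *testing.B) {
	v := map[string]interface{}{"title": "future", "price": 40.8}
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		if _, err := json.Marshal(&v); err != nil {
			b.Fatal(err)
		}
	}
}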

@goccy (Contributor, Author) commented Jun 21, 2021

@AsterDY
Thank you for sharing the benchmark results. However, could you share the benchmark code? The results alone cannot be evaluated properly.

@goccy (Contributor, Author) commented Jun 21, 2021

Also, I created a PR (goccy/go-json#254) to add your library as a benchmark target.

@AsterDY (Collaborator) commented Jun 22, 2021

> @AsterDY
> Thank you for sharing the benchmark results. However, could you share the benchmark code? The results alone cannot be evaluated properly.

Actually, we use the well-known big twitter JSON as our large data, so you can just try it. As for the medium data, it is real business data of ours that cannot be made public, but its scale is close to the benchmark sample we use.
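A minimal way to "just try it" with that file is sketched below; the path assumes a checkout of the sonic repository (testdata/twitterescaped.json), and the map-based decode mirrors the Parse* helpers shown later in this thread.

package jsonperf // hypothetical

import (
	"encoding/json"
	"os"
	"testing"

	"github.com/bytedance/sonic"
)

// Large generic-parse case on the twitter sample.
func BenchmarkParseLargeTwitter_StdJson(b *testing.B) {
	data, err := os.ReadFile("testdata/twitterescaped.json")
	if err != nil {
		b.Fatal(err)
	}
	var v map[string]interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Unmarshal(data, &v)
	}
}

func BenchmarkParseLargeTwitter_Sonic(b *testing.B) {
	data, err := os.ReadFile("testdata/twitterescaped.json")
	if err != nil {
		b.Fatal(err)
	}
	var v map[string]interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Unmarshal(data, &v)
	}
}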

@AsterDY (Collaborator) commented Jun 22, 2021

> Also, I created a PR (goccy/go-json#254) to add your library as a benchmark target.

We have added your lib to our benchmarks too, cc https://github.com/bytedance/sonic/pull/23/files. It seems the 'Large' data you defined is smaller than our 'Medium'.

@goccy (Contributor, Author) commented Jun 22, 2021

Thank you for the reply.
I'd like to know why go-json's performance suddenly deteriorates on Medium and Large. I don't need the data itself, so could you share the actual code that ran the benchmarks? (I want to rule out the possibility that the benchmark code is wrong.)

Also, thank you for adding go-json to your benchmarks.

@AsterDY (Collaborator) commented Jun 22, 2021

> Thank you for the reply.
> I'd like to know why go-json's performance suddenly deteriorates on Medium and Large. I don't need the data itself, so could you share the actual code that ran the benchmarks? (I want to rule out the possibility that the benchmark code is wrong.)
>
> Also, thank you for adding go-json to your benchmarks.

We will post our entire benchmark system later, after filtering out the commercial data, which takes a little time. I can tell you that it is all just out-of-the-box use, like decoder/decoder_test.go, so maybe try running them yourself for now? I think the key factor is the scale and schema of the data.
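"Out-of-the-box" here means plain Marshal/Unmarshal calls with the default configuration; a minimal self-contained example (the payload is illustrative):

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

func main() {
	in := []byte(`{"title":"future","price":40.8}`)

	// Decode into a generic map with the default configuration.
	var m map[string]interface{}
	if err := sonic.Unmarshal(in, &m); err != nil {
		panic(err)
	}

	// Encode it back, again with the default configuration.
	out, err := sonic.Marshal(m)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}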

@goccy (Contributor, Author) commented Jun 22, 2021

I am referring to this benchmark result. Looking at the repository, I couldn't find any code that generates it.

@PureWhiteWu (Collaborator) commented:

> I am referring to this benchmark result. Looking at the repository, I couldn't find any code that generates it.

#23

@goccy (Contributor, Author) commented Jun 22, 2021

@PureWhiteWu
No, #23 doesn't include BenchmarkMarhsalLargeStruct_XXX, BenchmarkEncodeLargeMap_XXX, BenchmarkBindLargeStruct_XXX, etc.
If there is no code that is exactly the same as the code that produced the above benchmark results, the results lose their credibility, so please show it.

@PureWhiteWu (Collaborator) commented:

@AsterDY please take a look

@AsterDY (Collaborator) commented Jun 22, 2021

> @PureWhiteWu
> No, #23 doesn't include BenchmarkMarhsalLargeStruct_XXX, BenchmarkEncodeLargeMap_XXX, BenchmarkBindLargeStruct_XXX, etc.
> If there is no code that is exactly the same as the code that produced the above benchmark results, the results lose their credibility, so please show it.

@goccy
I've already told you that the benchmarks above use commercial data and will be posted later. Why not look at this? https://github.com/bytedance/sonic/pull/23/files#diff-b335630551682c19a781afebcf4d07bf978fb1f8ac04c6bf87428ed5106870f5R19 You can try it yourself; here is what we get:

goos: darwin
goarch: amd64
pkg: github.com/bytedance/sonic/encoder
cpu: Intel(R) Core(TM) i9-9880H CPU @ 2.30GHz
BenchmarkEncoder_Generic_Sonic-16                         100000             24174 ns/op         539.22 MB/s       17757 B/op         42 allocs/op
BenchmarkEncoder_Generic_JsonIter-16                      100000             44613 ns/op         292.18 MB/s       13433 B/op         77 allocs/op
BenchmarkEncoder_Generic_GoJson-16                        100000             87898 ns/op         148.30 MB/s       13234 B/op         39 allocs/op
BenchmarkEncoder_Generic_StdLib-16                        100000            133512 ns/op          97.63 MB/s       48177 B/op        827 allocs/op
BenchmarkEncoder_Binding_Sonic-16                         100000              6058 ns/op        2151.73 MB/s       13481 B/op          4 allocs/op
BenchmarkEncoder_Binding_JsonIter-16                      100000             21223 ns/op         614.20 MB/s        9488 B/op          2 allocs/op
BenchmarkEncoder_Binding_GoJson-16                        100000             10186 ns/op        1279.74 MB/s        9480 B/op          1 allocs/op
BenchmarkEncoder_Binding_StdLib-16                        100000             17741 ns/op         734.75 MB/s        9479 B/op          1 allocs/op
BenchmarkDecoder_Generic_Sonic-16                         100000             53344 ns/op         244.36 MB/s       50158 B/op        313 allocs/op
BenchmarkDecoder_Generic_StdLib-16                        100000            141006 ns/op          92.44 MB/s       50898 B/op        772 allocs/op
BenchmarkDecoder_Generic_JsonIter-16                      100000            106386 ns/op         122.53 MB/s       55785 B/op       1068 allocs/op
BenchmarkDecoder_Generic_GoJson-16                        100000            107184 ns/op         121.61 MB/s       65678 B/op        944 allocs/op
BenchmarkDecoder_Binding_Sonic-16                         100000             30039 ns/op         433.94 MB/s       25259 B/op         34 allocs/op
BenchmarkDecoder_Binding_StdLib-16                        100000            131088 ns/op          99.44 MB/s       10560 B/op        207 allocs/op
BenchmarkDecoder_Binding_JsonIter-16                      100000             37988 ns/op         343.13 MB/s       14674 B/op        385 allocs/op
BenchmarkDecoder_Binding_GoJson-16                        100000             33741 ns/op         386.33 MB/s       22047 B/op         49 allocs/op

@PureWhiteWu (Collaborator) commented:

Let's delete the benchmark results that used our internal commercial data. They are not reproducible by the open source community.

@goccy (Contributor, Author) commented Jun 22, 2021

Again, I am asking you to post code like the following here.

func NewLargePayload() interface{} {
	return struct{}{} // dummy type because the actual type is internal data
}

func BenchmarkMarhsalLargeStruct_Stdjson(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		if _, err := json.Marshal(NewLargePayload()); err != nil {
			b.Fatal(err)
		}
	}
}

This is because the code in such a function may be wrong in the first place, resulting in unfair data. So if you want to show the results, you have to show the code that proves they are valid. I'm not asking for the same data, and it doesn't have to be reproducible; I just want to see the source code you used (the logic of the benchmark itself).

@AsterDY (Collaborator) commented Jun 22, 2021

Anyway, you are welcome to use the data at https://github.com/bytedance/sonic/blob/main/testdata/twitterescaped.json and https://github.com/bytedance/sonic/blob/main/decoder/testdata_test.go, write the code the way you like, and submit another PR. But don't forget to compare the generic-codec case (map[string]interface{}), which is perhaps the more important use case for JSON (otherwise, why not just use Protobuf or Thrift)?
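A sketch of that generic-codec comparison on the encode side: decode the twitter sample into map[string]interface{} once, then re-marshal it with go-json and sonic. The file path and benchmark names are illustrative, not the ones used in this repository.

package jsonperf // hypothetical

import (
	"os"
	"testing"

	"github.com/bytedance/sonic"
	gojson "github.com/goccy/go-json"
)

// Load the twitter sample and decode it into a generic map once, outside the
// timed loop. The path assumes a checkout of the sonic repository.
func loadGenericTwitter(b *testing.B) (map[string]interface{}, int) {
	data, err := os.ReadFile("testdata/twitterescaped.json")
	if err != nil {
		b.Fatal(err)
	}
	var m map[string]interface{}
	if err := sonic.Unmarshal(data, &m); err != nil {
		b.Fatal(err)
	}
	return m, len(data)
}

func BenchmarkEncodeGenericTwitter_GoJson(b *testing.B) {
	m, n := loadGenericTwitter(b)
	b.SetBytes(int64(n))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Marshal(&m)
	}
}

func BenchmarkEncodeGenericTwitter_Sonic(b *testing.B) {
	m, n := loadGenericTwitter(b)
	b.SetBytes(int64(n))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Marshal(&m)
	}
}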

@AsterDY (Collaborator) commented Jun 22, 2021

> @PureWhiteWu
> No, #23 doesn't include BenchmarkMarhsalLargeStruct_XXX, BenchmarkEncodeLargeMap_XXX, BenchmarkBindLargeStruct_XXX, etc.
> If there is no code that is exactly the same as the code that produced the above benchmark results, the results lose their credibility, so please show it.

package json_perf

import (
	"bytes"
	json "encoding/json"
	"fmt"
	"strconv"
	"strings"
	"testing"

	"xx.xx/middleware/json_perf/testdata"

	"xx.xx/chenzhuoyu/infra-kernels/jsonx"
	simplejson "github.com/bitly/go-simplejson"
	"github.com/buger/jsonparser"
	"github.com/bytedance/sonic"
	"github.com/bytedance/sonic/decoder"
	gojson "github.com/goccy/go-json"
	jsoniter "github.com/json-iterator/go"
	easyjson "github.com/mailru/easyjson"
	simdjson "github.com/minio/simdjson-go"
	"github.com/pquerna/ffjson/ffjson"
	"github.com/tidwall/gjson"
	"github.com/valyala/fastjson"
)

//---------------------------Encode-------------------------
func benchmarkEncodeStdJson(b *testing.B, data []byte) {
	var sbook interface{}
	if err := json.Unmarshal(data, &sbook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Marshal(&sbook)
	}
}

func benchmarkEncodeGoJson(b *testing.B, data []byte) {
	var ibook interface{}
	if err := gojson.Unmarshal(data, &ibook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Marshal(&ibook)
	}
}

func benchmarkEncodeJsonIterator(b *testing.B, data []byte) {
	var ibook interface{}
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	if err := jsonIterator.Unmarshal(data, &ibook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Marshal(&ibook)
	}
}

func benchmarkEncodeFfjson(b *testing.B, data []byte) {
	var fbook interface{}
	if err := ffjson.Unmarshal(data, &fbook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Marshal(&fbook)
	}
}

// func benchmarkEncodeEasyjson(b *testing.B, data []byte) {
// 	var ebook interface{}
// 	if err := easyjson.Unmarshal(data, &ebook); err != nil {
// 		b.Fatal(err)
// 	}
// 	b.ReportAllocs()
// 	b.ResetTimer()
// 	for i := 0; i < b.N; i++ {
// 		easyjson.Marshal(&ebook)
// 	}
// }

func benchmarkEncodeJsonx(b *testing.B, data []byte) {
	var xbook interface{}
	if err := jsonx.Unmarshal(data, &xbook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Marshal(&xbook)
	}
}

func benchmarkEncodeSonic(b *testing.B, data []byte) {
	var xbook = map[string]interface{}{}
	if err := sonic.Unmarshal(data, &xbook); err != nil {
		b.Fatal(err)
	}
	if _, err := sonic.Marshal(&xbook); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Marshal(&xbook)
	}
}

//---------------------------Marshal------------------------
func benchmarkMarshalStdJson(b *testing.B, book Book) {
	var sbook = book
	data, _ := json.Marshal(&sbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Marshal(&sbook)
	}
}

func benchmarkMarshalGoJson(b *testing.B, book Book) {
	var ibook = book
	data, _ := gojson.Marshal(&ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Marshal(&ibook)
	}
}

func benchmarkMarshalJsonIterator(b *testing.B, book Book) {
	var ibook = book
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	data, _ := jsonIterator.Marshal(&ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Marshal(&ibook)
	}
}

func benchmarkMarshalFfjson(b *testing.B, book Book) {
	fbook := FBook{
		BookId:  book.BookId,
		BookIds: book.BookIds,
		Title:   book.Title,
		Titles:  book.Titles,
		Price:   book.Price,
		Prices:  book.Prices,
		Hot:     book.Hot,
		Hots:    book.Hots,
		Author: FAuthor{
			Name: book.Author.Name,
			Age:  book.Author.Age,
			Male: book.Author.Male,
		},
		Weights: book.Weights,
	}
	for _, a := range book.Authors {
		aa := FAuthor{
			Name: a.Name,
			Age:  a.Age,
			Male: a.Male,
		}
		fbook.Authors = append(fbook.Authors, aa)
	}
	data, _ := ffjson.Marshal(&fbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Marshal(&fbook)
	}
}

func benchmarkMarshalEasyjson(b *testing.B, book Book) {
	ebook := EBook{
		BookId:  book.BookId,
		BookIds: book.BookIds,
		Title:   book.Title,
		Titles:  book.Titles,
		Price:   book.Price,
		Prices:  book.Prices,
		Hot:     book.Hot,
		Hots:    book.Hots,
		Author: EAuthor{
			Name: book.Author.Name,
			Age:  book.Author.Age,
			Male: book.Author.Male,
		},
		Weights: book.Weights,
	}
	for _, a := range book.Authors {
		aa := EAuthor{
			Name: a.Name,
			Age:  a.Age,
			Male: a.Male,
		}
		ebook.Authors = append(ebook.Authors, aa)
	}
	data, _ := easyjson.Marshal(&ebook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Marshal(&ebook)
	}
}

func benchmarkMarshalJsonx(b *testing.B, book Book) {
	var xbook = book
	data, _ := jsonx.Marshal(&xbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Marshal(&xbook)
	}
}

func benchmarkMiddleMarshalStdJson(b *testing.B, data []byte) {
	var sbook FeedRequest
	json.Unmarshal(data, &sbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Marshal(&sbook)
	}
}

func benchmarkMiddleMarshalGoJson(b *testing.B, data []byte) {
	var ibook FeedRequest
	gojson.Unmarshal(data, &ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Marshal(&ibook)
	}
}

func benchmarkMiddleMarshalJsonIterator(b *testing.B, data []byte) {
	var ibook FeedRequest
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	jsonIterator.Unmarshal(data, &ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Marshal(&ibook)
	}
}

func benchmarkLargeMarshalStdJson(b *testing.B, data []byte) {
	var sbook testdata.TwitterStruct
	json.Unmarshal(data, &sbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Marshal(&sbook)
	}
}

func benchmarkLargeMarshalGoJson(b *testing.B, data []byte) {
	var ibook testdata.TwitterStruct
	gojson.Unmarshal(data, &ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Marshal(&ibook)
	}
}

func benchmarkLargeMarshalJsonIterator(b *testing.B, data []byte) {
	var ibook testdata.TwitterStruct
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	jsonIterator.Unmarshal(data, &ibook)
	b.SetBytes(int64(len(data)))
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Marshal(&ibook)
	}
}

func benchmarkMiddleMarshalFfjson(b *testing.B, data []byte) {
	var fbook FFeedRequest
	ffjson.Unmarshal(data, &fbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Marshal(&fbook)
	}
}

func benchmarkMiddleMarshalEasyjson(b *testing.B, data []byte) {
	var ebook EFeedRequest
	easyjson.Unmarshal(data, &ebook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Marshal(&ebook)
	}
}

func benchmarkLargeMarshalEasyjson(b *testing.B, data []byte) {
	var ebook testdata.ETwitterStruct
	easyjson.Unmarshal(data, &ebook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Marshal(&ebook)
	}
}

func benchmarkMiddleMarshalJsonx(b *testing.B, data []byte) {
	var xbook FeedRequest
	jsonx.Unmarshal(data, &xbook)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Marshal(&xbook)
	}
}

func benchmarkSmallMarshalSonic(b *testing.B, book interface{}) {
	data, err := sonic.Marshal(book)
	if err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Marshal(book)
	}
}

func benchmarkMiddleMarshalSonic(b *testing.B, data []byte) {
	var book FeedRequest
	err := sonic.Unmarshal(data, &book)
	if err != nil {
		b.Fatal(err)
	}
	_, err = sonic.Marshal(book)
	if err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Marshal(book)
	}
}

func benchmarkLargeMarshalSonic(b *testing.B, data []byte) {
	var book testdata.TwitterStruct
	err := sonic.Unmarshal(data, &book)
	if err != nil {
		b.Fatal(err)
	}
	_, err = sonic.Marshal(book)
	if err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Marshal(book)
	}
}

//---------------------------Bind------------------------

func benchmarkBindStdJson(b *testing.B, data []byte) {
	var book Book
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindStdJson(b *testing.B, data []byte) {
	var book FeedRequest
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Unmarshal(data, &book)
	}
}

func benchmarkLargeBindStdJson(b *testing.B, data []byte) {
	var book testdata.TwitterStruct
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Unmarshal(data, &book)
	}
}

func benchmarkBindGoJson(b *testing.B, data []byte) {
	var book Book
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindGoJson(b *testing.B, data []byte) {
	var book FeedRequest
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Unmarshal(data, &book)
	}
}

func benchmarkLargeBindGoJson(b *testing.B, data []byte) {
	var book testdata.TwitterStruct
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Unmarshal(data, &book)
	}
}

func benchmarkBindJsonIterator(b *testing.B, data []byte) {
	var book Book
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindJsonIterator(b *testing.B, data []byte) {
	var book FeedRequest
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Unmarshal(data, &book)
	}
}

func benchmarkLargeBindJsonIterator(b *testing.B, data []byte) {
	var book testdata.TwitterStruct
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Unmarshal(data, &book)
	}
}

func benchmarkBindFfjson(b *testing.B, data []byte) {
	var book FBook
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Unmarshal(data, &book)
	}
}

func benchmarkBindEasyjson(b *testing.B, data []byte) {
	var book EBook
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindEasyjson(b *testing.B, data []byte) {
	var book EFeedRequest
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Unmarshal(data, &book)
	}
}

func benchmarkLargeBindEasyjson(b *testing.B, data []byte) {
	var book testdata.ETwitterStruct
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		easyjson.Unmarshal(data, &book)
	}
}

func benchmarkBindJsonx(b *testing.B, data []byte) {
	var book Book
	//var data = []byte(`{"id":12125925,"ids":[-2147483648,2147483647],"title":"future","titles":["hello","world"],"price":40.8,"prices":[-0.1,0.1],"hot":true,"hots":[true,true,true],"author":{"name":"json","age":99,"male":true},"authors":[{"name":"json","age":99,"male":true},{"name":"json","age":99,"male":true},{"name":"json","age":99,"male":true}],"weights":[]}`)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindFfjson(b *testing.B, data []byte) {
	var book FFeedRequest
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindJsonx(b *testing.B, data []byte) {
	var book FeedRequest
	//var data = []byte(`{"id":12125925,"ids":[-2147483648,2147483647],"title":"future","titles":["hello","world"],"price":40.8,"prices":[-0.1,0.1],"hot":true,"hots":[true,true,true],"author":{"name":"json","age":99,"male":true},"authors":[{"name":"json","age":99,"male":true},{"name":"json","age":99,"male":true},{"name":"json","age":99,"male":true}],"weights":[]}`)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Unmarshal(data, &book)
	}
}

func benchmarkBindSimplejson(b *testing.B, data []byte) {
	var book Book
	j := simplejson.New()
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		j.UnmarshalJSON(data)
		book.BookId, _ = j.Get("id").Int()
		ids, _ := j.Get("ids").Array()
		for _, v := range ids {
			i := v.(json.Number)
			id, _ := i.Int64()
			book.BookIds = append(book.BookIds, int(id))
		}
		book.Title, _ = j.Get("title").String()
		titles, _ := j.Get("titles").Array()
		for _, v := range titles {
			i := v.(string)
			book.Titles = append(book.Titles, i)
		}
		book.Price, _ = j.Get("price").Float64()
		prices, _ := j.Get("prices").Array()
		for _, v := range prices {
			i := v.(json.Number)
			id, _ := i.Float64()
			book.Prices = append(book.Prices, id)
		}
		book.Hot, _ = j.Get("hot").Bool()
		hots, _ := j.Get("hots").Array()
		for _, v := range hots {
			i := v.(bool)
			book.Hots = append(book.Hots, i)
		}
		ab := j.Get("author").Interface().(map[string]interface{})
		book.Author = parseAuthorSimplejson(ab)
		abs := j.Get("authors").Interface().([]interface{})
		for _, v := range abs {
			i := v.(map[string]interface{})
			book.Authors = append(book.Authors, parseAuthorSimplejson(i))
		}
	}
}

func parseAuthorSimplejson(value map[string]interface{}) Author {
	var author Author
	author.Name, _ = value["name"].(string)
	i, _ := value["age"].(json.Number)
	age, _ := i.Int64()
	author.Age = int(age)
	author.Male, _ = value["male"].(bool)
	return author
}

func benchmarkBindJsonparser(b *testing.B, data []byte) {
	var book Book
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
			var err error
			switch string(key) {
			case "id":
				book.BookId, err = strconv.Atoi(string(value))
			case "ids":
				ids := strings.Split(string(value[1:len(value)-1]), ",")
				book.BookIds = book.BookIds[:0]
				for _, v := range ids {
					id, _ := strconv.Atoi(v)
					book.BookIds = append(book.BookIds, id)
				}
			case "title":
				book.Title = string(value)
			case "titles":
				book.Titles = strings.Split(string(value[1:len(value)-1]), ",")
			case "price":
				book.Price, err = strconv.ParseFloat(string(value), 64)
			case "prices":
				prices := strings.Split(string(value[1:len(value)-1]), ",")
				book.Prices = book.Prices[:0]
				for _, v := range prices {
					id, _ := strconv.ParseFloat(v, 64)
					book.Prices = append(book.Prices, id)
				}
			case "hot":
				book.Hot, err = strconv.ParseBool(string(value))
			case "hots":
				hots := strings.Split(string(value[1:len(value)-1]), ",")
				book.Hots = book.Hots[:0]
				for _, v := range hots {
					id, _ := strconv.ParseBool(v)
					book.Hots = append(book.Hots, id)
				}
			case "author":
				book.Author = parseAuthorJsonparser(value)
			case "authors":
				book.Authors = book.Authors[:0]
				jsonparser.ArrayEach(value, func(v []byte, dataType jsonparser.ValueType, offset int, err error) {
					book.Authors = append(book.Authors, parseAuthorJsonparser(v))
				})
			}
			return err
		})
	}
	// b.StopTimer()
	// fmt.Printf("result: %+v\n", book)
}

func parseAuthorJsonparser(value []byte) Author {
	var author Author
	jsonparser.ObjectEach(value, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
		var err error
		switch string(key) {
		case "name":
			author.Name = string(value)
		case "age":
			author.Age, err = strconv.Atoi(string(value))
		case "male":
			author.Male, err = strconv.ParseBool(string(value))
		}
		return err
	})
	return author
}

func benchmarkBindGjson(b *testing.B, data []byte) {
	var book Book
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gjson.ParseBytes(data).ForEach(func(key, value gjson.Result) bool {
			switch key.Str {
			case "id":
				book.BookId = int(value.Int())
			case "ids":
				ids := value.Array()
				book.BookIds = book.BookIds[:0]
				for _, v := range ids {
					id := int(v.Int())
					book.BookIds = append(book.BookIds, id)
				}
			case "title":
				book.Title = value.String()
			case "titles":
				titles := value.Array()
				book.Titles = book.Titles[:0]
				for _, v := range titles {
					book.Titles = append(book.Titles, v.String())
				}
			case "price":
				book.Price = value.Float()
			case "prices":
				prices := value.Array()
				book.Prices = book.Prices[:0]
				for _, v := range prices {
					book.Prices = append(book.Prices, v.Float())
				}
			case "hot":
				book.Hot = value.Bool()
			case "hots":
				hots := value.Array()
				book.Hots = book.Hots[:0]
				for _, v := range hots {
					book.Hots = append(book.Hots, v.Bool())
				}
			case "author":
				book.Author = parseAuthorGjson(value.Map())
			case "authors":
				authors := value.Array()
				for _, v := range authors {
					book.Authors = append(book.Authors, parseAuthorGjson(v.Map()))
				}
			}
			return true
		})
	}
}

func parseAuthorGjson(value map[string]gjson.Result) Author {
	var author Author
	author.Name = value["name"].String()
	author.Age = int(value["age"].Int())
	author.Male = value["male"].Bool()
	return author
}

func benchmarkBindSimdjson(b *testing.B, data []byte) {
	if !simdjson.SupportedCPU() {
		fmt.Println("the cpu doesn't support SIMD")
	}
	var book Book
	var pj, obj, subobj, tmp, subtmp, array = &simdjson.ParsedJson{}, &simdjson.Object{}, &simdjson.Object{}, &simdjson.Iter{}, &simdjson.Iter{}, &simdjson.Array{}
	var name, tt, ttt = "", simdjson.TypeNone, simdjson.TypeNone
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		pj, _ = simdjson.Parse(data, pj)
		*tmp = pj.Iter()
		tmp.Advance()
		_, tmp, _ = tmp.Root(tmp)
		obj, _ = tmp.Object(obj)
		name, tt, _ = obj.NextElement(tmp)
		for ; tt != simdjson.TypeNone; name, tt, _ = obj.NextElement(tmp) {
			switch name {
			case "id":
				id, _ := tmp.Int()
				book.BookId = int(id)
			case "ids":
				ids, _ := tmp.Array(array)
				book.BookIds = book.BookIds[:0]
				*tmp = ids.Iter()
				ttt = tmp.Advance()
				for ; ttt != simdjson.TypeNone; ttt = tmp.Advance() {
					id, _ := tmp.Int()
					book.BookIds = append(book.BookIds, int(id))
				}
			case "title":
				book.Title, _ = tmp.String()
			case "titles":
				titles, _ := tmp.Array(array)
				book.Titles = book.Titles[:0]
				*tmp = titles.Iter()
				ttt = tmp.Advance()
				for ; ttt != simdjson.TypeNone; ttt = tmp.Advance() {
					id, _ := tmp.String()
					book.Titles = append(book.Titles, id)
				}
			case "price":
				book.Price, _ = tmp.Float()
			case "prices":
				prices, _ := tmp.Array(array)
				book.Prices = book.Prices[:0]
				*tmp = prices.Iter()
				ttt = tmp.Advance()
				for ; ttt != simdjson.TypeNone; ttt = tmp.Advance() {
					id, _ := tmp.Float()
					book.Prices = append(book.Prices, id)
				}
			case "hot":
				book.Hot, _ = tmp.Bool()
			case "hots":
				hots, _ := tmp.Array(array)
				book.Hots = book.Hots[:0]
				*tmp = hots.Iter()
				ttt = tmp.Advance()
				for ; ttt != simdjson.TypeNone; ttt = tmp.Advance() {
					id, _ := tmp.Bool()
					book.Hots = append(book.Hots, id)
				}
			case "author":
				subobj, _ = tmp.Object(subobj)
				book.Author = parseAuthorSimdjson(subobj, subtmp)
			case "authors":
				authors, _ := tmp.Array(array)
				book.Authors = book.Authors[:0]
				*tmp = authors.Iter()
				ttt = tmp.Advance()
				for ; ttt != simdjson.TypeNone; ttt = tmp.Advance() {
					subobj, _ = tmp.Object(subobj)
					book.Authors = append(book.Authors, parseAuthorSimdjson(subobj, subtmp))
				}
			}
		}
	}
}

func parseAuthorSimdjson(value *simdjson.Object, tmp *simdjson.Iter) Author {
	var author Author
	name, tt, _ := value.NextElement(tmp)
	for ; tt != simdjson.TypeNone; name, tt, _ = value.NextElement(tmp) {
		switch name {
		case "name":
			author.Name, _ = tmp.String()
		case "age":
			age, _ := tmp.Int()
			author.Age = int(age)
		case "male":
			author.Male, _ = tmp.Bool()
		}
	}
	return author
}

func benchmarkBindFastjson(b *testing.B, data []byte) {
	var p fastjson.Parser
	var book Book
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		v, _ := p.ParseBytes(data)
		root, _ := v.Object()
		root.Visit(func(key []byte, value *fastjson.Value) {
			switch string(key) {
			case "id":
				book.BookId, _ = value.Int()
			case "ids":
				ids, _ := value.Array()
				book.BookIds = book.BookIds[:0]
				for _, v := range ids {
					id, _ := v.Int()
					book.BookIds = append(book.BookIds, id)
				}
			case "title":
				book.Title = value.String()
			case "titles":
				titles, _ := value.Array()
				book.Titles = book.Titles[:0]
				for _, v := range titles {
					book.Titles = append(book.Titles, v.String())
				}
			case "price":
				book.Price, _ = value.Float64()
			case "prices":
				prices, _ := value.Array()
				book.Prices = book.Prices[:0]
				for _, v := range prices {
					p, _ := v.Float64()
					book.Prices = append(book.Prices, p)
				}
			case "hot":
				book.Hot, _ = value.Bool()
			case "hots":
				hots, _ := value.Array()
				book.Hots = book.Hots[:0]
				for _, v := range hots {
					h, _ := v.Bool()
					book.Hots = append(book.Hots, h)
				}
			case "author":
				oj, _ := value.Object()
				book.Author = parseAuthorFastjson(oj)
			case "authors":
				authors, _ := value.Array()
				for _, v := range authors {
					oj, _ := v.Object()
					book.Authors = append(book.Authors, parseAuthorFastjson(oj))
				}
			}
		})
	}
}

func parseAuthorFastjson(value *fastjson.Object) Author {
	var author Author
	author.Name = value.Get("name").String()
	author.Age, _ = value.Get("age").Int()
	author.Male, _ = value.Get("male").Bool()
	return author
}

func benchmarkBindSonic(b *testing.B, data []byte) {
	var book Book
	if err := sonic.Unmarshal(data, &book); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		sonic.Unmarshal(data, &book)
	}
}

func benchmarkMiddleBindSonic(b *testing.B, data []byte) {
	var book FeedRequest
	if err := sonic.Unmarshal(data, &book); err != nil {
		b.Fatal(err)
	}
	var str = string(data)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		decoder.NewDecoder(str).Decode(&book)
	}
}

func benchmarkLargeBindSonic(b *testing.B, data []byte) {
	var book testdata.TwitterStruct
	if err := sonic.Unmarshal(data, &book); err != nil {
		b.Fatal(err)
	}
	var str = string(data)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		decoder.NewDecoder(str).Decode(&book)
	}
}

//-------------------------Parse-----------------------------

func benchmarkParseStdJson(b *testing.B, data []byte) {
	var book interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		json.Unmarshal(data, &book)
	}
}

func benchmarkParseGoJson(b *testing.B, data []byte) {
	var book interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gojson.Unmarshal(data, &book)
	}
}

func benchmarkParseJsonIterator(b *testing.B, data []byte) {
	var book interface{}
	var jsonIterator = jsoniter.ConfigCompatibleWithStandardLibrary
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonIterator.Unmarshal(data, &book)
	}
}

func benchmarkParseFfjson(b *testing.B, data []byte) {
	var book interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ffjson.Unmarshal(data, &book)
	}
}

// func benchmarkParseEasyjson(b *testing.B, data []byte) {
// 	var book interface{}
// 	b.ResetTimer()
// 	for i := 0; i < b.N; i++ {
// 		easyjson.Unmarshal(data, &book)
// 	}
// }

func benchmarkParseJsonx(b *testing.B, data []byte) {
	var book interface{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonx.Unmarshal(data, &book)
	}
}

func benchmarkParseSimplejson(b *testing.B, data []byte) {
	j := simplejson.New()
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		j.UnmarshalJSON(data)
		j.Interface()
	}
}

func benchmarkParseJsonparser(b *testing.B, data []byte) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		var cont = map[string]interface{}{}
		jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
			cont[string(key)], _ = dfsObjectJsonparser(key, value, dataType, offset)
			return nil
		})
	}
}

func dfsObjectJsonparser(key []byte, value []byte, dataType jsonparser.ValueType, offset int) (interface{}, error) {
	if dataType == jsonparser.Array {
		next := make([]interface{}, 0)
		jsonparser.ArrayEach(value, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
			v, err := dfsObjectJsonparser(nil, value, dataType, offset)
			if err != nil {
				return
			}
			next = append(next, v)
		})
		return next, nil
	} else if dataType == jsonparser.Object {
		next := make(map[string]interface{})
		jsonparser.ObjectEach(value, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
			v, err := dfsObjectJsonparser(key, value, dataType, offset)
			if err != nil {
				return err
			}
			next[string(key)] = v
			return nil
		})
		return next, nil
	}
	switch dataType {
	case jsonparser.Boolean:
		return strconv.ParseBool(string(value))
	case jsonparser.String:
		return string(value), nil
	case jsonparser.Number:
		return strconv.ParseFloat(string(value), 64)
	case jsonparser.Null:
		return nil, nil
	}
	return value, nil
}

func benchmarkParseGjson(b *testing.B, data []byte) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		var con = map[string]interface{}{}
		gjson.ParseBytes(data).ForEach(func(key, value gjson.Result) bool {
			var ok bool
			con[key.Str], ok = dfsGjson(key, value)
			return ok
		})
	}
}

func dfsGjson(k, v gjson.Result) (interface{}, bool) {
	if v.IsArray() {
		next := []interface{}{}
		v.ForEach(func(key, value gjson.Result) bool {
			v, ok := dfsGjson(key, value)
			next = append(next, v)
			return ok
		})
		return next, true
	} else if v.IsObject() {
		next := map[string]interface{}{}
		v.ForEach(func(key, value gjson.Result) bool {
			var ok bool
			next[key.Str], ok = dfsGjson(key, value)
			return ok
		})
		return next, true
	}
	switch v.Type {
	case gjson.False, gjson.True:
		return v.Bool(), true
	case gjson.String:
		return v.String(), true
	case gjson.Number:
		return v.Float(), true
	case gjson.Null:
		return nil, true
	}
	return v.Raw, true
}

func benchmarkParseSimdjson(b *testing.B, data []byte) {
	if !simdjson.SupportedCPU() {
		b.Fatalf("the cpu doesn't support SIMD")
	}
	var tmp = &simdjson.Iter{}
	var obj = &simdjson.Object{}
	var m = map[string]interface{}{}
	var pj = &simdjson.ParsedJson{}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		pj, _ = simdjson.Parse(data, pj)
		*tmp = pj.Iter()
		tmp.Advance()
		_, tmp, _ = tmp.Root(tmp)
		obj, _ = tmp.Object(obj)
		obj.Map(m)
		//b.Log(m)
		// for ; tt != simdjson.TypeNone; tt = tmp.Advance() {
		// 	if tt == simdjson.TypeArray {
		// 		array, _ = tmp.Array(array)

		// 	}
		// }
	}
}

// func dfsSimdjson(v *simdjson.Iter) {
// 	if v.Type() == simdjson.TypeArray {
// 		array, _ := v.Array(&simdjson.Array{})
// 		nt := array.Iter()
// 		for ttt := nt.Advance(); ttt != simdjson.TypeNone; ttt = nt.Advance() {
// 			dfsSimdjson(&nt)
// 		}
// 	} else if v.Type() == simdjson.TypeObject {
// 		obj, _ := v.Object(&simdjson.Object{})
// 		nt := obj.Map()
// 	}
// }

func benchmarkParseFastjson(b *testing.B, data []byte) {
	var p fastjson.Parser
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		v, _ := p.ParseBytes(data)
		o, _ := v.Object()
		var con = map[string]interface{}{}
		o.Visit(func(key []byte, v *fastjson.Value) {
			con[string(key)] = dfsFastJson(key, v)
		})
	}
}

func dfsFastJson(key []byte, v *fastjson.Value) interface{} {
	if v.Type() == fastjson.TypeArray {
		var next = []interface{}{}
		ars, _ := v.Array()
		for _, a := range ars {
			next = append(next, dfsFastJson(nil, a))
		}
		return next
	} else if v.Type() == fastjson.TypeObject {
		var next = map[string]interface{}{}
		obj, _ := v.Object()
		obj.Visit(func(key []byte, v *fastjson.Value) {
			next[string(key)] = dfsFastJson(key, v)
		})
		return next
	}
	switch v.Type() {
	case fastjson.TypeFalse, fastjson.TypeTrue:
		b, _ := v.Bool()
		return b
	case fastjson.TypeString:
		return v.String()
	case fastjson.TypeNumber:
		f, _ := v.Float64()
		return f
	case fastjson.TypeNull:
		return nil
	}
	return v.MarshalTo([]byte{})
}

func benchmarkParseSonic(b *testing.B, data []byte) {
	var book map[string]interface{}
	if err := sonic.Unmarshal(data, &book); err != nil {
		b.Fatal(err)
	}
	var str = string(data)
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		decoder.NewDecoder(str).Decode(&book)
	}
}

//----------------------------GetInt--------------------------------
func benchmarkGetIntJsonIterator(b *testing.B, data []byte, k1 string, id2 int, k3 string) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsoniter.Get(data, k1, id2, k3).ToInt()
		// b.Log(i)
	}
}

func benchmarkGetIntJsonparser(b *testing.B, data []byte, k1 string, id2 int, k3 string) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		jsonparser.ObjectEach(data, func(key []byte, value []byte, dataType jsonparser.ValueType, offset int) error {
			if bytes.Equal(key, []byte(k1)) {
				jsonparser.ArrayEach(value, func(value []byte, dataType jsonparser.ValueType, offset int, err error) {
					if offset == id2 {
						jsonparser.ObjectEach(value, func(k []byte, v []byte, dt jsonparser.ValueType, off int) error {
							var err error
							if bytes.Equal(k, []byte(k3)) {
								_, err = strconv.Atoi(string(v))
								// b.Log(i)
							}
							return err
						})
					}
				})
			}
			return nil
		})
	}
}

func benchmarkGetIntGjson(b *testing.B, data []byte, k1 string, id2 int, k3 string) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		gjson.GetBytes(data, fmt.Sprintf("%s.%d.%s", k1, id2, k3)).Int()
		//b.Log(i)
	}
}

func benchmarkGetIntFastjson(b *testing.B, data []byte, k1 string, id2 int, k3 string) {
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		fastjson.GetInt(data, k1, strconv.Itoa(id2), k3)
	}
}

func benchmarkGetIntSonic(b *testing.B, data []byte, k1 string, id2 int, k3 string) {
	vd := string(data)
	if _, err := sonic.GetFromString(vd, k1, id2, k3); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(int64(len(data)))
	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		n, _ := sonic.GetFromString(vd, k1, id2, k3)
		n.Int64()
	}
}

This is basically the code we used for the general usage tests. Moving it into this repo will take a little time.
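The helpers above take the test data as a parameter; the exported Benchmark functions that appear in the earlier results are presumably thin wrappers over them, along the lines of the sketch below (smallData is a hypothetical fixture, not the real sample), and the suite would then be run with the standard tooling, e.g. go test -bench . -benchmem.

// Hypothetical wrappers: feed fixture data into the helpers defined above.
var smallData = []byte(`{"id":12125925,"title":"future","price":40.8,"hot":true}`)

func BenchmarkParseSmallMap_Sonic(b *testing.B) {
	benchmarkParseSonic(b, smallData)
}

func BenchmarkBindSmallStruct_Sonic(b *testing.B) {
	benchmarkBindSonic(b, smallData)
}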

AsterDY closed this as completed Jun 22, 2021