From 042eea873536b4071817cf580fe830ca9d762847 Mon Sep 17 00:00:00 2001
From: masv3971
Date: Thu, 5 Sep 2024 10:57:21 +0200
Subject: [PATCH 01/25] First blood.

---
 docs/flowshart.md | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)
 create mode 100644 docs/flowshart.md

diff --git a/docs/flowshart.md b/docs/flowshart.md
new file mode 100644
index 00000000..02a51985
--- /dev/null
+++ b/docs/flowshart.md
@@ -0,0 +1,22 @@
+# Flowchart
+
+## Upload document to datastore
+
+```mermaid
+  sequenceDiagram;
+    authentic source->>datastore: POST /notification;
+    datastore->>authentic source: 200/400 ;
+```
+
+
+## Fetch a credential
+
+```mermaid
+  sequenceDiagram;
+    wallet->>satosa;
+    satosa->>apigw;
+    apigw->>issuer;
+    issuer->>apigw;
+    apigw->>satosa;
+    satosa->>wallet;
+```

From 610dfe193718d34a76f677c8e5ac30d491aa2a7f Mon Sep 17 00:00:00 2001
From: masv3971
Date: Thu, 5 Sep 2024 11:00:22 +0200
Subject: [PATCH 02/25] Add comment to flowchart.

---
 docs/flowshart.md | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/docs/flowshart.md b/docs/flowshart.md
index 02a51985..2cdb4724 100644
--- a/docs/flowshart.md
+++ b/docs/flowshart.md
@@ -13,10 +13,10 @@
 
 ```mermaid
   sequenceDiagram;
-    wallet->>satosa;
-    satosa->>apigw;
-    apigw->>issuer;
-    issuer->>apigw;
-    apigw->>satosa;
-    satosa->>wallet;
+    wallet->>satosa: openID Federation;
+    satosa->>apigw: POST /credential;
+    apigw->>issuer: gRPC makeSDJWT();
+    issuer->>apigw: Callback;
+    apigw->>satosa: Callback;
+    satosa->>wallet openID Federation;
 ```

From bba8b5c7b4e1665730398780ae497ae4e6bd6089 Mon Sep 17 00:00:00 2001
From: masv3971
Date: Thu, 5 Sep 2024 11:01:36 +0200
Subject: [PATCH 03/25] Fix formatting.

---
 docs/flowshart.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/flowshart.md b/docs/flowshart.md
index 2cdb4724..c8df0314 100644
--- a/docs/flowshart.md
+++ b/docs/flowshart.md
@@ -18,5 +18,5 @@
     apigw->>issuer: gRPC makeSDJWT();
     issuer->>apigw: Callback;
     apigw->>satosa: Callback;
-    satosa->>wallet openID Federation;
+    satosa->>wallet: openID Federation;
 ```

From 930b85e04a8250bd6d0e179048862dc8e3ff9289 Mon Sep 17 00:00:00 2001
From: masv3971
Date: Thu, 5 Sep 2024 13:11:45 +0200
Subject: [PATCH 04/25] New vendor, go.* and flowchart.
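Note: the sequence diagrams in the patches above describe the flows only at the level of endpoints and message names. As a rough, non-authoritative sketch of the first flow (an authentic source uploading a notification to the datastore), the Go snippet below shows what such a client call could look like. The `/notification` path and the 200/400 responses are taken from the diagram; the base URL, the payload fields, and all type and function names are hypothetical, since the patches do not define the datastore's schema.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Notification is a hypothetical payload; the real schema is defined
// by the datastore API, not by these patches.
type Notification struct {
	DocumentID      string `json:"document_id"`
	AuthenticSource string `json:"authentic_source"`
}

// uploadNotification POSTs a notification to the datastore, mirroring
// the "Upload document to datastore" sequence diagram.
func uploadNotification(baseURL string, n Notification) error {
	body, err := json.Marshal(n)
	if err != nil {
		return err
	}
	resp, err := http.Post(baseURL+"/notification", "application/json", bytes.NewReader(body))
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	switch resp.StatusCode {
	case http.StatusOK: // 200: the datastore accepted the document
		return nil
	case http.StatusBadRequest: // 400: the datastore rejected the notification
		return fmt.Errorf("datastore rejected notification for document %s", n.DocumentID)
	default:
		return fmt.Errorf("unexpected status %d from datastore", resp.StatusCode)
	}
}

func main() {
	n := Notification{DocumentID: "doc-123", AuthenticSource: "example-source"}
	if err := uploadNotification("http://localhost:8080", n); err != nil {
		fmt.Println("upload failed:", err)
	}
}
```

The credential flow would be wired the same way at the edges — satosa calling the apigw over HTTP (`POST /credential`) and the apigw reaching the issuer over gRPC (`makeSDJWT()`) — but its message types are not part of these patches, so it is left as a diagram only.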
--- docs/flowshart.md | 16 +- go.mod | 88 +- go.sum | 208 +- internal/apigw/httpserver/service.go | 2 +- vendor/github.com/bytedance/sonic/.gitignore | 4 +- vendor/github.com/bytedance/sonic/README.md | 24 +- .../bytedance/sonic/README_ZH_CN.md | 24 +- vendor/github.com/bytedance/sonic/api.go | 28 + vendor/github.com/bytedance/sonic/ast/api.go | 6 +- .../bytedance/sonic/ast/api_compat.go | 4 +- .../github.com/bytedance/sonic/ast/buffer.go | 89 +- .../github.com/bytedance/sonic/ast/decode.go | 11 +- .../github.com/bytedance/sonic/ast/encode.go | 55 +- .../github.com/bytedance/sonic/ast/error.go | 6 +- .../bytedance/sonic/ast/iterator.go | 21 +- vendor/github.com/bytedance/sonic/ast/node.go | 303 +- .../github.com/bytedance/sonic/ast/parser.go | 142 +- .../github.com/bytedance/sonic/ast/search.go | 31 +- .../github.com/bytedance/sonic/ast/stubs.go | 142 + .../bytedance/sonic/ast/stubs_go115.go | 55 - .../bytedance/sonic/ast/stubs_go120.go | 55 - .../github.com/bytedance/sonic/ast/visitor.go | 45 +- vendor/github.com/bytedance/sonic/compat.go | 4 +- .../bytedance/sonic/decoder/decoder_compat.go | 4 +- .../{decoder_amd64.go => decoder_native.go} | 36 +- .../bytedance/sonic/encoder/encoder_compat.go | 4 +- .../{encoder_amd64.go => encoder_native.go} | 5 +- vendor/github.com/bytedance/sonic/go.work.sum | 1 + .../sonic/internal/base64/b64_amd64.go | 46 + .../sonic/internal/base64/b64_compat.go | 44 + .../bytedance/sonic/internal/cpu/features.go | 4 +- .../internal/decoder/{ => api}/decoder.go | 135 +- .../internal/decoder/api/decoder_amd64.go | 38 + .../api/decoder_arm64.go} | 26 +- .../internal/decoder/{ => api}/stream.go | 27 +- .../internal/decoder/asm_stubs_amd64_go116.go | 130 - .../decoder/assembler_stkabi_amd64.go | 1950 -- .../sonic/internal/decoder/consts/option.go | 33 + .../internal/decoder/{ => errors}/errors.go | 16 +- .../internal/decoder/generic_stkabi_amd64.go | 733 - .../decoder/generic_stkabi_amd64_test.s | 37 - .../sonic/internal/decoder/{ => jitdec}/asm.s | 0 .../{ => jitdec}/asm_stubs_amd64_go117.go | 2 +- .../{ => jitdec}/asm_stubs_amd64_go121.go | 4 +- .../{ => jitdec}/assembler_regabi_amd64.go | 41 +- .../internal/decoder/{ => jitdec}/compiler.go | 79 +- .../internal/decoder/{ => jitdec}/debug.go | 2 +- .../sonic/internal/decoder/jitdec/decoder.go | 139 + .../{ => jitdec}/generic_regabi_amd64.go | 4 +- .../{ => jitdec}/generic_regabi_amd64_test.s | 2 +- .../internal/decoder/{ => jitdec}/pools.go | 4 +- .../decoder/{ => jitdec}/primitives.go | 9 +- .../decoder/{ => jitdec}/stubs_go116.go | 9 +- .../decoder/{ => jitdec}/stubs_go120.go | 7 +- .../internal/decoder/{ => jitdec}/types.go | 2 +- .../internal/decoder/{ => jitdec}/utils.go | 2 +- .../internal/decoder/optdec/compile_struct.go | 174 + .../sonic/internal/decoder/optdec/compiler.go | 449 + .../sonic/internal/decoder/optdec/const.go | 60 + .../sonic/internal/decoder/optdec/context.go | 3 + .../sonic/internal/decoder/optdec/decoder.go | 160 + .../sonic/internal/decoder/optdec/errors.go | 73 + .../sonic/internal/decoder/optdec/functor.go | 281 + .../sonic/internal/decoder/optdec/helper.go | 101 + .../internal/decoder/optdec/interface.go | 169 + .../sonic/internal/decoder/optdec/map.go | 430 + .../sonic/internal/decoder/optdec/native.go | 269 + .../sonic/internal/decoder/optdec/node.go | 1279 ++ .../sonic/internal/decoder/optdec/slice.go | 224 + .../internal/decoder/optdec/stringopts.go | 360 + .../sonic/internal/decoder/optdec/structs.go | 61 + .../sonic/internal/decoder/optdec/types.go | 60 + .../internal/encoder/{ => 
alg}/mapiter.go | 89 +- .../sonic/internal/encoder/alg/opts.go | 31 + .../sonic/internal/encoder/alg/primitives.go | 95 + .../sonic/internal/encoder/{ => alg}/sort.go | 2 +- .../sonic/internal/encoder/alg/spec.go | 198 + .../sonic/internal/encoder/alg/spec_compat.go | 148 + .../internal/encoder/asm_stubs_amd64_go116.go | 51 - .../encoder/assembler_regabi_amd64.go | 1176 -- .../encoder/assembler_stkabi_amd64.go | 1175 -- .../sonic/internal/encoder/compiler.go | 1517 +- .../sonic/internal/encoder/debug_go117.go | 205 - .../sonic/internal/encoder/encode_norace.go | 24 + .../sonic/internal/encoder/encode_race.go | 54 + .../sonic/internal/encoder/encoder.go | 171 +- .../bytedance/sonic/internal/encoder/ir/op.go | 473 + .../bytedance/sonic/internal/encoder/pools.go | 193 - .../sonic/internal/encoder/pools_amd64.go | 97 + .../sonic/internal/encoder/pools_compt.go | 24 + .../sonic/internal/encoder/primitives.go | 167 - .../sonic/internal/encoder/stream.go | 28 +- .../sonic/internal/encoder/stubs_go116.go | 61 - .../sonic/internal/encoder/stubs_go117.go | 62 - .../sonic/internal/encoder/stubs_go120.go | 62 - .../sonic/internal/encoder/stubs_go121.go | 62 - .../bytedance/sonic/internal/encoder/utils.go | 52 - .../sonic/internal/encoder/vars/cache.go | 48 + .../sonic/internal/encoder/vars/const.go | 42 + .../internal/encoder/{ => vars}/errors.go | 20 +- .../sonic/internal/encoder/vars/stack.go | 146 + .../internal/encoder/{ => vars}/types.go | 20 +- .../sonic/internal/encoder/vm/stbus.go | 45 + .../bytedance/sonic/internal/encoder/vm/vm.go | 374 + .../{ => x86}/asm_stubs_amd64_go117.go | 20 +- .../{ => x86}/asm_stubs_amd64_go121.go | 22 +- .../encoder/x86/assembler_regabi_amd64.go | 1194 ++ .../internal/encoder/{ => x86}/debug_go116.go | 4 +- .../sonic/internal/encoder/x86/debug_go117.go | 201 + .../sonic/internal/encoder/x86/stbus.go | 54 + .../bytedance/sonic/internal/envs/decode.go | 24 + .../bytedance/sonic/internal/jit/runtime.go | 2 +- .../sonic/internal/native/avx/f32toa_subr.go | 44 - .../internal/native/avx/f32toa_text_amd64.go | 964 - .../sonic/internal/native/avx/f64toa_subr.go | 46 - .../internal/native/avx/f64toa_text_amd64.go | 2402 --- .../sonic/internal/native/avx/get_by_path.go | 37 - .../internal/native/avx/get_by_path_subr.go | 45 - .../native/avx/get_by_path_text_amd64.go | 6049 ------ .../internal/native/avx/html_escape_subr.go | 45 - .../native/avx/html_escape_text_amd64.go | 620 - .../sonic/internal/native/avx/i64toa_subr.go | 47 - .../internal/native/avx/i64toa_text_amd64.go | 639 - .../sonic/internal/native/avx/lspace_subr.go | 37 - .../internal/native/avx/lspace_text_amd64.go | 44 - .../internal/native/avx/native_export.go | 47 - .../sonic/internal/native/avx/quote.go | 35 - .../sonic/internal/native/avx/quote_subr.go | 46 - .../internal/native/avx/quote_text_amd64.go | 1085 -- .../sonic/internal/native/avx/skip_array.go | 37 - .../internal/native/avx/skip_array_subr.go | 46 - .../native/avx/skip_array_text_amd64.go | 2854 --- .../sonic/internal/native/avx/skip_number.go | 36 - .../internal/native/avx/skip_number_subr.go | 46 - .../native/avx/skip_number_text_amd64.go | 394 - .../sonic/internal/native/avx/skip_object.go | 37 - .../internal/native/avx/skip_object_subr.go | 46 - .../native/avx/skip_object_text_amd64.go | 2854 --- .../sonic/internal/native/avx/skip_one.go | 37 - .../internal/native/avx/skip_one_fast_subr.go | 45 - .../native/avx/skip_one_fast_text_amd64.go | 945 - .../internal/native/avx/skip_one_subr.go | 46 - .../native/avx/skip_one_text_amd64.go | 2830 --- 
.../sonic/internal/native/avx/u64toa_subr.go | 39 - .../internal/native/avx/u64toa_text_amd64.go | 371 - .../sonic/internal/native/avx/unquote.go | 36 - .../sonic/internal/native/avx/unquote_subr.go | 46 - .../internal/native/avx/unquote_text_amd64.go | 639 - .../sonic/internal/native/avx/validate_one.go | 37 - .../internal/native/avx/validate_one_subr.go | 46 - .../native/avx/validate_one_text_amd64.go | 2851 --- .../internal/native/avx/validate_utf8.go | 39 - .../internal/native/avx/validate_utf8_fast.go | 36 - .../native/avx/validate_utf8_fast_subr.go | 41 - .../avx/validate_utf8_fast_text_amd64.go | 156 - .../internal/native/avx/validate_utf8_subr.go | 44 - .../native/avx/validate_utf8_text_amd64.go | 191 - .../sonic/internal/native/avx/value.go | 33 - .../sonic/internal/native/avx/value_subr.go | 46 - .../internal/native/avx/value_text_amd64.go | 5639 ------ .../sonic/internal/native/avx/vnumber.go | 33 - .../sonic/internal/native/avx/vnumber_subr.go | 46 - .../internal/native/avx/vnumber_text_amd64.go | 4387 ----- .../sonic/internal/native/avx/vsigned.go | 33 - .../sonic/internal/native/avx/vsigned_subr.go | 50 - .../internal/native/avx/vsigned_text_amd64.go | 112 - .../sonic/internal/native/avx/vstring.go | 33 - .../sonic/internal/native/avx/vstring_subr.go | 46 - .../internal/native/avx/vstring_text_amd64.go | 643 - .../sonic/internal/native/avx/vunsigned.go | 33 - .../internal/native/avx/vunsigned_subr.go | 43 - .../native/avx/vunsigned_text_amd64.go | 108 - .../sonic/internal/native/avx2/f32toa.go | 7 +- .../sonic/internal/native/avx2/f32toa_subr.go | 36 +- .../internal/native/avx2/f32toa_text_amd64.go | 2006 +- .../sonic/internal/native/avx2/f64toa.go | 2 - .../sonic/internal/native/avx2/f64toa_subr.go | 38 +- .../internal/native/avx2/f64toa_text_amd64.go | 4876 ++--- .../sonic/internal/native/avx2/get_by_path.go | 2 - .../internal/native/avx2/get_by_path_subr.go | 35 +- .../native/avx2/get_by_path_text_amd64.go | 12637 +++++++------ .../sonic/internal/native/avx2/html_escape.go | 2 - .../internal/native/avx2/html_escape_subr.go | 32 +- .../native/avx2/html_escape_text_amd64.go | 1063 +- .../sonic/internal/native/avx2/i64toa.go | 2 - .../sonic/internal/native/avx2/i64toa_subr.go | 36 +- .../internal/native/avx2/i64toa_text_amd64.go | 1154 +- .../i64toa.go => avx2/lookup_small_key.go} | 10 +- .../native/avx2/lookup_small_key_subr.go | 45 + .../avx2/lookup_small_key_text_amd64.go | 218 + .../sonic/internal/native/avx2/lspace.go | 3 +- .../sonic/internal/native/avx2/lspace_subr.go | 17 +- .../internal/native/avx2/lspace_text_amd64.go | 130 +- .../internal/native/avx2/native_export.go | 6 +- .../lspace.go => avx2/parse_with_padding.go} | 10 +- .../native/avx2/parse_with_padding_subr.go | 46 + .../avx2/parse_with_padding_text_amd64.go | 15205 ++++++++++++++++ .../sonic/internal/native/avx2/quote.go | 2 - .../sonic/internal/native/avx2/quote_subr.go | 36 +- .../internal/native/avx2/quote_text_amd64.go | 1730 +- .../sonic/internal/native/avx2/skip_array.go | 2 - .../internal/native/avx2/skip_array_subr.go | 38 +- .../native/avx2/skip_array_text_amd64.go | 5980 +++--- .../sonic/internal/native/avx2/skip_number.go | 2 - .../internal/native/avx2/skip_number_subr.go | 34 +- .../native/avx2/skip_number_text_amd64.go | 915 +- .../sonic/internal/native/avx2/skip_object.go | 2 - .../internal/native/avx2/skip_object_subr.go | 38 +- .../native/avx2/skip_object_text_amd64.go | 5980 +++--- .../sonic/internal/native/avx2/skip_one.go | 2 - .../internal/native/avx2/skip_one_fast.go | 2 - 
.../native/avx2/skip_one_fast_subr.go | 33 +- .../native/avx2/skip_one_fast_text_amd64.go | 1672 +- .../internal/native/avx2/skip_one_subr.go | 38 +- .../native/avx2/skip_one_text_amd64.go | 6094 +++---- .../sonic/internal/native/avx2/u64toa.go | 2 - .../sonic/internal/native/avx2/u64toa_subr.go | 20 +- .../internal/native/avx2/u64toa_text_amd64.go | 621 +- .../sonic/internal/native/avx2/unquote.go | 2 - .../internal/native/avx2/unquote_subr.go | 36 +- .../native/avx2/unquote_text_amd64.go | 1246 +- .../internal/native/avx2/validate_one.go | 2 - .../internal/native/avx2/validate_one_subr.go | 38 +- .../native/avx2/validate_one_text_amd64.go | 6105 +++---- .../internal/native/avx2/validate_utf8.go | 2 - .../native/avx2/validate_utf8_fast.go | 2 - .../native/avx2/validate_utf8_fast_subr.go | 24 +- .../avx2/validate_utf8_fast_text_amd64.go | 852 +- .../native/avx2/validate_utf8_subr.go | 30 +- .../native/avx2/validate_utf8_text_amd64.go | 220 +- .../sonic/internal/native/avx2/value_subr.go | 38 +- .../internal/native/avx2/value_text_amd64.go | 11152 ++++++------ .../internal/native/avx2/vnumber_subr.go | 35 +- .../native/avx2/vnumber_text_amd64.go | 8521 +++++---- .../internal/native/avx2/vsigned_subr.go | 42 +- .../native/avx2/vsigned_text_amd64.go | 194 +- .../internal/native/avx2/vstring_subr.go | 36 +- .../native/avx2/vstring_text_amd64.go | 1040 +- .../internal/native/avx2/vunsigned_subr.go | 44 +- .../native/avx2/vunsigned_text_amd64.go | 211 +- .../sonic/internal/native/dispatch_amd64.go | 75 +- .../sonic/internal/native/dispatch_arm64.go | 15 + .../sonic/internal/native/f32toa.tmpl | 7 +- .../sonic/internal/native/f64toa.tmpl | 2 - .../sonic/internal/native/fastfloat_test.tmpl | 2 - .../sonic/internal/native/fastint_test.tmpl | 2 - .../sonic/internal/native/get_by_path.tmpl | 2 - .../sonic/internal/native/html_escape.tmpl | 2 - .../sonic/internal/native/i64toa.tmpl | 2 - .../html_escape.go => lookup_small_key.tmpl} | 11 +- .../sonic/internal/native/lspace.tmpl | 3 +- .../sonic/internal/native/native_export.tmpl | 6 +- .../sonic/internal/native/native_test.tmpl | 4 +- .../internal/native/neon/f32toa_arm64.go | 2 - .../sonic/internal/native/neon/f32toa_arm64.s | 2 +- .../internal/native/neon/f64toa_arm64.go | 2 - .../sonic/internal/native/neon/f64toa_arm64.s | 2 +- .../internal/native/neon/get_by_path_arm64.go | 2 - .../internal/native/neon/html_escape_arm64.go | 2 - .../internal/native/neon/i64toa_arm64.go | 2 - .../native/neon/lookup_small_key_arm64.go | 31 + .../native/neon/lookup_small_key_arm64.s | 354 + .../neon/lookup_small_key_subr_arm64.go | 25 + .../internal/native/neon/lspace_arm64.go | 2 - .../native/neon/native_export_arm64.go | 2 + .../native/neon/parse_with_padding_arm64.go | 30 + .../native/neon/parse_with_padding_arm64.s | 14122 ++++++++++++++ .../neon/parse_with_padding_subr_arm64.go | 25 + .../sonic/internal/native/neon/quote_arm64.go | 2 - .../internal/native/neon/skip_array_arm64.go | 2 - .../internal/native/neon/skip_number_arm64.go | 2 - .../internal/native/neon/skip_object_arm64.go | 2 - .../internal/native/neon/skip_one_arm64.go | 2 - .../native/neon/skip_one_fast_arm64.go | 2 - .../internal/native/neon/u64toa_arm64.go | 2 - .../internal/native/neon/unquote_arm64.go | 2 - .../native/neon/validate_one_arm64.go | 2 - .../native/neon/validate_utf8_arm64.go | 2 - .../native/neon/validate_utf8_fast_arm64.go | 2 - .../f32toa.go => parse_with_padding.tmpl} | 10 +- .../sonic/internal/native/quote.tmpl | 2 - .../sonic/internal/native/recover_test.tmpl | 59 +- 
.../sonic/internal/native/skip_array.tmpl | 2 - .../sonic/internal/native/skip_number.tmpl | 2 - .../sonic/internal/native/skip_object.tmpl | 2 - .../sonic/internal/native/skip_one.tmpl | 2 - .../sonic/internal/native/skip_one_fast.tmpl | 2 - .../sonic/internal/native/sse/f32toa.go | 7 +- .../sonic/internal/native/sse/f32toa_subr.go | 34 +- .../internal/native/sse/f32toa_text_amd64.go | 1971 +- .../sonic/internal/native/sse/f64toa.go | 2 - .../sonic/internal/native/sse/f64toa_subr.go | 36 +- .../internal/native/sse/f64toa_text_amd64.go | 4829 ++--- .../sonic/internal/native/sse/get_by_path.go | 2 - .../internal/native/sse/get_by_path_subr.go | 36 +- .../native/sse/get_by_path_text_amd64.go | 12674 +++++++------ .../sonic/internal/native/sse/html_escape.go | 2 - .../internal/native/sse/html_escape_subr.go | 34 +- .../native/sse/html_escape_text_amd64.go | 714 +- .../sonic/internal/native/sse/i64toa.go | 2 - .../sonic/internal/native/sse/i64toa_subr.go | 36 +- .../internal/native/sse/i64toa_text_amd64.go | 1182 +- .../lookup_small_key.go} | 10 +- .../native/sse/lookup_small_key_subr.go | 45 + .../native/sse/lookup_small_key_text_amd64.go | 242 + .../sonic/internal/native/sse/lspace.go | 3 +- .../sonic/internal/native/sse/lspace_subr.go | 12 +- .../internal/native/sse/lspace_text_amd64.go | 57 +- .../internal/native/sse/native_export.go | 6 +- .../f64toa.go => sse/parse_with_padding.go} | 11 +- .../native/sse/parse_with_padding_subr.go | 46 + .../sse/parse_with_padding_text_amd64.go | 14926 +++++++++++++++ .../sonic/internal/native/sse/quote.go | 2 - .../sonic/internal/native/sse/quote_subr.go | 36 +- .../internal/native/sse/quote_text_amd64.go | 1132 +- .../sonic/internal/native/sse/skip_array.go | 2 - .../internal/native/sse/skip_array_subr.go | 36 +- .../native/sse/skip_array_text_amd64.go | 5898 +++--- .../sonic/internal/native/sse/skip_number.go | 2 - .../internal/native/sse/skip_number_subr.go | 34 +- .../native/sse/skip_number_text_amd64.go | 681 +- .../sonic/internal/native/sse/skip_object.go | 2 - .../internal/native/sse/skip_object_subr.go | 36 +- .../native/sse/skip_object_text_amd64.go | 5898 +++--- .../sonic/internal/native/sse/skip_one.go | 2 - .../internal/native/sse/skip_one_fast.go | 2 - .../internal/native/sse/skip_one_fast_subr.go | 34 +- .../native/sse/skip_one_fast_text_amd64.go | 2009 +- .../internal/native/sse/skip_one_subr.go | 36 +- .../native/sse/skip_one_text_amd64.go | 5869 +++--- .../sonic/internal/native/sse/u64toa.go | 2 - .../sonic/internal/native/sse/u64toa_subr.go | 20 +- .../internal/native/sse/u64toa_text_amd64.go | 635 +- .../sonic/internal/native/sse/unquote.go | 2 - .../sonic/internal/native/sse/unquote_subr.go | 36 +- .../internal/native/sse/unquote_text_amd64.go | 1168 +- .../sonic/internal/native/sse/validate_one.go | 2 - .../internal/native/sse/validate_one_subr.go | 36 +- .../native/sse/validate_one_text_amd64.go | 5901 +++--- .../internal/native/sse/validate_utf8.go | 2 - .../internal/native/sse/validate_utf8_fast.go | 2 - .../native/sse/validate_utf8_fast_subr.go | 22 +- .../sse/validate_utf8_fast_text_amd64.go | 128 +- .../internal/native/sse/validate_utf8_subr.go | 30 +- .../native/sse/validate_utf8_text_amd64.go | 220 +- .../sonic/internal/native/sse/value_subr.go | 38 +- .../internal/native/sse/value_text_amd64.go | 10899 ++++++----- .../sonic/internal/native/sse/vnumber_subr.go | 35 +- .../internal/native/sse/vnumber_text_amd64.go | 8171 ++++----- .../sonic/internal/native/sse/vsigned_subr.go | 42 +- 
.../internal/native/sse/vsigned_text_amd64.go | 204 +- .../sonic/internal/native/sse/vstring_subr.go | 36 +- .../internal/native/sse/vstring_text_amd64.go | 1272 +- .../internal/native/sse/vunsigned_subr.go | 44 +- .../native/sse/vunsigned_text_amd64.go | 197 +- .../internal/native/traceback_test.mock_tmpl | 379 + .../sonic/internal/native/types/types.go | 2 +- .../sonic/internal/native/u64toa.tmpl | 2 - .../sonic/internal/native/unquote.tmpl | 2 - .../sonic/internal/native/validate_one.tmpl | 2 - .../sonic/internal/native/validate_utf8.tmpl | 2 - .../internal/native/validate_utf8_fast.tmpl | 2 - .../internal/{encoder => optcaching}/asm.s | 0 .../sonic/internal/optcaching/fcache.go | 362 + .../bytedance/sonic/internal/rt/asm_amd64.s | 43 +- .../internal/rt/{asm_compat.s => asm_arm64.s} | 3 +- .../bytedance/sonic/internal/rt/assertI2I.go | 42 + .../sonic/internal/rt/base64_amd64.go | 20 + .../sonic/internal/rt/base64_compat.go | 20 + .../bytedance/sonic/internal/rt/fastconv.go | 175 + .../bytedance/sonic/internal/rt/fastmem.go | 36 +- .../bytedance/sonic/internal/rt/fastvalue.go | 236 +- .../bytedance/sonic/internal/rt/gcwb.go | 55 +- .../rt/gcwb_legacy.go} | 18 +- .../bytedance/sonic/internal/rt/growslice.go | 36 + .../rt/growslice_legacy.go} | 26 +- .../bytedance/sonic/internal/rt/pool.go | 31 + .../bytedance/sonic/internal/rt/stackmap.go | 2 +- .../bytedance/sonic/internal/rt/stubs.go | 165 + .../bytedance/sonic/internal/rt/table.go | 118 + .../bytedance/sonic/internal/rt/types.go | 45 + .../bytedance/sonic/loader/funcdata_compat.go | 4 +- .../bytedance/sonic/loader/funcdata_go116.go | 4 +- .../bytedance/sonic/loader/funcdata_go123.go | 118 + .../bytedance/sonic/loader/funcdata_latest.go | 4 +- .../bytedance/sonic/loader/loader_latest.go | 2 +- .../bytedance/sonic/option/option.go | 12 +- vendor/github.com/bytedance/sonic/sonic.go | 8 +- .../github.com/bytedance/sonic/utf8/utf8.go | 12 +- .../gabriel-vasile/mimetype/LICENSE | 2 +- .../gabriel-vasile/mimetype/README.md | 5 +- .../mimetype/internal/json/json.go | 27 +- .../mimetype/internal/magic/archive.go | 104 +- .../mimetype/internal/magic/magic.go | 5 +- .../mimetype/internal/magic/ms_office.go | 70 +- .../mimetype/internal/magic/text.go | 46 +- .../mimetype/internal/magic/text_csv.go | 22 +- .../mimetype/internal/magic/zip.go | 13 +- .../gabriel-vasile/mimetype/mimetype.gif | Bin 1343793 -> 0 bytes .../gabriel-vasile/mimetype/mimetype.go | 8 +- .../mimetype/supported_mimes.md | 2 +- .../gabriel-vasile/mimetype/tree.go | 2 +- .../github.com/gin-contrib/gzip/.golangci.yml | 10 - .../gin-contrib/gzip/.goreleaser.yaml | 29 + vendor/github.com/gin-gonic/gin/.gitignore | 4 + vendor/github.com/gin-gonic/gin/.golangci.yml | 1 - .../github.com/gin-gonic/gin/.goreleaser.yaml | 29 +- vendor/github.com/gin-gonic/gin/Makefile | 1 + vendor/github.com/gin-gonic/gin/auth.go | 25 + .../gin-gonic/gin/binding/binding.go | 27 +- .../gin/binding/binding_nomsgpack.go | 3 +- .../gin/binding/default_validator.go | 5 +- .../gin-gonic/gin/binding/form_mapping.go | 28 + vendor/github.com/gin-gonic/gin/codecov.yml | 13 + vendor/github.com/gin-gonic/gin/context.go | 52 +- vendor/github.com/gin-gonic/gin/debug.go | 20 +- vendor/github.com/gin-gonic/gin/deprecated.go | 2 + vendor/github.com/gin-gonic/gin/gin.go | 37 +- vendor/github.com/gin-gonic/gin/logger.go | 53 +- .../github.com/gin-gonic/gin/render/render.go | 32 +- .../github.com/gin-gonic/gin/render/yaml.go | 2 +- vendor/github.com/gin-gonic/gin/tree.go | 43 +- vendor/github.com/gin-gonic/gin/version.go | 2 
+- vendor/github.com/go-logr/logr/README.md | 1 + vendor/github.com/go-logr/logr/funcr/funcr.go | 169 +- .../go-playground/validator/v10/README.md | 3 +- .../go-playground/validator/v10/baked_in.go | 210 +- .../go-playground/validator/v10/cache.go | 2 +- .../validator/v10/country_codes.go | 2305 +-- .../validator/v10/currency_codes.go | 148 +- .../go-playground/validator/v10/doc.go | 29 +- .../validator/v10/postcode_regexes.go | 12 +- .../go-playground/validator/v10/regexes.go | 158 +- .../go-playground/validator/v10/util.go | 5 +- .../validator/v10/validator_instance.go | 8 +- vendor/github.com/goccy/go-json/.golangci.yml | 3 + vendor/github.com/goccy/go-json/Makefile | 2 +- vendor/github.com/goccy/go-json/encode.go | 4 +- .../goccy/go-json/internal/decoder/ptr.go | 1 + .../internal/decoder/unmarshal_text.go | 2 +- .../goccy/go-json/internal/encoder/compact.go | 2 +- .../go-json/internal/encoder/compiler.go | 2 +- .../goccy/go-json/internal/encoder/int.go | 24 + .../goccy/go-json/internal/encoder/string.go | 24 + .../goccy/go-json/internal/runtime/rtype.go | 1 - vendor/github.com/goccy/go-json/json.go | 35 +- vendor/github.com/gorilla/sessions/LICENSE | 2 +- vendor/github.com/gorilla/sessions/README.md | 9 +- vendor/github.com/gorilla/sessions/cookie.go | 21 +- .../gorilla/sessions/cookie_go111.go | 21 - vendor/github.com/gorilla/sessions/options.go | 15 +- .../gorilla/sessions/options_go111.go | 23 - vendor/github.com/gorilla/sessions/store.go | 17 +- .../grpc-gateway/v2/runtime/BUILD.bazel | 2 +- .../grpc-gateway/v2/runtime/context.go | 28 +- .../grpc-gateway/v2/runtime/errors.go | 26 +- .../grpc-gateway/v2/runtime/handler.go | 76 +- .../grpc-gateway/v2/runtime/marshal_json.go | 5 + .../grpc-gateway/v2/runtime/marshal_jsonpb.go | 15 +- .../v2/runtime/marshaler_registry.go | 2 +- .../grpc-gateway/v2/runtime/mux.go | 69 +- .../grpc-gateway/v2/runtime/pattern.go | 18 +- .../klauspost/compress/zstd/dict.go | 31 + .../zstd/internal/xxhash/xxhash_arm64.s | 4 +- .../klauspost/compress/zstd/matchlen_amd64.s | 10 +- .../github.com/klauspost/cpuid/v2/README.md | 1 + vendor/github.com/klauspost/cpuid/v2/cpuid.go | 2 + .../klauspost/cpuid/v2/featureid_string.go | 369 +- .../mattn/go-sqlite3/sqlite3-binding.c | 9075 +++++---- .../mattn/go-sqlite3/sqlite3-binding.h | 116 +- vendor/github.com/mattn/go-sqlite3/sqlite3.go | 2 +- .../mattn/go-sqlite3/sqlite3_libsqlite3.go | 1 + .../mattn/go-sqlite3/sqlite3_opt_userauth.go | 2 +- .../pelletier/go-toml/v2/.goreleaser.yaml | 1 + .../github.com/pelletier/go-toml/v2/README.md | 2 +- .../go-toml/v2/internal/tracker/seen.go | 12 +- .../pelletier/go-toml/v2/marshaler.go | 24 +- .../pelletier/go-toml/v2/unmarshaler.go | 45 +- .../github.com/redis/go-redis/v9/CHANGELOG.md | 9 + vendor/github.com/redis/go-redis/v9/Makefile | 7 +- vendor/github.com/redis/go-redis/v9/README.md | 3 - .../redis/go-redis/v9/bitmap_commands.go | 26 +- .../github.com/redis/go-redis/v9/command.go | 12 +- vendor/github.com/redis/go-redis/v9/error.go | 3 + .../redis/go-redis/v9/hash_commands.go | 278 +- .../redis/go-redis/v9/internal/pool/conn.go | 22 + .../go-redis/v9/internal/pool/conn_check.go | 11 +- .../v9/internal/pool/conn_check_dummy.go | 4 +- .../redis/go-redis/v9/internal/pool/pool.go | 10 +- .../redis/go-redis/v9/internal/util.go | 17 + .../github.com/redis/go-redis/v9/options.go | 12 +- .../redis/go-redis/v9/osscluster.go | 76 +- vendor/github.com/redis/go-redis/v9/pubsub.go | 4 +- vendor/github.com/redis/go-redis/v9/redis.go | 11 +- .../redis/go-redis/v9/stream_commands.go | 14 
+- .../github.com/redis/go-redis/v9/version.go | 2 +- vendor/github.com/youmark/pkcs8/.travis.yml | 14 - .../bson/bsoncodec/default_value_decoders.go | 4 +- .../mongo-driver/bson/bsoncodec/uint_codec.go | 8 +- .../bson/bsonrw/extjson_wrappers.go | 4 +- .../mongo-driver/bson/bsonrw/value_reader.go | 12 +- .../mongo-driver/bson/raw_value.go | 8 +- .../mongo-driver/bson/registry.go | 18 +- .../mongo-driver/internal/logger/io_sink.go | 7 +- .../mongo-driver/mongo/change_stream.go | 6 + .../mongo-driver/mongo/client.go | 4 - .../mongo-driver/mongo/collection.go | 14 +- .../mongo-driver/mongo/database.go | 2 +- .../mongo/integration/mtest/mongotest.go | 12 +- .../mongo/options/clientoptions.go | 15 +- .../mongo/options/mongooptions.go | 2 +- .../mongo/options/searchindexoptions.go | 7 + .../mongo-driver/mongo/search_index_view.go | 43 +- .../mongo/writeconcern/writeconcern.go | 2 +- .../mongo-driver/version/version.go | 2 +- .../mongo-driver/x/bsonx/bsoncore/bsoncore.go | 40 +- .../mongo-driver/x/bsonx/bsoncore/doc.go | 23 +- .../mongo-driver/x/mongo/driver/DESIGN.md | 27 - .../x/mongo/driver/auth/creds/doc.go | 14 + .../mongo-driver/x/mongo/driver/auth/doc.go | 21 +- .../x/mongo/driver/compression.go | 21 +- .../x/mongo/driver/connstring/connstring.go | 7 + .../mongo-driver/x/mongo/driver/dns/dns.go | 7 + .../mongo-driver/x/mongo/driver/driver.go | 7 + .../mongocrypt/mongocrypt_not_enabled.go | 7 + .../x/mongo/driver/mongocrypt/options/doc.go | 14 + .../mongo-driver/x/mongo/driver/ocsp/ocsp.go | 7 + .../mongo-driver/x/mongo/driver/operation.go | 21 +- .../driver/operation/create_search_indexes.go | 42 +- .../x/mongo/driver/operation/doc.go | 14 + .../driver/operation/drop_search_index.go | 37 +- .../driver/operation/update_search_index.go | 39 +- .../x/mongo/driver/session/doc.go | 14 + .../x/mongo/driver/topology/connection.go | 4 +- .../x/mongo/driver/topology/rtt_monitor.go | 6 - .../x/mongo/driver/topology/server.go | 9 +- .../x/mongo/driver/topology/topology.go | 17 +- .../x/mongo/driver/wiremessage/wiremessage.go | 40 +- .../contrib/propagators/jaeger/version.go | 2 +- .../go.opentelemetry.io/otel/.codespellignore | 2 + vendor/go.opentelemetry.io/otel/.codespellrc | 2 +- vendor/go.opentelemetry.io/otel/.gitmodules | 3 - vendor/go.opentelemetry.io/otel/.golangci.yml | 8 + vendor/go.opentelemetry.io/otel/CHANGELOG.md | 190 +- vendor/go.opentelemetry.io/otel/CODEOWNERS | 6 +- .../go.opentelemetry.io/otel/CONTRIBUTING.md | 15 +- vendor/go.opentelemetry.io/otel/Makefile | 74 +- vendor/go.opentelemetry.io/otel/README.md | 51 +- vendor/go.opentelemetry.io/otel/RELEASING.md | 7 + .../otel/attribute/value.go | 18 +- .../otel/baggage/baggage.go | 367 +- .../go.opentelemetry.io/otel/codes/codes.go | 2 +- vendor/go.opentelemetry.io/otel/doc.go | 2 + .../otlptrace/internal/tracetransform/span.go | 35 +- .../otlp/otlptrace/otlptracehttp/client.go | 62 +- .../internal/envconfig/envconfig.go | 32 +- .../internal/otlpconfig/options.go | 5 + .../otlptracehttp/internal/retry/retry.go | 2 +- .../otlp/otlptrace/otlptracehttp/options.go | 23 +- .../otel/exporters/otlp/otlptrace/version.go | 2 +- .../otel/internal/attribute/attribute.go | 24 +- .../otel/internal/global/instruments.go | 52 + .../otel/internal/global/meter.go | 23 + .../otel/internal/global/trace.go | 6 +- .../otel/internal/rawhelpers.go | 9 +- .../otel/metric/asyncfloat64.go | 6 +- vendor/go.opentelemetry.io/otel/metric/doc.go | 18 + .../otel/metric/embedded/embedded.go | 20 + .../otel/metric/instrument.go | 22 + 
.../go.opentelemetry.io/otel/metric/meter.go | 77 + .../otel/metric/syncfloat64.go | 60 +- .../otel/metric/syncint64.go | 54 +- .../otel/propagation/trace_context.go | 2 +- vendor/go.opentelemetry.io/otel/renovate.json | 24 + .../go.opentelemetry.io/otel/requirements.txt | 2 +- .../otel/sdk/instrumentation/library.go | 3 +- .../otel/sdk/internal/env/env.go | 2 +- .../otel/sdk/internal/gen.go | 18 - .../otel/sdk/internal/internal.go | 17 - .../otel/sdk/internal/x/README.md | 46 + .../otel/sdk/internal/x/x.go | 66 + .../otel/sdk/resource/builtin.go | 23 +- .../otel/sdk/resource/container.go | 2 +- .../otel/sdk/resource/env.go | 2 +- .../otel/sdk/resource/host_id.go | 2 +- .../otel/sdk/resource/os.go | 2 +- .../otel/sdk/resource/process.go | 2 +- .../otel/sdk/resource/resource.go | 11 +- .../otel/sdk/trace/batch_span_processor.go | 2 +- .../otel/sdk/trace/evictedqueue.go | 36 +- .../otel/sdk/trace/id_generator.go | 21 +- .../otel/sdk/trace/provider.go | 2 +- .../otel/sdk/trace/snapshot.go | 2 +- .../otel/sdk/trace/span.go | 79 +- .../otel/sdk/trace/tracer.go | 4 +- .../go.opentelemetry.io/otel/sdk/version.go | 2 +- .../otel/semconv/v1.24.0/README.md | 3 - .../otel/semconv/v1.24.0/attribute_group.go | 4387 ----- .../otel/semconv/v1.24.0/event.go | 200 - .../otel/semconv/v1.24.0/resource.go | 2545 --- .../otel/semconv/v1.24.0/trace.go | 1323 -- .../otel/semconv/v1.26.0/README.md | 3 + .../otel/semconv/v1.26.0/attribute_group.go | 8996 +++++++++ .../otel/semconv/{v1.24.0 => v1.26.0}/doc.go | 4 +- .../semconv/{v1.24.0 => v1.26.0}/exception.go | 2 +- .../semconv/{v1.24.0 => v1.26.0}/metric.go | 466 +- .../semconv/{v1.24.0 => v1.26.0}/schema.go | 4 +- .../otel/trace/noop/noop.go | 4 +- .../otel/trace/provider.go | 59 + vendor/go.opentelemetry.io/otel/trace/span.go | 177 + .../go.opentelemetry.io/otel/trace/trace.go | 249 - .../go.opentelemetry.io/otel/trace/tracer.go | 37 + .../otel/trace/tracestate.go | 10 + .../otel/verify_released_changelog.sh | 42 + vendor/go.opentelemetry.io/otel/version.go | 2 +- vendor/go.opentelemetry.io/otel/versions.yaml | 12 +- vendor/golang.org/x/arch/LICENSE | 4 +- vendor/golang.org/x/crypto/LICENSE | 4 +- vendor/golang.org/x/crypto/ocsp/ocsp.go | 2 +- vendor/golang.org/x/crypto/pbkdf2/pbkdf2.go | 2 +- vendor/golang.org/x/crypto/scrypt/scrypt.go | 2 +- vendor/golang.org/x/crypto/sha3/doc.go | 2 +- vendor/golang.org/x/crypto/sha3/hashes.go | 42 +- .../x/crypto/sha3/hashes_generic.go | 27 - .../golang.org/x/crypto/sha3/hashes_noasm.go | 23 + .../golang.org/x/crypto/sha3/keccakf_amd64.s | 5787 +++++- vendor/golang.org/x/crypto/sha3/register.go | 18 - vendor/golang.org/x/crypto/sha3/sha3.go | 62 +- vendor/golang.org/x/crypto/sha3/sha3_s390x.go | 67 +- vendor/golang.org/x/crypto/sha3/shake.go | 16 +- .../golang.org/x/crypto/sha3/shake_generic.go | 19 - .../golang.org/x/crypto/sha3/shake_noasm.go | 15 + vendor/golang.org/x/crypto/sha3/xor.go | 45 +- .../golang.org/x/crypto/sha3/xor_generic.go | 28 - .../golang.org/x/crypto/sha3/xor_unaligned.go | 66 - vendor/golang.org/x/net/LICENSE | 4 +- vendor/golang.org/x/net/html/doc.go | 2 +- .../golang.org/x/net/http/httpguts/httplex.go | 13 +- vendor/golang.org/x/net/http2/frame.go | 13 +- vendor/golang.org/x/net/http2/http2.go | 19 +- vendor/golang.org/x/net/http2/server.go | 105 +- vendor/golang.org/x/net/http2/testsync.go | 331 - vendor/golang.org/x/net/http2/timer.go | 20 + vendor/golang.org/x/net/http2/transport.go | 329 +- .../x/net/http2/writesched_priority.go | 4 +- vendor/golang.org/x/net/webdav/webdav.go | 3 + 
vendor/golang.org/x/sync/LICENSE | 4 +- vendor/golang.org/x/sys/LICENSE | 4 +- vendor/golang.org/x/sys/cpu/cpu.go | 22 + vendor/golang.org/x/sys/cpu/cpu_arm64.go | 22 + vendor/golang.org/x/sys/cpu/cpu_arm64.s | 8 + vendor/golang.org/x/sys/cpu/cpu_gc_arm64.go | 1 + .../golang.org/x/sys/cpu/cpu_linux_arm64.go | 10 + .../golang.org/x/sys/cpu/cpu_linux_noinit.go | 2 +- .../golang.org/x/sys/cpu/cpu_linux_riscv64.go | 137 + vendor/golang.org/x/sys/cpu/cpu_riscv64.go | 11 +- vendor/golang.org/x/sys/unix/asm_zos_s390x.s | 665 +- vendor/golang.org/x/sys/unix/bpxsvc_zos.go | 657 + vendor/golang.org/x/sys/unix/bpxsvc_zos.s | 192 + vendor/golang.org/x/sys/unix/epoll_zos.go | 220 - vendor/golang.org/x/sys/unix/fstatfs_zos.go | 163 - vendor/golang.org/x/sys/unix/mkerrors.sh | 4 + vendor/golang.org/x/sys/unix/mremap.go | 5 + vendor/golang.org/x/sys/unix/pagesize_unix.go | 2 +- .../x/sys/unix/readdirent_getdirentries.go | 2 +- vendor/golang.org/x/sys/unix/sockcmsg_zos.go | 58 + .../golang.org/x/sys/unix/symaddr_zos_s390x.s | 75 + .../golang.org/x/sys/unix/syscall_darwin.go | 61 + vendor/golang.org/x/sys/unix/syscall_hurd.go | 1 + vendor/golang.org/x/sys/unix/syscall_linux.go | 1 + .../golang.org/x/sys/unix/syscall_openbsd.go | 1 + vendor/golang.org/x/sys/unix/syscall_unix.go | 9 + .../x/sys/unix/syscall_zos_s390x.go | 1507 +- vendor/golang.org/x/sys/unix/sysvshm_unix.go | 2 +- .../x/sys/unix/sysvshm_unix_other.go | 2 +- .../x/sys/unix/zerrors_darwin_amd64.go | 12 + .../x/sys/unix/zerrors_darwin_arm64.go | 12 + vendor/golang.org/x/sys/unix/zerrors_linux.go | 67 +- .../x/sys/unix/zerrors_linux_386.go | 3 + .../x/sys/unix/zerrors_linux_amd64.go | 3 + .../x/sys/unix/zerrors_linux_arm.go | 2 + .../x/sys/unix/zerrors_linux_arm64.go | 3 + .../x/sys/unix/zerrors_linux_loong64.go | 2 + .../x/sys/unix/zerrors_linux_mips.go | 2 + .../x/sys/unix/zerrors_linux_mips64.go | 2 + .../x/sys/unix/zerrors_linux_mips64le.go | 2 + .../x/sys/unix/zerrors_linux_mipsle.go | 2 + .../x/sys/unix/zerrors_linux_ppc.go | 2 + .../x/sys/unix/zerrors_linux_ppc64.go | 2 + .../x/sys/unix/zerrors_linux_ppc64le.go | 2 + .../x/sys/unix/zerrors_linux_riscv64.go | 2 + .../x/sys/unix/zerrors_linux_s390x.go | 2 + .../x/sys/unix/zerrors_linux_sparc64.go | 2 + .../x/sys/unix/zerrors_zos_s390x.go | 235 +- .../x/sys/unix/zsymaddr_zos_s390x.s | 364 + .../x/sys/unix/zsyscall_darwin_amd64.go | 101 + .../x/sys/unix/zsyscall_darwin_amd64.s | 25 + .../x/sys/unix/zsyscall_darwin_arm64.go | 101 + .../x/sys/unix/zsyscall_darwin_arm64.s | 25 + .../golang.org/x/sys/unix/zsyscall_linux.go | 16 + .../x/sys/unix/zsyscall_openbsd_386.go | 24 + .../x/sys/unix/zsyscall_openbsd_386.s | 5 + .../x/sys/unix/zsyscall_openbsd_amd64.go | 24 + .../x/sys/unix/zsyscall_openbsd_amd64.s | 5 + .../x/sys/unix/zsyscall_openbsd_arm.go | 24 + .../x/sys/unix/zsyscall_openbsd_arm.s | 5 + .../x/sys/unix/zsyscall_openbsd_arm64.go | 24 + .../x/sys/unix/zsyscall_openbsd_arm64.s | 5 + .../x/sys/unix/zsyscall_openbsd_mips64.go | 24 + .../x/sys/unix/zsyscall_openbsd_mips64.s | 5 + .../x/sys/unix/zsyscall_openbsd_ppc64.go | 24 + .../x/sys/unix/zsyscall_openbsd_ppc64.s | 6 + .../x/sys/unix/zsyscall_openbsd_riscv64.go | 24 + .../x/sys/unix/zsyscall_openbsd_riscv64.s | 5 + .../x/sys/unix/zsyscall_zos_s390x.go | 3113 +++- .../x/sys/unix/zsysnum_linux_386.go | 6 + .../x/sys/unix/zsysnum_linux_amd64.go | 6 + .../x/sys/unix/zsysnum_linux_arm.go | 6 + .../x/sys/unix/zsysnum_linux_arm64.go | 6 + .../x/sys/unix/zsysnum_linux_loong64.go | 6 + .../x/sys/unix/zsysnum_linux_mips.go | 6 + 
.../x/sys/unix/zsysnum_linux_mips64.go | 6 + .../x/sys/unix/zsysnum_linux_mips64le.go | 6 + .../x/sys/unix/zsysnum_linux_mipsle.go | 6 + .../x/sys/unix/zsysnum_linux_ppc.go | 6 + .../x/sys/unix/zsysnum_linux_ppc64.go | 6 + .../x/sys/unix/zsysnum_linux_ppc64le.go | 6 + .../x/sys/unix/zsysnum_linux_riscv64.go | 6 + .../x/sys/unix/zsysnum_linux_s390x.go | 6 + .../x/sys/unix/zsysnum_linux_sparc64.go | 6 + .../x/sys/unix/zsysnum_zos_s390x.go | 5507 +++--- .../x/sys/unix/ztypes_darwin_amd64.go | 13 + .../x/sys/unix/ztypes_darwin_arm64.go | 13 + .../x/sys/unix/ztypes_freebsd_386.go | 1 + .../x/sys/unix/ztypes_freebsd_amd64.go | 1 + .../x/sys/unix/ztypes_freebsd_arm.go | 1 + .../x/sys/unix/ztypes_freebsd_arm64.go | 1 + .../x/sys/unix/ztypes_freebsd_riscv64.go | 1 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 69 +- .../golang.org/x/sys/unix/ztypes_linux_386.go | 8 - .../x/sys/unix/ztypes_linux_amd64.go | 9 - .../golang.org/x/sys/unix/ztypes_linux_arm.go | 9 - .../x/sys/unix/ztypes_linux_arm64.go | 9 - .../x/sys/unix/ztypes_linux_loong64.go | 9 - .../x/sys/unix/ztypes_linux_mips.go | 9 - .../x/sys/unix/ztypes_linux_mips64.go | 9 - .../x/sys/unix/ztypes_linux_mips64le.go | 9 - .../x/sys/unix/ztypes_linux_mipsle.go | 9 - .../golang.org/x/sys/unix/ztypes_linux_ppc.go | 9 - .../x/sys/unix/ztypes_linux_ppc64.go | 9 - .../x/sys/unix/ztypes_linux_ppc64le.go | 9 - .../x/sys/unix/ztypes_linux_riscv64.go | 42 +- .../x/sys/unix/ztypes_linux_s390x.go | 9 - .../x/sys/unix/ztypes_linux_sparc64.go | 9 - .../golang.org/x/sys/unix/ztypes_zos_s390x.go | 146 +- vendor/golang.org/x/sys/windows/aliases.go | 2 +- vendor/golang.org/x/sys/windows/empty.s | 8 - .../x/sys/windows/security_windows.go | 25 +- .../x/sys/windows/syscall_windows.go | 16 +- .../golang.org/x/sys/windows/types_windows.go | 72 +- .../x/sys/windows/zsyscall_windows.go | 89 + vendor/golang.org/x/text/LICENSE | 4 +- vendor/golang.org/x/text/cases/cases.go | 162 + vendor/golang.org/x/text/cases/context.go | 376 + vendor/golang.org/x/text/cases/fold.go | 34 + vendor/golang.org/x/text/cases/icu.go | 61 + vendor/golang.org/x/text/cases/info.go | 82 + vendor/golang.org/x/text/cases/map.go | 816 + .../golang.org/x/text/cases/tables10.0.0.go | 2255 +++ .../golang.org/x/text/cases/tables11.0.0.go | 2316 +++ .../golang.org/x/text/cases/tables12.0.0.go | 2359 +++ .../golang.org/x/text/cases/tables13.0.0.go | 2399 +++ .../golang.org/x/text/cases/tables15.0.0.go | 2527 +++ vendor/golang.org/x/text/cases/tables9.0.0.go | 2215 +++ vendor/golang.org/x/text/cases/trieval.go | 217 + vendor/golang.org/x/text/internal/internal.go | 49 + vendor/golang.org/x/text/internal/match.go | 67 + vendor/golang.org/x/tools/LICENSE | 4 +- .../x/tools/go/ast/astutil/enclosing.go | 24 +- .../golang.org/x/tools/go/ast/astutil/util.go | 1 + .../x/tools/internal/versions/constraint.go | 13 + .../internal/versions/constraint_go121.go | 14 + .../x/tools/internal/versions/types_go122.go | 2 +- .../googleapis/api/httpbody/httpbody.pb.go | 4 +- .../googleapis/rpc/status/status.pb.go | 4 +- vendor/google.golang.org/grpc/CONTRIBUTING.md | 2 +- vendor/google.golang.org/grpc/MAINTAINERS.md | 34 +- vendor/google.golang.org/grpc/Makefile | 7 +- vendor/google.golang.org/grpc/README.md | 2 +- vendor/google.golang.org/grpc/SECURITY.md | 2 +- .../google.golang.org/grpc/backoff/backoff.go | 2 +- .../grpc/balancer/balancer.go | 21 + .../{ => balancer/pickfirst}/pickfirst.go | 70 +- .../grpc/balancer/roundrobin/roundrobin.go | 4 +- .../grpc/balancer_wrapper.go | 48 +- 
.../grpc_binarylog_v1/binarylog.pb.go | 4 +- vendor/google.golang.org/grpc/clientconn.go | 202 +- vendor/google.golang.org/grpc/codec.go | 69 +- vendor/google.golang.org/grpc/codegen.sh | 17 - vendor/google.golang.org/grpc/codes/codes.go | 2 +- .../grpc/credentials/credentials.go | 6 +- .../google.golang.org/grpc/credentials/tls.go | 34 +- vendor/google.golang.org/grpc/dialoptions.go | 103 +- vendor/google.golang.org/grpc/doc.go | 2 +- .../grpc/encoding/encoding.go | 5 +- .../grpc/encoding/encoding_v2.go | 81 + .../grpc/encoding/proto/proto.go | 44 +- .../grpc/experimental/stats/metricregistry.go | 270 + .../grpc/experimental/stats/metrics.go | 114 + .../grpc/grpclog/component.go | 10 +- .../google.golang.org/grpc/grpclog/grpclog.go | 104 +- .../grpc/grpclog/internal/grpclog.go | 26 + .../grpc/grpclog/internal/logger.go | 87 + .../internal/loggerv2.go} | 178 +- .../google.golang.org/grpc/grpclog/logger.go | 59 +- .../grpc/grpclog/loggerv2.go | 181 +- .../grpc/health/grpc_health_v1/health.pb.go | 4 +- .../health/grpc_health_v1/health_grpc.pb.go | 85 +- .../grpc/internal/backoff/backoff.go | 4 +- .../balancer/gracefulswitch/config.go | 1 - .../balancer/gracefulswitch/gracefulswitch.go | 1 - .../grpc/internal/binarylog/method_logger.go | 6 +- .../grpc/internal/channelz/channelmap.go | 2 +- .../grpc/internal/envconfig/envconfig.go | 13 +- .../grpc/internal/experimental.go | 8 +- .../{prefixLogger.go => prefix_logger.go} | 40 +- .../grpc/internal/grpcrand/grpcrand.go | 100 - .../grpc/internal/grpcrand/grpcrand_go1.21.go | 73 - .../internal/grpcsync/callback_serializer.go | 24 +- .../grpc/internal/grpcsync/pubsub.go | 4 +- .../grpc/internal/grpcutil/compressor.go | 5 - .../grpc/internal/internal.go | 58 +- .../internal/resolver/dns/dns_resolver.go | 40 +- .../resolver/dns/internal/internal.go | 19 +- .../grpc/internal/stats/labels.go | 42 + .../internal/stats/metrics_recorder_list.go | 95 + .../grpc/internal/tcp_keepalive_unix.go | 2 +- .../grpc/internal/tcp_keepalive_windows.go | 2 +- .../grpc/internal/transport/controlbuf.go | 280 +- .../grpc/internal/transport/handler_server.go | 45 +- .../grpc/internal/transport/http2_client.go | 123 +- .../grpc/internal/transport/http2_server.go | 61 +- .../grpc/internal/transport/http_util.go | 22 +- .../grpc/internal/transport/proxy.go | 10 +- .../grpc/internal/transport/transport.go | 244 +- .../google.golang.org/grpc/mem/buffer_pool.go | 194 + .../grpc/mem/buffer_slice.go | 224 + vendor/google.golang.org/grpc/mem/buffers.go | 252 + .../grpc/metadata/metadata.go | 7 +- vendor/google.golang.org/grpc/peer/peer.go | 30 + .../google.golang.org/grpc/picker_wrapper.go | 84 +- vendor/google.golang.org/grpc/preloader.go | 28 +- vendor/google.golang.org/grpc/regenerate.sh | 123 - .../grpc/resolver/dns/dns_resolver.go | 12 +- .../grpc/resolver_wrapper.go | 11 +- vendor/google.golang.org/grpc/rpc_util.go | 302 +- vendor/google.golang.org/grpc/server.go | 114 +- .../google.golang.org/grpc/service_config.go | 32 +- .../grpc/shared_buffer_pool.go | 154 - vendor/google.golang.org/grpc/stats/stats.go | 12 +- vendor/google.golang.org/grpc/stream.go | 218 +- .../grpc/stream_interfaces.go | 152 + vendor/google.golang.org/grpc/version.go | 2 +- vendor/google.golang.org/grpc/vet.sh | 195 - .../protobuf/encoding/protojson/decode.go | 4 +- .../protobuf/encoding/protojson/encode.go | 20 +- .../protobuf/encoding/prototext/decode.go | 4 +- .../protobuf/encoding/prototext/encode.go | 20 +- .../protobuf/internal/descfmt/stringer.go | 1 + .../editiondefaults/editions_defaults.binpb 
| Bin 63 -> 93 bytes .../protobuf/internal/encoding/json/decode.go | 2 +- .../protobuf/internal/encoding/tag/tag.go | 4 +- .../protobuf/internal/encoding/text/decode.go | 2 +- .../protobuf/internal/errors/errors.go | 21 +- .../protobuf/internal/filedesc/desc.go | 88 +- .../protobuf/internal/filedesc/desc_init.go | 43 +- .../protobuf/internal/filedesc/desc_lazy.go | 49 +- .../internal/filedesc/desc_list_gen.go | 11 + .../protobuf/internal/filedesc/editions.go | 22 +- .../protobuf/internal/filedesc/placeholder.go | 1 + .../protobuf/internal/filetype/build.go | 4 +- .../protobuf/internal/genid/descriptor_gen.go | 49 +- .../internal/genid/go_features_gen.go | 2 +- .../protobuf/internal/impl/api_export.go | 6 +- .../protobuf/internal/impl/checkinit.go | 2 +- .../protobuf/internal/impl/codec_extension.go | 22 + .../protobuf/internal/impl/codec_field.go | 64 +- .../protobuf/internal/impl/codec_map.go | 15 +- .../internal/impl/codec_messageset.go | 22 + .../protobuf/internal/impl/convert.go | 2 +- .../protobuf/internal/impl/convert_list.go | 2 +- .../protobuf/internal/impl/convert_map.go | 2 +- .../protobuf/internal/impl/encode.go | 48 +- .../protobuf/internal/impl/extension.go | 8 +- .../protobuf/internal/impl/legacy_enum.go | 3 +- .../internal/impl/legacy_extension.go | 2 +- .../protobuf/internal/impl/legacy_file.go | 4 +- .../protobuf/internal/impl/legacy_message.go | 14 +- .../protobuf/internal/impl/message.go | 8 +- .../protobuf/internal/impl/message_reflect.go | 45 +- .../internal/impl/message_reflect_gen.go | 146 +- .../protobuf/internal/impl/pointer_reflect.go | 6 +- .../protobuf/internal/impl/pointer_unsafe.go | 4 +- .../protobuf/internal/order/range.go | 4 +- .../protobuf/internal/version/version.go | 4 +- .../protobuf/proto/decode.go | 2 + .../protobuf/proto/encode.go | 44 +- .../protobuf/proto/extension.go | 17 +- .../protobuf/proto/messageset.go | 7 +- .../google.golang.org/protobuf/proto/size.go | 2 + .../protobuf/reflect/protoreflect/proto.go | 2 +- .../reflect/protoreflect/source_gen.go | 21 + .../protobuf/reflect/protoreflect/type.go | 12 +- .../reflect/protoreflect/value_pure.go | 14 +- .../reflect/protoreflect/value_union.go | 14 +- .../protoreflect/value_unsafe_go120.go | 6 +- .../protoreflect/value_unsafe_go121.go | 8 +- .../reflect/protoregistry/registry.go | 14 +- .../protobuf/types/known/anypb/any.pb.go | 4 +- .../types/known/durationpb/duration.pb.go | 4 +- .../types/known/fieldmaskpb/field_mask.pb.go | 4 +- .../types/known/structpb/struct.pb.go | 50 +- .../types/known/timestamppb/timestamp.pb.go | 4 +- .../types/known/wrapperspb/wrappers.pb.go | 20 +- vendor/gorm.io/driver/sqlite/migrator.go | 4 +- vendor/gorm.io/driver/sqlite/sqlite.go | 10 + vendor/gorm.io/gorm/association.go | 14 + vendor/gorm.io/gorm/callbacks/create.go | 4 +- vendor/gorm.io/gorm/callbacks/preload.go | 20 +- vendor/gorm.io/gorm/callbacks/query.go | 6 +- vendor/gorm.io/gorm/chainable_api.go | 22 + vendor/gorm.io/gorm/clause/where.go | 7 +- vendor/gorm.io/gorm/errors.go | 2 + vendor/gorm.io/gorm/gorm.go | 10 + vendor/gorm.io/gorm/migrator/migrator.go | 66 +- vendor/gorm.io/gorm/prepare_stmt.go | 16 + vendor/gorm.io/gorm/scan.go | 20 +- vendor/gorm.io/gorm/schema/field.go | 5 + vendor/gorm.io/gorm/schema/naming.go | 8 +- vendor/gorm.io/gorm/schema/relationship.go | 19 +- vendor/gorm.io/gorm/schema/schema.go | 5 +- vendor/gorm.io/gorm/schema/serializer.go | 5 +- vendor/gorm.io/gorm/statement.go | 6 +- vendor/gorm.io/gorm/utils/utils.go | 16 +- vendor/modules.txt | 137 +- 945 files changed, 186223 
insertions(+), 144457 deletions(-) create mode 100644 vendor/github.com/bytedance/sonic/ast/stubs.go delete mode 100644 vendor/github.com/bytedance/sonic/ast/stubs_go115.go delete mode 100644 vendor/github.com/bytedance/sonic/ast/stubs_go120.go rename vendor/github.com/bytedance/sonic/decoder/{decoder_amd64.go => decoder_native.go} (65%) rename vendor/github.com/bytedance/sonic/encoder/{encoder_amd64.go => encoder_native.go} (95%) create mode 100644 vendor/github.com/bytedance/sonic/go.work.sum create mode 100644 vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go rename vendor/github.com/bytedance/sonic/internal/decoder/{ => api}/decoder.go (57%) create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go rename vendor/github.com/bytedance/sonic/internal/{native/avx/u64toa.go => decoder/api/decoder_arm64.go} (63%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => api}/stream.go (90%) delete mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go rename vendor/github.com/bytedance/sonic/internal/decoder/{ => errors}/errors.go (90%) delete mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/asm.s (100%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/asm_stubs_amd64_go117.go (99%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/asm_stubs_amd64_go121.go (99%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/assembler_regabi_amd64.go (98%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/compiler.go (94%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/debug.go (99%) create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/generic_regabi_amd64.go (99%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/generic_regabi_amd64_test.s (97%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/pools.go (97%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/primitives.go (84%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/stubs_go116.go (94%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/stubs_go120.go (95%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/types.go (99%) rename vendor/github.com/bytedance/sonic/internal/decoder/{ => jitdec}/utils.go (98%) create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go create mode 100644 
vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go create mode 100644 vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => alg}/mapiter.go (52%) create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => alg}/sort.go (99%) create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/alg/spec.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/ir/op.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/pools.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/pools_amd64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/pools_compt.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/primitives.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/stubs_go116.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/stubs_go117.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/stubs_go120.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/stubs_go121.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/utils.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/vars/cache.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/vars/const.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => vars}/errors.go (68%) create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/vars/stack.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => vars}/types.go (63%) create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/vm/stbus.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/vm/vm.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => x86}/asm_stubs_amd64_go117.go (74%) rename vendor/github.com/bytedance/sonic/internal/encoder/{ => x86}/asm_stubs_amd64_go121.go (72%) create mode 100644 
vendor/github.com/bytedance/sonic/internal/encoder/x86/assembler_regabi_amd64.go rename vendor/github.com/bytedance/sonic/internal/encoder/{ => x86}/debug_go116.go (97%) create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/x86/debug_go117.go create mode 100644 vendor/github.com/bytedance/sonic/internal/encoder/x86/stbus.go create mode 100644 vendor/github.com/bytedance/sonic/internal/envs/decode.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/f32toa_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/f32toa_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/f64toa_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/f64toa_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/get_by_path.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/get_by_path_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/get_by_path_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/html_escape_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/html_escape_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/i64toa_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/i64toa_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/lspace_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/lspace_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/native_export.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/quote.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/quote_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/quote_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_array.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_array_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_array_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_number.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_number_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_number_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_object.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_object_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_object_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_one.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_one_fast_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_one_fast_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_one_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/skip_one_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/u64toa_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/u64toa_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/unquote.go delete mode 
100644 vendor/github.com/bytedance/sonic/internal/native/avx/unquote_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/unquote_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_one.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_one_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_one_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8_fast.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8_fast_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8_fast_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/validate_utf8_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/value.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/value_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/value_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vnumber.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vnumber_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vnumber_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vsigned.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vsigned_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vsigned_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vstring.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vstring_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vstring_text_amd64.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vunsigned.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vunsigned_subr.go delete mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx/vunsigned_text_amd64.go rename vendor/github.com/bytedance/sonic/internal/native/{avx/i64toa.go => avx2/lookup_small_key.go} (70%) create mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx2/lookup_small_key_subr.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx2/lookup_small_key_text_amd64.go rename vendor/github.com/bytedance/sonic/internal/native/{avx/lspace.go => avx2/parse_with_padding.go} (77%) create mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx2/parse_with_padding_subr.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/avx2/parse_with_padding_text_amd64.go rename vendor/github.com/bytedance/sonic/internal/native/{avx/html_escape.go => lookup_small_key.tmpl} (70%) create mode 100644 vendor/github.com/bytedance/sonic/internal/native/neon/lookup_small_key_arm64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/neon/lookup_small_key_arm64.s create mode 100644 vendor/github.com/bytedance/sonic/internal/native/neon/lookup_small_key_subr_arm64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/neon/parse_with_padding_arm64.go create mode 100644 
vendor/github.com/bytedance/sonic/internal/native/neon/parse_with_padding_arm64.s create mode 100644 vendor/github.com/bytedance/sonic/internal/native/neon/parse_with_padding_subr_arm64.go rename vendor/github.com/bytedance/sonic/internal/native/{avx/f32toa.go => parse_with_padding.tmpl} (77%) rename vendor/github.com/bytedance/sonic/internal/native/{avx/skip_one_fast.go => sse/lookup_small_key.go} (70%) create mode 100644 vendor/github.com/bytedance/sonic/internal/native/sse/lookup_small_key_subr.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/sse/lookup_small_key_text_amd64.go rename vendor/github.com/bytedance/sonic/internal/native/{avx/f64toa.go => sse/parse_with_padding.go} (77%) create mode 100644 vendor/github.com/bytedance/sonic/internal/native/sse/parse_with_padding_subr.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/sse/parse_with_padding_text_amd64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/native/traceback_test.mock_tmpl rename vendor/github.com/bytedance/sonic/internal/{encoder => optcaching}/asm.s (100%) create mode 100644 vendor/github.com/bytedance/sonic/internal/optcaching/fcache.go rename vendor/github.com/bytedance/sonic/internal/rt/{asm_compat.s => asm_arm64.s} (65%) create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/assertI2I.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/base64_amd64.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/base64_compat.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/fastconv.go rename vendor/github.com/bytedance/sonic/{ast/b64_compat.go => internal/rt/gcwb_legacy.go} (67%) create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/growslice.go rename vendor/github.com/bytedance/sonic/{ast/b64_amd64.go => internal/rt/growslice_legacy.go} (64%) create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/pool.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/stubs.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/table.go create mode 100644 vendor/github.com/bytedance/sonic/internal/rt/types.go create mode 100644 vendor/github.com/bytedance/sonic/loader/funcdata_go123.go delete mode 100644 vendor/github.com/gabriel-vasile/mimetype/mimetype.gif create mode 100644 vendor/github.com/gin-contrib/gzip/.goreleaser.yaml create mode 100644 vendor/github.com/gin-gonic/gin/codecov.yml delete mode 100644 vendor/github.com/gorilla/sessions/cookie_go111.go delete mode 100644 vendor/github.com/gorilla/sessions/options_go111.go delete mode 100644 vendor/github.com/youmark/pkcs8/.travis.yml delete mode 100644 vendor/go.mongodb.org/mongo-driver/x/mongo/driver/DESIGN.md create mode 100644 vendor/go.mongodb.org/mongo-driver/x/mongo/driver/auth/creds/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/mongo/driver/mongocrypt/options/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/mongo/driver/operation/doc.go create mode 100644 vendor/go.mongodb.org/mongo-driver/x/mongo/driver/session/doc.go delete mode 100644 vendor/go.opentelemetry.io/otel/.gitmodules create mode 100644 vendor/go.opentelemetry.io/otel/renovate.json delete mode 100644 vendor/go.opentelemetry.io/otel/sdk/internal/gen.go delete mode 100644 vendor/go.opentelemetry.io/otel/sdk/internal/internal.go create mode 100644 vendor/go.opentelemetry.io/otel/sdk/internal/x/README.md create mode 100644 vendor/go.opentelemetry.io/otel/sdk/internal/x/x.go delete mode 100644 
vendor/go.opentelemetry.io/otel/semconv/v1.24.0/README.md delete mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.24.0/attribute_group.go delete mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.24.0/event.go delete mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.24.0/resource.go delete mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.24.0/trace.go create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.26.0/README.md create mode 100644 vendor/go.opentelemetry.io/otel/semconv/v1.26.0/attribute_group.go rename vendor/go.opentelemetry.io/otel/semconv/{v1.24.0 => v1.26.0}/doc.go (96%) rename vendor/go.opentelemetry.io/otel/semconv/{v1.24.0 => v1.26.0}/exception.go (98%) rename vendor/go.opentelemetry.io/otel/semconv/{v1.24.0 => v1.26.0}/metric.go (77%) rename vendor/go.opentelemetry.io/otel/semconv/{v1.24.0 => v1.26.0}/schema.go (85%) create mode 100644 vendor/go.opentelemetry.io/otel/trace/provider.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/span.go create mode 100644 vendor/go.opentelemetry.io/otel/trace/tracer.go create mode 100644 vendor/go.opentelemetry.io/otel/verify_released_changelog.sh delete mode 100644 vendor/golang.org/x/crypto/sha3/hashes_generic.go create mode 100644 vendor/golang.org/x/crypto/sha3/hashes_noasm.go delete mode 100644 vendor/golang.org/x/crypto/sha3/register.go delete mode 100644 vendor/golang.org/x/crypto/sha3/shake_generic.go create mode 100644 vendor/golang.org/x/crypto/sha3/shake_noasm.go delete mode 100644 vendor/golang.org/x/crypto/sha3/xor_generic.go delete mode 100644 vendor/golang.org/x/crypto/sha3/xor_unaligned.go delete mode 100644 vendor/golang.org/x/net/http2/testsync.go create mode 100644 vendor/golang.org/x/net/http2/timer.go create mode 100644 vendor/golang.org/x/sys/cpu/cpu_linux_riscv64.go create mode 100644 vendor/golang.org/x/sys/unix/bpxsvc_zos.go create mode 100644 vendor/golang.org/x/sys/unix/bpxsvc_zos.s delete mode 100644 vendor/golang.org/x/sys/unix/epoll_zos.go delete mode 100644 vendor/golang.org/x/sys/unix/fstatfs_zos.go create mode 100644 vendor/golang.org/x/sys/unix/sockcmsg_zos.go create mode 100644 vendor/golang.org/x/sys/unix/symaddr_zos_s390x.s create mode 100644 vendor/golang.org/x/sys/unix/zsymaddr_zos_s390x.s delete mode 100644 vendor/golang.org/x/sys/windows/empty.s create mode 100644 vendor/golang.org/x/text/cases/cases.go create mode 100644 vendor/golang.org/x/text/cases/context.go create mode 100644 vendor/golang.org/x/text/cases/fold.go create mode 100644 vendor/golang.org/x/text/cases/icu.go create mode 100644 vendor/golang.org/x/text/cases/info.go create mode 100644 vendor/golang.org/x/text/cases/map.go create mode 100644 vendor/golang.org/x/text/cases/tables10.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables11.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables12.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables13.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables15.0.0.go create mode 100644 vendor/golang.org/x/text/cases/tables9.0.0.go create mode 100644 vendor/golang.org/x/text/cases/trieval.go create mode 100644 vendor/golang.org/x/text/internal/internal.go create mode 100644 vendor/golang.org/x/text/internal/match.go create mode 100644 vendor/golang.org/x/tools/internal/versions/constraint.go create mode 100644 vendor/golang.org/x/tools/internal/versions/constraint_go121.go rename vendor/google.golang.org/grpc/{ => balancer/pickfirst}/pickfirst.go (74%) delete mode 100644 vendor/google.golang.org/grpc/codegen.sh 
 create mode 100644 vendor/google.golang.org/grpc/encoding/encoding_v2.go
 create mode 100644 vendor/google.golang.org/grpc/experimental/stats/metricregistry.go
 create mode 100644 vendor/google.golang.org/grpc/experimental/stats/metrics.go
 create mode 100644 vendor/google.golang.org/grpc/grpclog/internal/grpclog.go
 create mode 100644 vendor/google.golang.org/grpc/grpclog/internal/logger.go
 rename vendor/google.golang.org/grpc/{internal/grpclog/grpclog.go => grpclog/internal/loggerv2.go} (52%)
 rename vendor/google.golang.org/grpc/internal/grpclog/{prefixLogger.go => prefix_logger.go} (63%)
 delete mode 100644 vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go
 delete mode 100644 vendor/google.golang.org/grpc/internal/grpcrand/grpcrand_go1.21.go
 create mode 100644 vendor/google.golang.org/grpc/internal/stats/labels.go
 create mode 100644 vendor/google.golang.org/grpc/internal/stats/metrics_recorder_list.go
 create mode 100644 vendor/google.golang.org/grpc/mem/buffer_pool.go
 create mode 100644 vendor/google.golang.org/grpc/mem/buffer_slice.go
 create mode 100644 vendor/google.golang.org/grpc/mem/buffers.go
 delete mode 100644 vendor/google.golang.org/grpc/regenerate.sh
 delete mode 100644 vendor/google.golang.org/grpc/shared_buffer_pool.go
 create mode 100644 vendor/google.golang.org/grpc/stream_interfaces.go
 delete mode 100644 vendor/google.golang.org/grpc/vet.sh
diff --git a/docs/flowshart.md b/docs/flowshart.md
index c8df0314..727dda62 100644
--- a/docs/flowshart.md
+++ b/docs/flowshart.md
@@ -4,19 +4,29 @@
 ```mermaid
     sequenceDiagram;
-    authentic source->>datastore: POST /notification;
+    authentic source->>datastore: POST /upload;
     datastore->>authentic source: 200/400 ;
 ```
-
-## Fetch a credential
+## Fetch credential
 ```mermaid
     sequenceDiagram;
     wallet->>satosa: openID Federation;
     satosa->>apigw: POST /credential;
     apigw->>issuer: gRPC makeSDJWT();
+    issuer->>registry: gRPC AddCredential;
+    registry->>apigw: Callback;
     issuer->>apigw: Callback;
     apigw->>satosa: Callback;
     satosa->>wallet: openID Federation;
 ```
+
+## Revoke credential
+
+```mermaid
+    sequenceDiagram;
+    authentic source->>datastore: POST /document/revoke;
+    datastore->>registry: gRPC Revoke;
+    datastore->>database: change revocation.revoked to true;
+```
diff --git a/go.mod b/go.mod
index bd1f5ab3..befdc3c6 100644
--- a/go.mod
+++ b/go.mod
@@ -1,22 +1,20 @@
 module vc
-go 1.22
-
-toolchain go1.22.0
+go 1.23
 require (
 	github.com/brianvoe/gofakeit/v6 v6.28.0
-	github.com/gin-contrib/gzip v0.0.6
+	github.com/gin-contrib/gzip v1.0.1
 	github.com/gin-contrib/sessions v1.0.1
-	github.com/gin-gonic/gin v1.9.1
-	github.com/go-logr/logr v1.4.1
+	github.com/gin-gonic/gin v1.10.0
+	github.com/go-logr/logr v1.4.2
 	github.com/go-logr/zapr v1.3.0
-	github.com/go-playground/validator/v10 v10.19.0
+	github.com/go-playground/validator/v10 v10.22.0
 	github.com/golang-jwt/jwt/v5 v5.2.1
 	github.com/google/uuid v1.6.0
 	github.com/kelseyhightower/envconfig v1.4.0
-	github.com/masv3971/goretask v0.0.0-20240112121936-7f1366ce1e15
-	github.com/masv3971/gosdjwt v0.0.0-20240229085317-d406fd94329c
+	github.com/masv3971/goretask v0.0.2
+	github.com/masv3971/gosdjwt v0.0.10
 	github.com/moogar0880/problems v0.1.1
 	github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e
 	github.com/stretchr/testify v1.9.0
@@ -24,28 +22,28 @@ require (
 	github.com/swaggo/gin-swagger v1.6.0
 	github.com/swaggo/swag v1.16.3
 	github.com/wealdtech/go-merkletree v1.0.0
-	go.mongodb.org/mongo-driver v1.15.0
-	go.opentelemetry.io/contrib/propagators/jaeger v1.25.0
-	go.opentelemetry.io/otel 
v1.25.0 - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0 - go.opentelemetry.io/otel/sdk v1.25.0 - go.opentelemetry.io/otel/trace v1.25.0 + go.mongodb.org/mongo-driver v1.16.1 + go.opentelemetry.io/contrib/propagators/jaeger v1.29.0 + go.opentelemetry.io/otel v1.29.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0 + go.opentelemetry.io/otel/sdk v1.29.0 + go.opentelemetry.io/otel/trace v1.29.0 go.uber.org/zap v1.27.0 - google.golang.org/grpc v1.63.2 + google.golang.org/grpc v1.66.0 gopkg.in/yaml.v2 v2.4.0 - gorm.io/driver/sqlite v1.5.5 - gorm.io/gorm v1.25.9 + gorm.io/driver/sqlite v1.5.6 + gorm.io/gorm v1.25.11 ) require ( github.com/KyleBanks/depth v1.2.1 // indirect - github.com/bytedance/sonic/loader v0.1.1 // indirect + github.com/bytedance/sonic/loader v0.2.0 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect - github.com/cloudwego/base64x v0.1.3 // indirect + github.com/cloudwego/base64x v0.1.4 // indirect github.com/cloudwego/iasm v0.2.0 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/gabriel-vasile/mimetype v1.4.5 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/jsonreference v0.21.0 // indirect @@ -53,57 +51,57 @@ require ( github.com/go-openapi/swag v0.23.0 // indirect github.com/gorilla/context v1.1.2 // indirect github.com/gorilla/securecookie v1.1.2 // indirect - github.com/gorilla/sessions v1.2.2 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 // indirect + github.com/gorilla/sessions v1.4.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect github.com/jinzhu/now v1.1.5 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect - github.com/mattn/go-sqlite3 v1.14.22 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0 // indirect - go.opentelemetry.io/otel/metric v1.25.0 // indirect - go.opentelemetry.io/proto/otlp v1.2.0 // indirect - go.step.sm/crypto v0.44.6 // indirect - golang.org/x/tools v0.20.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect + github.com/mattn/go-sqlite3 v1.14.23 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0 // indirect + go.opentelemetry.io/otel/metric v1.29.0 // indirect + go.opentelemetry.io/proto/otlp v1.3.1 // indirect + go.step.sm/crypto v0.51.2 // indirect + golang.org/x/tools v0.24.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) require ( - github.com/bytedance/sonic v1.11.5 // indirect + github.com/bytedance/sonic v1.12.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/goccy/go-json v0.10.2 + github.com/goccy/go-json v0.10.3 github.com/golang/snappy v0.0.4 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/compress v1.17.8 // 
indirect - github.com/klauspost/cpuid/v2 v2.2.7 // indirect + github.com/klauspost/compress v1.17.9 // indirect + github.com/klauspost/cpuid/v2 v2.2.8 // indirect github.com/leodido/go-urn v1.4.0 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect github.com/montanaflynn/stats v0.7.1 // indirect - github.com/pelletier/go-toml/v2 v2.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/redis/go-redis/v9 v9.5.1 + github.com/redis/go-redis/v9 v9.6.1 github.com/twitchyliquid64/golang-asm v0.15.1 // indirect github.com/ugorji/go/codec v1.2.12 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect github.com/xdg-go/scram v1.1.2 // indirect github.com/xdg-go/stringprep v1.0.4 // indirect - github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect + github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/arch v0.7.0 // indirect - golang.org/x/crypto v0.22.0 // indirect - golang.org/x/net v0.24.0 // indirect - golang.org/x/sync v0.7.0 // indirect - golang.org/x/sys v0.19.0 // indirect - golang.org/x/text v0.14.0 // indirect - google.golang.org/protobuf v1.33.0 + golang.org/x/arch v0.10.0 // indirect + golang.org/x/crypto v0.26.0 // indirect + golang.org/x/net v0.28.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.25.0 // indirect + golang.org/x/text v0.18.0 // indirect + google.golang.org/protobuf v1.34.2 ) replace github.com/masv3971/gosdjwt => ../gosdjwt diff --git a/go.sum b/go.sum index 7e7fb35c..d3f006ee 100644 --- a/go.sum +++ b/go.sum @@ -6,39 +6,37 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= -github.com/bytedance/sonic v1.11.5 h1:G00FYjjqll5iQ1PYXynbg/hyzqBqavH8Mo9/oTopd9k= -github.com/bytedance/sonic v1.11.5/go.mod h1:X2PC2giUdj/Cv2lliWFLk6c/DUQok5rViJSemeB0wDw= -github.com/bytedance/sonic/loader v0.1.0/go.mod h1:UmRT+IRTGKz/DAkzcEGzyVqQFJ7H9BqwBO3pm9H/+HY= -github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM= +github.com/bytedance/sonic v1.12.2 h1:oaMFuRTpMHYLpCntGca65YWt5ny+wAceDERTkT2L9lg= +github.com/bytedance/sonic v1.12.2/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk= github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= +github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM= +github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/cloudwego/base64x v0.1.3 h1:b5J/l8xolB7dyDTTmhJP2oTs5LdrjyrUFuNxdfq5hAg= -github.com/cloudwego/base64x v0.1.3/go.mod h1:1+1K5BUHIQzyapgpF7LwvOGAEDicKtt1umPV+aN8pi8= 
+github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y= +github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w= github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= -github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= -github.com/gin-contrib/gzip v0.0.6 h1:NjcunTcGAj5CO1gn4N8jHOSIeRFHIbn51z6K+xaN4d4= -github.com/gin-contrib/gzip v0.0.6/go.mod h1:QOJlmV2xmayAjkNS2Y8NQsMneuRShOU/kjovCXNuzzk= +github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4= +github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4= +github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE= +github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4= github.com/gin-contrib/sessions v1.0.1 h1:3hsJyNs7v7N8OtelFmYXFrulAf6zSR7nW/putcPEHxI= github.com/gin-contrib/sessions v1.0.1/go.mod h1:ouxSFM24/OgIud5MJYQJLpy6AwxQ5EYO9yLhbtObGkM= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= -github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= -github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU= +github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= -github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-logr/zapr v1.3.0 h1:XGdV8XW8zdwFiwOA2Dryh1gj2KRQyOOoNmBy4EplIcQ= @@ -51,29 +49,22 @@ github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9Z github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= -github.com/go-playground/assert/v2 v2.0.1/go.mod 
h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= -github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= -github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4= -github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-playground/validator/v10 v10.22.0 h1:k6HsTZ0sTnROkhS//R0O+55JgM8C4Bx7ia+JlgcnOao= +github.com/go-playground/validator/v10 v10.22.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-redis/redismock/v9 v9.2.0 h1:ZrMYQeKPECZPjOj5u9eyOjg8Nnb0BS9lkVIZ6IpsKLw= github.com/go-redis/redismock/v9 v9.2.0/go.mod h1:18KHfGDK4Y6c2R0H38EUGWAdc7ZQS9gfYxc94k7rWT0= -github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= -github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= -github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA= +github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= @@ -85,10 +76,10 @@ github.com/gorilla/context v1.1.2 h1:WRkNAv2uoa03QNIc1A6u4O7DAGMUVoopZhkiXWA2V1o github.com/gorilla/context v1.1.2/go.mod h1:KDPwT9i/MeWHiLl90fuTgrt4/wPcv75vFAZLaOOcbxM= github.com/gorilla/securecookie v1.1.2 h1:YCIWL56dvtr73r6715mJs5ZvhtnY73hBvEF8kXD8ePA= github.com/gorilla/securecookie v1.1.2/go.mod h1:NfCASbcHqRSY+3a8tlWJwsQap2VX5pwzwo4h3eOamfo= -github.com/gorilla/sessions v1.2.2 h1:lqzMYz6bOfvn2WriPUjNByzeXIlVzURcPmgMczkmTjY= -github.com/gorilla/sessions v1.2.2/go.mod h1:ePLdVu+jbEgHH+KWw8I1z2wqd0BAdAQh/8LRvBeoNcQ= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 h1:/c3QmbOGMGTOumP2iT/rCwB7b0QDGLKzqOmktBjT+Is= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM= +github.com/gorilla/sessions v1.4.0 
h1:kpIYOp/oi6MG/p5PgxApU8srsSw9tuFbt46Lt7auzqQ= +github.com/gorilla/sessions v1.4.0/go.mod h1:FLWm50oby91+hl7p/wRxDth9bWSuk0qVL2emc7lT5ik= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= @@ -99,31 +90,24 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/kelseyhightower/envconfig v1.4.0 h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8= github.com/kelseyhightower/envconfig v1.4.0/go.mod h1:cccZRl6mQpaq41TPp5QxidR+Sa3axMbJDNb//FQX6Gg= -github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU= -github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= +github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= -github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM= -github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM= +github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= -github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= +github.com/mattn/go-sqlite3 v1.14.23 
h1:gbShiuAP1W5j9UOksQ06aiiqPMxYecovVGwmTxWtuw0= +github.com/mattn/go-sqlite3 v1.14.23/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -133,20 +117,16 @@ github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8 github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow= github.com/moogar0880/problems v0.1.1 h1:bktLhq8NDG/czU2ZziYNigBFksx13RaYe5AVdNmHDT4= github.com/moogar0880/problems v0.1.1/go.mod h1:5Dxrk2sD7BfBAgnOzQ1yaTiuCYdGPUh49L8Vhfky62c= -github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= -github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= -github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= +github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/redis/go-redis/v9 v9.5.1 h1:H1X4D3yHPaYrkL5X06Wh6xNVM/pX0Ft4RV0vMGvLBh8= -github.com/redis/go-redis/v9 v9.5.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4= +github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= +github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= +github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0= github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M= github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262 h1:unQFBIznI+VYD1/1fApl1A+9VcBk+9dcqGfnePY87LY= @@ -154,14 +134,11 @@ github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262/go.mod h1:MyOHs9P github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= -github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.3.0/go.mod 
h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE= @@ -172,8 +149,6 @@ github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg= github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= -github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= -github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/wealdtech/go-merkletree v1.0.0 h1:DsF1xMzj5rK3pSQM6mPv8jlyJyHXhFxpnA2bwEjMMBY= @@ -184,112 +159,97 @@ github.com/xdg-go/scram v1.1.2 h1:FHX5I5B4i4hKRVRBCFRxq1iQRej7WO3hhBuJf+UUySY= github.com/xdg-go/scram v1.1.2/go.mod h1:RT/sEzTbU5y00aCK8UOx6R7YryM0iF1N2MOmC3kKLN4= github.com/xdg-go/stringprep v1.0.4 h1:XLI/Ng3O1Atzq0oBs3TWm+5ZVgkq2aqdlvP9JtoZ6c8= github.com/xdg-go/stringprep v1.0.4/go.mod h1:mPGuuIYwz7CmR2bT9j4GbQqutWS1zV24gijq1dTyGkM= -github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a h1:fZHgsYlfvtyqToslyjUt3VOPF4J7aK/3MPcK7xp3PDk= -github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a/go.mod h1:ul22v+Nro/R083muKhosV54bj5niojjWZvU8xrevuH4= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM= +github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -go.mongodb.org/mongo-driver v1.15.0 h1:rJCKC8eEliewXjZGf0ddURtl7tTVy1TK3bfl0gkUSLc= -go.mongodb.org/mongo-driver v1.15.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= -go.opentelemetry.io/contrib/propagators/jaeger v1.25.0 h1:GPnu8mDgqHlISYc0Ub0EbYlPWCOJE0biicGrE7vcE/M= -go.opentelemetry.io/contrib/propagators/jaeger v1.25.0/go.mod h1:WWa6gdfrRy23dFALEkiT+ynOI5Ke2g+fUa5Q2v0VGyg= -go.opentelemetry.io/otel v1.25.0 h1:gldB5FfhRl7OJQbUHt/8s0a7cE8fbsPAtdpRaApKy4k= -go.opentelemetry.io/otel v1.25.0/go.mod h1:Wa2ds5NOXEMkCmUou1WA7ZBfLTHWIsp034OVD7AO+Vg= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0 h1:dT33yIHtmsqpixFsSQPwNeY5drM9wTcoL8h0FWF4oGM= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.25.0/go.mod h1:h95q0LBGh7hlAC08X2DhSeyIG02YQ0UyioTCVAqRPmc= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0 h1:Mbi5PKN7u322woPa85d7ebZ+SOvEoPvoiBu+ryHWgfA= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.25.0/go.mod 
h1:e7ciERRhZaOZXVjx5MiL8TK5+Xv7G5Gv5PA2ZDEJdL8= -go.opentelemetry.io/otel/metric v1.25.0 h1:LUKbS7ArpFL/I2jJHdJcqMGxkRdxpPHE0VU/D4NuEwA= -go.opentelemetry.io/otel/metric v1.25.0/go.mod h1:rkDLUSd2lC5lq2dFNrX9LGAbINP5B7WBkC78RXCpH5s= -go.opentelemetry.io/otel/sdk v1.25.0 h1:PDryEJPC8YJZQSyLY5eqLeafHtG+X7FWnf3aXMtxbqo= -go.opentelemetry.io/otel/sdk v1.25.0/go.mod h1:oFgzCM2zdsxKzz6zwpTZYLLQsFwc+K0daArPdIhuxkw= -go.opentelemetry.io/otel/trace v1.25.0 h1:tqukZGLwQYRIFtSQM2u2+yfMVTgGVeqRLPUYx1Dq6RM= -go.opentelemetry.io/otel/trace v1.25.0/go.mod h1:hCCs70XM/ljO+BeQkyFnbK28SBIJ/Emuha+ccrCRT7I= -go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94= -go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A= -go.step.sm/crypto v0.44.6 h1:vQg8ujce7fNXDO8EWdriSz+ZSJpYnNh22QrFtRjdyoY= -go.step.sm/crypto v0.44.6/go.mod h1:oKRO4jaf2MaCohJDN+/8ShImkvIgUKfJxxy87gqsnXs= +go.mongodb.org/mongo-driver v1.16.1 h1:rIVLL3q0IHM39dvE+z2ulZLp9ENZKThVfuvN/IiN4l8= +go.mongodb.org/mongo-driver v1.16.1/go.mod h1:oB6AhJQvFQL4LEHyXi6aJzQJtBiTQHiAd83l0GdFaiw= +go.opentelemetry.io/contrib/propagators/jaeger v1.29.0 h1:+YPiqF5rR6PqHBlmEFLPumbSP0gY0WmCGFayXRcCLvs= +go.opentelemetry.io/contrib/propagators/jaeger v1.29.0/go.mod h1:6PD7q7qquWSp3Z4HeM3e/2ipRubaY1rXZO8NIHVDZjs= +go.opentelemetry.io/otel v1.29.0 h1:PdomN/Al4q/lN6iBJEN3AwPvUiHPMlt93c8bqTG5Llw= +go.opentelemetry.io/otel v1.29.0/go.mod h1:N/WtXPs1CNCUEx+Agz5uouwCba+i+bJGFicT8SR4NP8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0 h1:dIIDULZJpgdiHz5tXrTgKIMLkus6jEFa7x5SOKcyR7E= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.29.0/go.mod h1:jlRVBe7+Z1wyxFSUs48L6OBQZ5JwH2Hg/Vbl+t9rAgI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0 h1:JAv0Jwtl01UFiyWZEMiJZBiTlv5A50zNs8lsthXqIio= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0/go.mod h1:QNKLmUEAq2QUbPQUfvw4fmv0bgbK7UlOSFCnXyfvSNc= +go.opentelemetry.io/otel/metric v1.29.0 h1:vPf/HFWTNkPu1aYeIsc98l4ktOQaL6LeSoeV2g+8YLc= +go.opentelemetry.io/otel/metric v1.29.0/go.mod h1:auu/QWieFVWx+DmQOUMgj0F8LHWdgalxXqvp7BII/W8= +go.opentelemetry.io/otel/sdk v1.29.0 h1:vkqKjk7gwhS8VaWb0POZKmIEDimRCMsopNYnriHyryo= +go.opentelemetry.io/otel/sdk v1.29.0/go.mod h1:pM8Dx5WKnvxLCb+8lG1PRNIDxu9g9b9g59Qr7hfAAok= +go.opentelemetry.io/otel/trace v1.29.0 h1:J/8ZNK4XgR7a21DZUAsbF8pZ5Jcw1VhACmnYt39JTi4= +go.opentelemetry.io/otel/trace v1.29.0/go.mod h1:eHl3w0sp3paPkYstJOmAimxhiFXPg+MMTlEh3nsQgWQ= +go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= +go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= +go.step.sm/crypto v0.51.2 h1:5EiCGIMg7IvQTGmJrwRosbXeprtT80OhoS/PJarg60o= +go.step.sm/crypto v0.51.2/go.mod h1:QK7czLjN2k+uqVp5CHXxJbhc70kVRSP+0CQF3zsR5M0= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= -golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= -golang.org/x/arch v0.7.0 h1:pskyeJh/3AmoQ8CPE95vxHLqp1G1GfGNXTmcl9NEKTc= -golang.org/x/arch v0.7.0/go.mod 
h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= +golang.org/x/arch v0.10.0 h1:S3huipmSclq3PJMNe76NGwkBR504WFkQ5dhzWzP8ZW8= +golang.org/x/arch v0.10.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= -golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw= +golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA= -golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0= +golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= -golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE= +golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= -golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= -golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY= -golang.org/x/tools v0.20.0/go.mod h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg= +golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24= +golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be h1:Zz7rLWqp0ApfsR/l7+zSHhY3PMiH2xqgxlfYfAfNpoU= -google.golang.org/genproto/googleapis/api v0.0.0-20240415180920-8c6c420018be/go.mod h1:dvdCTIoAGbkWbcIKBniID56/7XHTt6WfxXNMxuziJ+w= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= -google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= -google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/genproto/googleapis/api 
v0.0.0-20240903143218-8af14fe29dc1 h1:hjSy6tcFQZ171igDaN5QHOw2n6vx40juYbC/x67CEhc= +google.golang.org/genproto/googleapis/api v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:qpvKtACPCQhAdu3PyQgV4l3LMXZEtft7y8QcarRsp9I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU= +google.golang.org/grpc v1.66.0 h1:DibZuoBznOxbDQxRINckZcUvnCEvrW9pcWIE2yF9r1c= +google.golang.org/grpc v1.66.0/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y= +google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= +google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gorm.io/driver/sqlite v1.5.5 h1:7MDMtUZhV065SilG62E0MquljeArQZNfJnjd9i9gx3E= -gorm.io/driver/sqlite v1.5.5/go.mod h1:6NgQ7sQWAIFsPrJJl1lSNSu2TABh0ZZ/zm5fosATavE= -gorm.io/gorm v1.25.9 h1:wct0gxZIELDk8+ZqF/MVnHLkA1rvYlBWUMv2EdsK1g8= -gorm.io/gorm v1.25.9/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= +gorm.io/driver/sqlite v1.5.6 h1:fO/X46qn5NUEEOZtnjJRWRzZMe8nqJiQ9E+0hi+hKQE= +gorm.io/driver/sqlite v1.5.6/go.mod h1:U+J8craQU6Fzkcvu8oLeAQmi50TkwPEhHDEjQZXDah4= +gorm.io/gorm v1.25.11 h1:/Wfyg1B/je1hnDx3sMkX+gAlxrlZpn6X0BXRlwXlvHg= +gorm.io/gorm v1.25.11/go.mod h1:xh7N7RHfYlNc5EmcI/El95gXusucDrQnHXe0+CgWcLQ= nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50= -rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= diff --git a/internal/apigw/httpserver/service.go b/internal/apigw/httpserver/service.go index 2a0eadb1..ae7d9076 100644 --- a/internal/apigw/httpserver/service.go +++ b/internal/apigw/httpserver/service.go @@ -93,7 +93,7 @@ func New(ctx context.Context, config *model.Cfg, api *apiv1.Client, tp *trace.Tr s.regEndpoint(ctx, rgAPIv1, http.MethodPost, "/document/revoke", s.endpointRevokeDocument) s.regEndpoint(ctx, rgAPIv1, http.MethodPost, "/id/mapping", s.endpointIDMapping) - s.regEndpoint(ctx, rgAPIv1, http.MethodGet, "/credential", s.endpointCredential) + s.regEndpoint(ctx, rgAPIv1, http.MethodPost, "/credential", s.endpointCredential) // Run http server go func() { diff --git a/vendor/github.com/bytedance/sonic/.gitignore b/vendor/github.com/bytedance/sonic/.gitignore index 0d884470..82c9e885 100644 --- a/vendor/github.com/bytedance/sonic/.gitignore +++ b/vendor/github.com/bytedance/sonic/.gitignore @@ -49,4 
+49,6 @@ ast/bench.sh
 !testdata/*.json.gz
 fuzz/testdata
-*__debug_bin
\ No newline at end of file
+*__debug_bin*
+*pprof
+*coverage.txt
diff --git a/vendor/github.com/bytedance/sonic/README.md b/vendor/github.com/bytedance/sonic/README.md
index f3c73635..defe2e4b 100644
--- a/vendor/github.com/bytedance/sonic/README.md
+++ b/vendor/github.com/bytedance/sonic/README.md
@@ -6,9 +6,9 @@ A blazingly fast JSON serializing & deserializing library, accelerated by JI
 ## Requirement
-- Go 1.16~1.22
-- Linux / MacOS / Windows(need go1.17 above)
-- Amd64 ARCH
+- Go: 1.17~1.23
+- OS: Linux / MacOS / Windows
+- CPU: AMD64 / ARM64 (needs go1.20 or above)
 ## Features
@@ -282,6 +282,20 @@ sub := root.Get("key3").Index(2).Int64() // == 3
 **Tip**: since `Index()` uses offset to locate data, which is much faster than scanning like `Get()`, we suggest you use it as much as possible. And sonic also provides another API `IndexOrGet()` to underlying use offset as well as ensure the key is matched.
+#### SearchOption
+`Searcher` provides some options for users to meet different needs:
+```go
+opts := ast.SearchOptions{ CopyReturn: true, ... }
+val, err := sonic.GetWithOptions(JSON, opts, "key")
+```
+- CopyReturn
+Indicates the searcher should copy the result JSON string instead of referring to the input. This can help reduce memory usage if you cache the results.
+- ConcurrentRead
+Since `ast.Node` uses a `Lazy-Load` design, it doesn't support concurrent reads by default. If you want to read it concurrently, please enable this option.
+- ValidateJSON
+Indicates the searcher should validate the entire JSON. This option is enabled by default, which slows down the search a little.
+
+
 #### Set/Unset
 Modify the json content by Set()/Unset()
@@ -300,7 +314,6 @@ println(alias1 == alias2) // true
 exist, err := root.UnsetByIndex(1) // exist == true
 println(root.Get("key4").Check()) // "value not exist"
 ```
-
 #### Serialize
 To encode `ast.Node` as json, use `MarshalJson()` or `json.Marshal()` (MUST pass the node's pointer)
@@ -466,6 +479,9 @@ For better performance, in previous case the `ast.Visitor` will be the better ch
 But `ast.Visitor` is not a very handy API. You might need to write a lot of code to implement your visitor and carefully maintain the tree hierarchy during decoding. Please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully if you decide to use this API.
+### Buffer Size
+Sonic uses memory pools in many places, such as `encoder.Encode` and `ast.Node.MarshalJSON`, to improve performance, which may produce more in-use memory when the server's load is high. See [issue 614](https://github.com/bytedance/sonic/issues/614). Therefore, we introduce some options to let users control the behavior of the memory pool. See the [option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables) package.
+
 ## Community
 Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud native ecosystem.
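The SearchOption text above pairs with the new `GetWithOptions` entry point that this vendor bump pulls in (its definition appears in the `api.go` hunk further down). A minimal sketch of how the two fit together, assuming the `ast.SearchOptions` field names quoted from the README (`CopyReturn`, `ConcurrentRead`, `ValidateJSON`) match the vendored package:

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
	"github.com/bytedance/sonic/ast"
)

func main() {
	src := []byte(`{"key1": {"key2": [1, 2, 3]}}`)

	// CopyReturn detaches the result from the input buffer, which helps
	// reduce memory retention when results are cached. ValidateJSON is
	// enabled by default; it is spelled out here only for clarity.
	opts := ast.SearchOptions{
		CopyReturn:   true,
		ValidateJSON: true,
	}

	// Walk key1 -> key2 -> index 2 without decoding the whole document.
	node, err := sonic.GetWithOptions(src, opts, "key1", "key2", 2)
	if err != nil {
		panic(err)
	}

	v, err := node.Int64()
	if err != nil {
		panic(err)
	}
	fmt.Println(v) // 3
}
```

Leaving `ConcurrentRead` unset keeps the default lazy-load behavior, so the returned node should not be read from multiple goroutines.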
diff --git a/vendor/github.com/bytedance/sonic/README_ZH_CN.md b/vendor/github.com/bytedance/sonic/README_ZH_CN.md
index d0341ab7..e4a3015f 100644
--- a/vendor/github.com/bytedance/sonic/README_ZH_CN.md
+++ b/vendor/github.com/bytedance/sonic/README_ZH_CN.md
@@ -6,9 +6,9 @@
 ## 依赖
-- Go 1.16~1.22
-- Linux / MacOS / Windows(需要 Go1.17 以上)
-- Amd64 架构
+- Go: 1.17~1.23
+- OS: Linux / MacOS / Windows
+- CPU: AMD64 / ARM64(需要 Go1.20 以上)
 ## 接口
@@ -260,7 +260,7 @@ fmt.Printf("%+v", data) // {A:0 B:1}
 ### `Ast.Node`
-Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化，并提供了获取和修改通用数据的鲁棒的 API。
+Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化，并提供了获取和修改JSON数据的鲁棒的 API。
 #### 查找/索引
@@ -282,6 +282,19 @@ sub := root.Get("key3").Index(2).Int64() // == 3
 **注意**：由于 `Index()` 使用偏移量来定位数据，比使用扫描的 `Get()` 要快的多，建议尽可能的使用 `Index` 。 Sonic 也提供了另一个 API， `IndexOrGet()` ，以偏移量为基础并且也确保键的匹配。
+#### 查找选项
+`ast.Searcher`提供了一些选项，以满足用户的不同需求：
+```
+opts := ast.SearchOptions{CopyReturn: true, ...}
+val, err := sonic.GetWithOptions(JSON, opts, "key")
+```
+- CopyReturn
+指示搜索器复制结果JSON字符串，而不是从输入引用。如果用户缓存结果，这有助于减少内存使用
+- ConcurrentRead
+因为`ast.Node`使用`Lazy-Load`设计，默认不支持并发读取。如果您想同时读取，请指定它。
+- ValidateJSON
+指示搜索器来验证整个JSON。默认情况下启用该选项, 但是对于查找速度有一定影响。
+
 #### 修改
 使用 `Set()` / `Unset()` 修改 json 的内容
@@ -464,6 +477,9 @@ go someFunc(user)
 但是，`ast.Visitor` 并不是一个很易用的 API。你可能需要写大量的代码去实现自己的 `ast.Visitor`，并且需要在解析过程中仔细维护树的层级。如果你决定要使用这个 API，请先仔细阅读 [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) 中的注释。
+### 缓冲区大小
+Sonic在许多地方使用内存池，如`encoder.Encode`, `ast.Node.MarshalJSON`等来提高性能，这可能会在服务器负载高时产生更多的内存使用（in-use）。参见[issue 614](https://github.com/bytedance/sonic/issues/614)。因此，我们引入了一些选项来让用户配置内存池的行为。参见[option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables)包。
+
 ## 社区
 Sonic 是 [CloudWeGo](https://www.cloudwego.io/) 下的一个子项目。我们致力于构建云原生生态系统。
diff --git a/vendor/github.com/bytedance/sonic/api.go b/vendor/github.com/bytedance/sonic/api.go
index 09332912..3a3d1d5f 100644
--- a/vendor/github.com/bytedance/sonic/api.go
+++ b/vendor/github.com/bytedance/sonic/api.go
@@ -23,6 +23,16 @@ import (
 	`github.com/bytedance/sonic/internal/rt`
 )
+const (
+	// UseStdJSON indicates you are using fallback implementation (encoding/json)
+	UseStdJSON = iota
+	// UseSonicJSON indicates you are using real sonic implementation
+	UseSonicJSON
+)
+
+// APIKind is the kind of API, 0 is std json, 1 is sonic.
+const APIKind = apiKind
+
 // Config is a combination of sonic/encoder.Options and sonic/decoder.Options
 type Config struct {
     // EscapeHTML indicates encoder to escape all HTML characters
@@ -77,6 +87,9 @@ type Config struct {
     // NoEncoderNewline indicates that the encoder should not add a newline after every message
     NoEncoderNewline bool
+
+    // Encode Infinity or NaN float into `null`, instead of returning an error.
+    EncodeNullForInfOrNan bool
 }
 var (
@@ -157,6 +170,13 @@ func Marshal(val interface{}) ([]byte, error) {
     return ConfigDefault.Marshal(val)
 }
+// MarshalIndent is like Marshal but applies Indent to format the output.
+// Each JSON element in the output will begin on a new line beginning with prefix
+// followed by one or more copies of indent according to the indentation nesting.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+	return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
 // MarshalString returns the JSON encoding string of v.
func MarshalString(val interface{}) (string, error) { return ConfigDefault.MarshalToString(val) @@ -189,6 +209,14 @@ func Get(src []byte, path ...interface{}) (ast.Node, error) { return GetCopyFromString(rt.Mem2Str(src), path...) } +//GetWithOptions searches and locates the given path from src json, +// with specific options of ast.Searcher +func GetWithOptions(src []byte, opts ast.SearchOptions, path ...interface{}) (ast.Node, error) { + s := ast.NewSearcher(rt.Mem2Str(src)) + s.SearchOptions = opts + return s.GetByPath(path...) +} + // GetFromString is same with Get except src is string. // // WARNING: The returned JSON is **Referenced** from the input. diff --git a/vendor/github.com/bytedance/sonic/ast/api.go b/vendor/github.com/bytedance/sonic/ast/api.go index 316a62a9..7c8253aa 100644 --- a/vendor/github.com/bytedance/sonic/ast/api.go +++ b/vendor/github.com/bytedance/sonic/ast/api.go @@ -1,5 +1,5 @@ -//go:build (amd64 && go1.16 && !go1.23) || (arm64 && go1.20 && !go1.23) -// +build amd64,go1.16,!go1.23 arm64,go1.20,!go1.23 +//go:build (amd64 && go1.17 && !go1.24) || (arm64 && go1.20 && !go1.24) +// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24 /* * Copyright 2022 ByteDance Inc. @@ -61,7 +61,7 @@ func quote(buf *[]byte, val string) { } // double buf size - *b = growslice(typeByte, *b, b.Cap*2) + *b = rt.GrowSlice(typeByte, *b, b.Cap*2) // ret is the complement of consumed input ret = ^ret // update input buffer diff --git a/vendor/github.com/bytedance/sonic/ast/api_compat.go b/vendor/github.com/bytedance/sonic/ast/api_compat.go index 82d1eacd..a349afc0 100644 --- a/vendor/github.com/bytedance/sonic/ast/api_compat.go +++ b/vendor/github.com/bytedance/sonic/ast/api_compat.go @@ -1,4 +1,4 @@ -// +build !amd64,!arm64 go1.23 !go1.16 arm64,!go1.20 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2022 ByteDance Inc. 
@@ -27,7 +27,7 @@ import ( ) func init() { - println("WARNING:(ast) sonic only supports Go1.16~1.22, but your environment is not suitable") + println("WARNING:(ast) sonic only supports go1.17~1.23, but your environment is not suitable") } func quote(buf *[]byte, val string) { diff --git a/vendor/github.com/bytedance/sonic/ast/buffer.go b/vendor/github.com/bytedance/sonic/ast/buffer.go index bccbf481..04701ef5 100644 --- a/vendor/github.com/bytedance/sonic/ast/buffer.go +++ b/vendor/github.com/bytedance/sonic/ast/buffer.go @@ -17,8 +17,10 @@ package ast import ( - `sort` - `unsafe` + "sort" + "unsafe" + + "github.com/bytedance/sonic/internal/caching" ) type nodeChunk [_DEFAULT_NODE_CAP]Node @@ -90,18 +92,11 @@ func (self *linkedNodes) Pop() { self.size-- } -func (self *linkedPairs) Pop() { - if self == nil || self.size == 0 { - return - } - self.Set(self.size-1, Pair{}) - self.size-- -} - func (self *linkedNodes) Push(v Node) { self.Set(self.size, v) } + func (self *linkedNodes) Set(i int, v Node) { if i < _DEFAULT_NODE_CAP { self.head[i] = v @@ -195,11 +190,22 @@ func (self *linkedNodes) FromSlice(con []Node) { type pairChunk [_DEFAULT_NODE_CAP]Pair type linkedPairs struct { + index map[uint64]int head pairChunk tail []*pairChunk size int } +func (self *linkedPairs) BuildIndex() { + if self.index == nil { + self.index = make(map[uint64]int, self.size) + } + for i:=0; i len(src) { return -int(types.ERR_EOF) } - if src[pos:ret] == bytesNull { + if src[pos:ret] == strNull { return ret } else { return -int(types.ERR_INVALID_CHAR) diff --git a/vendor/github.com/bytedance/sonic/ast/encode.go b/vendor/github.com/bytedance/sonic/ast/encode.go index 956809c2..eae0bd25 100644 --- a/vendor/github.com/bytedance/sonic/ast/encode.go +++ b/vendor/github.com/bytedance/sonic/ast/encode.go @@ -17,12 +17,11 @@ package ast import ( - `sync` - `unicode/utf8` -) + "sync" + "unicode/utf8" -const ( - _MaxBuffer = 1024 // 1KB buffer size + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/option" ) func quoteString(e *[]byte, s string) { @@ -30,7 +29,7 @@ func quoteString(e *[]byte, s string) { start := 0 for i := 0; i < len(s); { if b := s[i]; b < utf8.RuneSelf { - if safeSet[b] { + if rt.SafeSet[b] { i++ continue } @@ -54,8 +53,8 @@ func quoteString(e *[]byte, s string) { // user-controlled strings are rendered into JSON // and served to some browsers. *e = append(*e, `u00`...) - *e = append(*e, hex[b>>4]) - *e = append(*e, hex[b&0xF]) + *e = append(*e, rt.Hex[b>>4]) + *e = append(*e, rt.Hex[b&0xF]) } i++ start = i @@ -76,7 +75,7 @@ func quoteString(e *[]byte, s string) { *e = append(*e, s[start:i]...) } *e = append(*e, `\u202`...) 
- *e = append(*e, hex[c&0xF]) + *e = append(*e, rt.Hex[c&0xF]) i += size start = i continue @@ -92,16 +91,24 @@ func quoteString(e *[]byte, s string) { var bytesPool = sync.Pool{} func (self *Node) MarshalJSON() ([]byte, error) { + if self == nil { + return bytesNull, nil + } + buf := newBuffer() err := self.encode(buf) if err != nil { freeBuffer(buf) return nil, err } - - ret := make([]byte, len(*buf)) - copy(ret, *buf) - freeBuffer(buf) + var ret []byte + if !rt.CanSizeResue(cap(*buf)) { + ret = *buf + } else { + ret = make([]byte, len(*buf)) + copy(ret, *buf) + freeBuffer(buf) + } return ret, err } @@ -109,21 +116,24 @@ func newBuffer() *[]byte { if ret := bytesPool.Get(); ret != nil { return ret.(*[]byte) } else { - buf := make([]byte, 0, _MaxBuffer) + buf := make([]byte, 0, option.DefaultAstBufferSize) return &buf } } func freeBuffer(buf *[]byte) { + if !rt.CanSizeResue(cap(*buf)) { + return + } *buf = (*buf)[:0] bytesPool.Put(buf) } func (self *Node) encode(buf *[]byte) error { - if self.IsRaw() { + if self.isRaw() { return self.encodeRaw(buf) } - switch self.Type() { + switch int(self.itype()) { case V_NONE : return ErrNotExist case V_ERROR : return self.Check() case V_NULL : return self.encodeNull(buf) @@ -139,16 +149,21 @@ func (self *Node) encode(buf *[]byte) error { } func (self *Node) encodeRaw(buf *[]byte) error { - raw, err := self.Raw() - if err != nil { - return err + lock := self.rlock() + if !self.isRaw() { + self.runlock() + return self.encode(buf) + } + raw := self.toString() + if lock { + self.runlock() } *buf = append(*buf, raw...) return nil } func (self *Node) encodeNull(buf *[]byte) error { - *buf = append(*buf, bytesNull...) + *buf = append(*buf, strNull...) return nil } diff --git a/vendor/github.com/bytedance/sonic/ast/error.go b/vendor/github.com/bytedance/sonic/ast/error.go index 00a04468..3716e7a9 100644 --- a/vendor/github.com/bytedance/sonic/ast/error.go +++ b/vendor/github.com/bytedance/sonic/ast/error.go @@ -17,6 +17,10 @@ func newError(err types.ParsingError, msg string) *Node { } } +func newErrorPair(err SyntaxError) *Pair { + return &Pair{0, "", *newSyntaxError(err)} +} + // Error returns error message if the node is invalid func (self Node) Error() string { if self.t != V_ERROR { @@ -79,7 +83,7 @@ func (self SyntaxError) description() string { /* check for empty source */ if self.Src == "" { - return fmt.Sprintf("no sources available: %#v", self) + return fmt.Sprintf("no sources available, the input json is empty: %#v", self) } /* prevent slicing before the beginning */ diff --git a/vendor/github.com/bytedance/sonic/ast/iterator.go b/vendor/github.com/bytedance/sonic/ast/iterator.go index 64e1e5a9..07664715 100644 --- a/vendor/github.com/bytedance/sonic/ast/iterator.go +++ b/vendor/github.com/bytedance/sonic/ast/iterator.go @@ -17,19 +17,29 @@ package ast import ( - `fmt` + "fmt" - `github.com/bytedance/sonic/internal/native/types` + "github.com/bytedance/sonic/internal/caching" + "github.com/bytedance/sonic/internal/native/types" ) type Pair struct { + hash uint64 Key string Value Node } +func NewPair(key string, val Node) Pair { + return Pair{ + hash: caching.StrHash(key), + Key: key, + Value: val, + } +} + // Values returns iterator for array's children traversal func (self *Node) Values() (ListIterator, error) { - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return ListIterator{}, err } return self.values(), nil @@ -41,7 +51,7 @@ func (self *Node) values() ListIterator { // 
Properties returns iterator for object's children traversal func (self *Node) Properties() (ObjectIterator, error) { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return ObjectIterator{}, err } return self.properties(), nil @@ -168,6 +178,9 @@ type Scanner func(path Sequence, node *Node) bool // // NOTICE: A unsetted node WON'T trigger sc, but its index still counts into Path.Index func (self *Node) ForEach(sc Scanner) error { + if err := self.checkRaw(); err != nil { + return err + } switch self.itype() { case types.V_ARRAY: iter, err := self.Values() diff --git a/vendor/github.com/bytedance/sonic/ast/node.go b/vendor/github.com/bytedance/sonic/ast/node.go index ac6d2280..0fbcf783 100644 --- a/vendor/github.com/bytedance/sonic/ast/node.go +++ b/vendor/github.com/bytedance/sonic/ast/node.go @@ -17,13 +17,15 @@ package ast import ( - `encoding/json` - `fmt` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` + "encoding/json" + "fmt" + "strconv" + "sync" + "sync/atomic" + "unsafe" + + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" ) const ( @@ -36,7 +38,7 @@ const ( _V_ARRAY_LAZY = _V_LAZY | types.V_ARRAY _V_OBJECT_LAZY = _V_LAZY | types.V_OBJECT _MASK_LAZY = _V_LAZY - 1 - _MASK_RAW = _V_RAW - 1 + _MASK_RAW = _V_RAW - 1 ) const ( @@ -56,6 +58,7 @@ type Node struct { t types.ValueType l uint p unsafe.Pointer + m *sync.RWMutex } // UnmarshalJSON is just an adapter to json.Unmarshaler. @@ -79,17 +82,39 @@ func (self *Node) UnmarshalJSON(data []byte) (err error) { // V_STRING = 7 (json value string) // V_NUMBER = 33 (json value number ) // V_ANY = 34 (golang interface{}) +// +// Deprecated: not concurrent safe. 
Use TypeSafe instead func (self Node) Type() int { return int(self.t & _MASK_LAZY & _MASK_RAW) } -func (self Node) itype() types.ValueType { +// Type concurrently-safe returns json type represented by the node +// It will be one of belows: +// V_NONE = 0 (empty node, key not exists) +// V_ERROR = 1 (error node) +// V_NULL = 2 (json value `null`, key exists) +// V_TRUE = 3 (json value `true`) +// V_FALSE = 4 (json value `false`) +// V_ARRAY = 5 (json value array) +// V_OBJECT = 6 (json value object) +// V_STRING = 7 (json value string) +// V_NUMBER = 33 (json value number ) +// V_ANY = 34 (golang interface{}) +func (self *Node) TypeSafe() int { + return int(self.loadt() & _MASK_LAZY & _MASK_RAW) +} + +func (self *Node) itype() types.ValueType { return self.t & _MASK_LAZY & _MASK_RAW } // Exists returns false only if the self is nil or empty node V_NONE func (self *Node) Exists() bool { - return self.Valid() && self.t != _V_NONE + if self == nil { + return false + } + t := self.loadt() + return t != V_ERROR && t != _V_NONE } // Valid reports if self is NOT V_ERROR or nil @@ -97,7 +122,7 @@ func (self *Node) Valid() bool { if self == nil { return false } - return self.t != V_ERROR + return self.loadt() != V_ERROR } // Check checks if the node itself is valid, and return: @@ -106,24 +131,31 @@ func (self *Node) Valid() bool { func (self *Node) Check() error { if self == nil { return ErrNotExist - } else if self.t != V_ERROR { + } else if self.loadt() != V_ERROR { return nil } else { return self } } -// IsRaw returns true if node's underlying value is raw json +// isRaw returns true if node's underlying value is raw json +// +// Deprecated: not concurent safe func (self Node) IsRaw() bool { - return self.t&_V_RAW != 0 + return self.t & _V_RAW != 0 +} + +// IsRaw returns true if node's underlying value is raw json +func (self *Node) isRaw() bool { + return self.loadt() & _V_RAW != 0 } func (self *Node) isLazy() bool { - return self != nil && self.t&_V_LAZY != 0 + return self != nil && self.t & _V_LAZY != 0 } func (self *Node) isAny() bool { - return self != nil && self.t == _V_ANY + return self != nil && self.loadt() == _V_ANY } /** Simple Value Methods **/ @@ -133,18 +165,26 @@ func (self *Node) Raw() (string, error) { if self == nil { return "", ErrNotExist } - if !self.IsRaw() { + lock := self.rlock() + if !self.isRaw() { + if lock { + self.runlock() + } buf, err := self.MarshalJSON() return rt.Mem2Str(buf), err } - return self.toString(), nil + ret := self.toString() + if lock { + self.runlock() + } + return ret, nil } func (self *Node) checkRaw() error { if err := self.Check(); err != nil { return err } - if self.IsRaw() { + if self.isRaw() { self.parseRaw(false) } return self.Check() @@ -504,7 +544,7 @@ func (self *Node) Len() (int, error) { } } -func (self Node) len() int { +func (self *Node) len() int { return int(self.l) } @@ -527,7 +567,7 @@ func (self *Node) Cap() (int, error) { // // If self is V_NONE or V_NULL, it becomes V_OBJECT and sets the node at the key. 
func (self *Node) Set(key string, node Node) (bool, error) { - if err := self.Check(); err != nil { + if err := self.checkRaw(); err != nil { return false, err } if err := node.Check(); err != nil { @@ -535,7 +575,7 @@ func (self *Node) Set(key string, node Node) (bool, error) { } if self.t == _V_NONE || self.t == types.V_NULL { - *self = NewObject([]Pair{{key, node}}) + *self = NewObject([]Pair{NewPair(key, node)}) return false, nil } else if self.itype() != types.V_OBJECT { return false, ErrUnsupportType @@ -549,7 +589,7 @@ func (self *Node) Set(key string, node Node) (bool, error) { *self = newObject(new(linkedPairs)) } s := (*linkedPairs)(self.p) - s.Push(Pair{key, node}) + s.Push(NewPair(key, node)) self.l++ return false, nil @@ -568,7 +608,7 @@ func (self *Node) SetAny(key string, val interface{}) (bool, error) { // Unset REMOVE (soft) the node of given key under object parent, and reports if the key has existed. func (self *Node) Unset(key string) (bool, error) { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return false, err } // NOTICE: must get acurate length before deduct @@ -589,7 +629,7 @@ func (self *Node) Unset(key string) (bool, error) { // // The index must be within self's children. func (self *Node) SetByIndex(index int, node Node) (bool, error) { - if err := self.Check(); err != nil { + if err := self.checkRaw(); err != nil { return false, err } if err := node.Check(); err != nil { @@ -669,7 +709,7 @@ func (self *Node) UnsetByIndex(index int) (bool, error) { // // If self is V_NONE or V_NULL, it becomes V_ARRAY and sets the node at index 0. func (self *Node) Add(node Node) error { - if err := self.Check(); err != nil { + if err := self.checkRaw(); err != nil { return err } @@ -677,7 +717,7 @@ func (self *Node) Add(node Node) error { *self = NewArray([]Node{node}) return nil } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return err } @@ -740,7 +780,7 @@ func (self *Node) Pop() error { // // WARN: this will change address of elements, which is a dangerous action. 
func (self *Node) Move(dst, src int) error { - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return err } @@ -812,7 +852,7 @@ func (self *Node) GetByPath(path ...interface{}) *Node { // Get loads given key of an object node on demands func (self *Node) Get(key string) *Node { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return unwrapError(err) } n, _ := self.skipKey(key) @@ -845,14 +885,14 @@ func (self *Node) Index(idx int) *Node { // IndexPair indexies pair at given idx, // node type MUST be either V_OBJECT func (self *Node) IndexPair(idx int) *Pair { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil } return self.skipIndexPair(idx) } func (self *Node) indexOrGet(idx int, key string) (*Node, int) { - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return unwrapError(err), idx } @@ -889,10 +929,10 @@ func (self *Node) Map() (map[string]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObject() @@ -908,10 +948,10 @@ func (self *Node) MapUseNumber() (map[string]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObjectUseNumber() @@ -928,7 +968,7 @@ func (self *Node) MapUseNode() (map[string]Node, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_OBJECT, "an object"); err != nil { + if err := self.should(types.V_OBJECT); err != nil { return nil, err } if err := self.skipAllKey(); err != nil { @@ -1034,10 +1074,10 @@ func (self *Node) Array() ([]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArray() @@ -1053,10 +1093,10 @@ func (self *Node) ArrayUseNumber() ([]interface{}, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArrayUseNumber() @@ -1073,7 +1113,7 @@ func (self *Node) ArrayUseNode() ([]Node, error) { return nil, ErrUnsupportType } } - if err := self.should(types.V_ARRAY, "an array"); err != nil { + if err := self.should(types.V_ARRAY); err != nil { return nil, err } if err := self.skipAllIndex(); err != nil { @@ -1129,12 +1169,12 @@ func (self *Node) Interface() (interface{}, error) { } return v, nil case _V_ARRAY_LAZY : - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArray() case _V_OBJECT_LAZY : - if err 
:= self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObject() @@ -1168,12 +1208,12 @@ func (self *Node) InterfaceUseNumber() (interface{}, error) { case types.V_STRING : return self.toString(), nil case _V_NUMBER : return self.toNumber(), nil case _V_ARRAY_LAZY : - if err := self.loadAllIndex(); err != nil { + if err := self.loadAllIndex(false); err != nil { return nil, err } return self.toGenericArrayUseNumber() case _V_OBJECT_LAZY : - if err := self.loadAllKey(); err != nil { + if err := self.loadAllKey(false); err != nil { return nil, err } return self.toGenericObjectUseNumber() @@ -1205,70 +1245,30 @@ func (self *Node) InterfaceUseNode() (interface{}, error) { } } -// LoadAll loads all the node's children and children's children as parsed. -// After calling it, the node can be safely used on concurrency +// LoadAll loads the node's children +// and ensure all its children can be READ concurrently (include its children's children) func (self *Node) LoadAll() error { - if self.IsRaw() { - self.parseRaw(true) - return self.Check() - } - - switch self.itype() { - case types.V_ARRAY: - e := self.len() - if err := self.loadAllIndex(); err != nil { - return err - } - for i := 0; i < e; i++ { - n := self.nodeAt(i) - if n.IsRaw() { - n.parseRaw(true) - } - if err := n.Check(); err != nil { - return err - } - } - return nil - case types.V_OBJECT: - e := self.len() - if err := self.loadAllKey(); err != nil { - return err - } - for i := 0; i < e; i++ { - n := self.pairAt(i) - if n.Value.IsRaw() { - n.Value.parseRaw(true) - } - if err := n.Value.Check(); err != nil { - return err - } - } - return nil - default: - return self.Check() - } + return self.Load() } // Load loads the node's children as parsed. 
-// After calling it, only the node itself can be used on concurrency (not include its children) +// and ensure all its children can be READ concurrently (include its children's children) func (self *Node) Load() error { - if err := self.checkRaw(); err != nil { - return err - } - switch self.t { - case _V_ARRAY_LAZY: - return self.skipAllIndex() - case _V_OBJECT_LAZY: - return self.skipAllKey() - default: - return self.Check() + case _V_ARRAY_LAZY: self.loadAllIndex(true) + case _V_OBJECT_LAZY: self.loadAllKey(true) + case V_ERROR: return self + case V_NONE: return nil } + if self.m == nil { + self.m = new(sync.RWMutex) + } + return self.checkRaw() } /**---------------------------------- Internal Helper Methods ----------------------------------**/ -func (self *Node) should(t types.ValueType, s string) error { +func (self *Node) should(t types.ValueType) error { if err := self.checkRaw(); err != nil { return err } @@ -1439,13 +1439,17 @@ func (self *Node) skipIndexPair(index int) *Pair { return nil } -func (self *Node) loadAllIndex() error { +func (self *Node) loadAllIndex(loadOnce bool) error { if !self.isLazy() { return nil } var err types.ParsingError parser, stack := self.getParserAndArrayStack() - parser.noLazy = true + if !loadOnce { + parser.noLazy = true + } else { + parser.loadOnce = true + } *self, err = parser.decodeArray(&stack.v) if err != 0 { return parser.ExportError(err) @@ -1453,14 +1457,19 @@ func (self *Node) loadAllIndex() error { return nil } -func (self *Node) loadAllKey() error { +func (self *Node) loadAllKey(loadOnce bool) error { if !self.isLazy() { return nil } var err types.ParsingError parser, stack := self.getParserAndObjectStack() - parser.noLazy = true - *self, err = parser.decodeObject(&stack.v) + if !loadOnce { + parser.noLazy = true + *self, err = parser.decodeObject(&stack.v) + } else { + parser.loadOnce = true + *self, err = parser.decodeObject(&stack.v) + } if err != 0 { return parser.ExportError(err) } @@ -1629,7 +1638,23 @@ func NewRaw(json string) Node { if it == _V_NONE { return Node{} } - return newRawNode(parser.s[start:parser.p], it) + return newRawNode(parser.s[start:parser.p], it, false) +} + +// NewRawConcurrentRead creates a node of raw json, which can be READ +// (GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON) concurrently. +// If the input json is invalid, NewRaw returns a error Node. 
+func NewRawConcurrentRead(json string) Node { + parser := NewParserObj(json) + start, err := parser.skip() + if err != 0 { + return *newError(err, err.Message()) + } + it := switchRawType(parser.s[start]) + if it == _V_NONE { + return Node{} + } + return newRawNode(parser.s[start:parser.p], it, true) } // NewAny creates a node of type V_ANY if any's type isn't Node or *Node, @@ -1653,7 +1678,7 @@ func NewBytes(src []byte) Node { if len(src) == 0 { panic("empty src bytes") } - out := encodeBase64(src) + out := rt.EncodeBase64(src) return NewString(out) } @@ -1689,15 +1714,15 @@ func NewNumber(v string) Node { } } -func (node Node) toNumber() json.Number { +func (node *Node) toNumber() json.Number { return json.Number(rt.StrFrom(node.p, int64(node.l))) } -func (self Node) toString() string { +func (self *Node) toString() string { return rt.StrFrom(self.p, int64(self.l)) } -func (node Node) toFloat64() (float64, error) { +func (node *Node) toFloat64() (float64, error) { ret, err := node.toNumber().Float64() if err != nil { return 0, err @@ -1705,7 +1730,7 @@ func (node Node) toFloat64() (float64, error) { return ret, nil } -func (node Node) toInt64() (int64, error) { +func (node *Node) toInt64() (int64, error) { ret,err := node.toNumber().Int64() if err != nil { return 0, err @@ -1741,6 +1766,8 @@ func NewArray(v []Node) Node { return newArray(s) } +const _Threshold_Index = 16 + func newArray(v *linkedNodes) Node { return Node{ t: types.V_ARRAY, @@ -1764,6 +1791,9 @@ func NewObject(v []Pair) Node { } func newObject(v *linkedPairs) Node { + if v.size > _Threshold_Index { + v.BuildIndex() + } return Node{ t: types.V_OBJECT, l: uint(v.Len()), @@ -1772,53 +1802,42 @@ func newObject(v *linkedPairs) Node { } func (self *Node) setObject(v *linkedPairs) { + if v.size > _Threshold_Index { + v.BuildIndex() + } self.t = types.V_OBJECT self.l = uint(v.Len()) self.p = unsafe.Pointer(v) } -func newRawNode(str string, typ types.ValueType) Node { - return Node{ - t: _V_RAW | typ, - p: rt.StrPtr(str), - l: uint(len(str)), - } -} - func (self *Node) parseRaw(full bool) { + lock := self.lock() + defer self.unlock() + if !self.isRaw() { + return + } raw := self.toString() parser := NewParserObj(raw) + var e types.ParsingError if full { parser.noLazy = true - parser.skipValue = false + *self, e = parser.Parse() + } else if lock { + var n Node + parser.noLazy = true + parser.loadOnce = true + n, e = parser.Parse() + self.assign(n) + } else { + *self, e = parser.Parse() } - var e types.ParsingError - *self, e = parser.Parse() if e != 0 { *self = *newSyntaxError(parser.syntaxError(e)) } } -var typeJumpTable = [256]types.ValueType{ - '"' : types.V_STRING, - '-' : _V_NUMBER, - '0' : _V_NUMBER, - '1' : _V_NUMBER, - '2' : _V_NUMBER, - '3' : _V_NUMBER, - '4' : _V_NUMBER, - '5' : _V_NUMBER, - '6' : _V_NUMBER, - '7' : _V_NUMBER, - '8' : _V_NUMBER, - '9' : _V_NUMBER, - '[' : types.V_ARRAY, - 'f' : types.V_FALSE, - 'n' : types.V_NULL, - 't' : types.V_TRUE, - '{' : types.V_OBJECT, -} - -func switchRawType(c byte) types.ValueType { - return typeJumpTable[c] +func (self *Node) assign(n Node) { + self.l = n.l + self.p = n.p + atomic.StoreInt64(&self.t, n.t) } diff --git a/vendor/github.com/bytedance/sonic/ast/parser.go b/vendor/github.com/bytedance/sonic/ast/parser.go index a1f58262..30bd1f45 100644 --- a/vendor/github.com/bytedance/sonic/ast/parser.go +++ b/vendor/github.com/bytedance/sonic/ast/parser.go @@ -17,14 +17,16 @@ package ast import ( - `fmt` + "fmt" + "sync" + "sync/atomic" - 
`github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" ) const ( - _DEFAULT_NODE_CAP int = 8 + _DEFAULT_NODE_CAP int = 16 _APPEND_GROW_SHIFT = 1 ) @@ -45,6 +47,7 @@ type Parser struct { p int s string noLazy bool + loadOnce bool skipValue bool dbuf *byte } @@ -115,6 +118,10 @@ func (self *Parser) lspace(sp int) int { return sp } +func (self *Parser) backward() { + for ; self.p >= 0 && isSpace(self.s[self.p]); self.p-=1 {} +} + func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) { sp := self.p ns := len(self.s) @@ -148,7 +155,7 @@ func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) { if t == _V_NONE { return Node{}, types.ERR_INVALID_CHAR } - val = newRawNode(self.s[start:self.p], t) + val = newRawNode(self.s[start:self.p], t, false) }else{ /* decode the value */ if val, err = self.Parse(); err != 0 { @@ -234,7 +241,7 @@ func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) { if t == _V_NONE { return Node{}, types.ERR_INVALID_CHAR } - val = newRawNode(self.s[start:self.p], t) + val = newRawNode(self.s[start:self.p], t, false) } else { /* decode the value */ if val, err = self.Parse(); err != 0 { @@ -244,7 +251,7 @@ func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) { /* add the value to result */ // FIXME: ret's address may change here, thus previous referred node in ret may be invalid !! - ret.Push(Pair{Key: key, Value: val}) + ret.Push(NewPair(key, val)) self.p = self.lspace(self.p) /* check for EOF */ @@ -291,6 +298,10 @@ func (self *Parser) Pos() int { return self.p } + +// Parse returns a ast.Node representing the parser's JSON. 
+// NOTICE: the specific parsing lazy dependens parser's option +// It only parse first layer and first child for Object or Array be default func (self *Parser) Parse() (Node, types.ParsingError) { switch val := self.decodeValue(); val.Vt { case types.V_EOF : return Node{}, types.ERR_EOF @@ -299,22 +310,48 @@ func (self *Parser) Parse() (Node, types.ParsingError) { case types.V_FALSE : return falseNode, 0 case types.V_STRING : return self.decodeString(val.Iv, val.Ep) case types.V_ARRAY: + s := self.p - 1; if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' { self.p = p + 1 return Node{t: types.V_ARRAY}, 0 } if self.noLazy { + if self.loadOnce { + self.noLazy = false + } return self.decodeArray(new(linkedNodes)) } + // NOTICE: loadOnce always keep raw json for object or array + if self.loadOnce { + self.p = s + s, e := self.skipFast() + if e != 0 { + return Node{}, e + } + return newRawNode(self.s[s:self.p], types.V_ARRAY, true), 0 + } return newLazyArray(self), 0 case types.V_OBJECT: + s := self.p - 1; if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' { self.p = p + 1 return Node{t: types.V_OBJECT}, 0 } + // NOTICE: loadOnce always keep raw json for object or array if self.noLazy { + if self.loadOnce { + self.noLazy = false + } return self.decodeObject(new(linkedPairs)) } + if self.loadOnce { + self.p = s + s, e := self.skipFast() + if e != 0 { + return Node{}, e + } + return newRawNode(self.s[s:self.p], types.V_OBJECT, true), 0 + } return newLazyObject(self), 0 case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0 case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0 @@ -471,7 +508,7 @@ func (self *Node) skipNextNode() *Node { if t == _V_NONE { return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR)) } - val = newRawNode(parser.s[start:parser.p], t) + val = newRawNode(parser.s[start:parser.p], t, false) } /* add the value to result */ @@ -510,7 +547,7 @@ func (self *Node) skipNextPair() (*Pair) { /* check for EOF */ if parser.p = parser.lspace(sp); parser.p >= ns { - return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_EOF))} + return newErrorPair(parser.syntaxError(types.ERR_EOF)) } /* check for empty object */ @@ -527,7 +564,7 @@ func (self *Node) skipNextPair() (*Pair) { /* decode the key */ if njs = parser.decodeValue(); njs.Vt != types.V_STRING { - return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } /* extract the key */ @@ -537,34 +574,34 @@ func (self *Node) skipNextPair() (*Pair) { /* check for escape sequence */ if njs.Ep != -1 { if key, err = unquote(key); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } } /* expect a ':' delimiter */ if err = parser.delim(); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } /* skip the value */ if start, err := parser.skipFast(); err != 0 { - return &Pair{key, *newSyntaxError(parser.syntaxError(err))} + return newErrorPair(parser.syntaxError(err)) } else { t := switchRawType(parser.s[start]) if t == _V_NONE { - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } - val = newRawNode(parser.s[start:parser.p], t) + val = newRawNode(parser.s[start:parser.p], t, false) } /* add the value to result */ - ret.Push(Pair{Key: key, Value: val}) + 
ret.Push(NewPair(key, val)) self.l++ parser.p = parser.lspace(parser.p) /* check for EOF */ if parser.p >= ns { - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_EOF))} + return newErrorPair(parser.syntaxError(types.ERR_EOF)) } /* check for the next character */ @@ -577,7 +614,7 @@ func (self *Node) skipNextPair() (*Pair) { self.setObject(ret) return ret.At(ret.Len()-1) default: - return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))} + return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) } } @@ -658,3 +695,72 @@ func backward(src string, i int) int { for ; i>=0 && isSpace(src[i]); i-- {} return i } + + +func newRawNode(str string, typ types.ValueType, lock bool) Node { + ret := Node{ + t: typ | _V_RAW, + p: rt.StrPtr(str), + l: uint(len(str)), + } + if lock { + ret.m = new(sync.RWMutex) + } + return ret +} + +var typeJumpTable = [256]types.ValueType{ + '"' : types.V_STRING, + '-' : _V_NUMBER, + '0' : _V_NUMBER, + '1' : _V_NUMBER, + '2' : _V_NUMBER, + '3' : _V_NUMBER, + '4' : _V_NUMBER, + '5' : _V_NUMBER, + '6' : _V_NUMBER, + '7' : _V_NUMBER, + '8' : _V_NUMBER, + '9' : _V_NUMBER, + '[' : types.V_ARRAY, + 'f' : types.V_FALSE, + 'n' : types.V_NULL, + 't' : types.V_TRUE, + '{' : types.V_OBJECT, +} + +func switchRawType(c byte) types.ValueType { + return typeJumpTable[c] +} + +func (self *Node) loadt() types.ValueType { + return (types.ValueType)(atomic.LoadInt64(&self.t)) +} + +func (self *Node) lock() bool { + if m := self.m; m != nil { + m.Lock() + return true + } + return false +} + +func (self *Node) unlock() { + if m := self.m; m != nil { + m.Unlock() + } +} + +func (self *Node) rlock() bool { + if m := self.m; m != nil { + m.RLock() + return true + } + return false +} + +func (self *Node) runlock() { + if m := self.m; m != nil { + m.RUnlock() + } +} diff --git a/vendor/github.com/bytedance/sonic/ast/search.go b/vendor/github.com/bytedance/sonic/ast/search.go index a8d1e76f..9a5fb942 100644 --- a/vendor/github.com/bytedance/sonic/ast/search.go +++ b/vendor/github.com/bytedance/sonic/ast/search.go @@ -21,8 +21,23 @@ import ( `github.com/bytedance/sonic/internal/native/types` ) +// SearchOptions controls Searcher's behavior +type SearchOptions struct { + // ValidateJSON indicates the searcher to validate the entire JSON + ValidateJSON bool + + // CopyReturn indicates the searcher to copy the result JSON instead of refer from the input + // This can help to reduce memory usage if you cache the results + CopyReturn bool + + // ConcurrentRead indicates the searcher to return a concurrently-READ-safe node, + // including: GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON + ConcurrentRead bool +} + type Searcher struct { parser Parser + SearchOptions } func NewSearcher(str string) *Searcher { @@ -31,12 +46,16 @@ func NewSearcher(str string) *Searcher { s: str, noLazy: false, }, + SearchOptions: SearchOptions{ + ValidateJSON: true, + }, } } // GetByPathCopy search in depth from top json and returns a **Copied** json node at the path location func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) { - return self.getByPath(true, true, path...) + self.CopyReturn = true + return self.getByPath(path...) 
} // GetByPathNoCopy search in depth from top json and returns a **Referenced** json node at the path location @@ -44,15 +63,15 @@ func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) { // WARN: this search directly refer partial json from top json, which has faster speed, // may consumes more memory. func (self *Searcher) GetByPath(path ...interface{}) (Node, error) { - return self.getByPath(false, true, path...) + return self.getByPath(path...) } -func (self *Searcher) getByPath(copystring bool, validate bool, path ...interface{}) (Node, error) { +func (self *Searcher) getByPath(path ...interface{}) (Node, error) { var err types.ParsingError var start int self.parser.p = 0 - start, err = self.parser.getByPath(validate, path...) + start, err = self.parser.getByPath(self.ValidateJSON, path...) if err != 0 { // for compatibility with old version if err == types.ERR_NOT_FOUND { @@ -71,12 +90,12 @@ func (self *Searcher) getByPath(copystring bool, validate bool, path ...interfac // copy string to reducing memory usage var raw string - if copystring { + if self.CopyReturn { raw = rt.Mem2Str([]byte(self.parser.s[start:self.parser.p])) } else { raw = self.parser.s[start:self.parser.p] } - return newRawNode(raw, t), nil + return newRawNode(raw, t, self.ConcurrentRead), nil } // GetByPath searches a path and returns relaction and types of target diff --git a/vendor/github.com/bytedance/sonic/ast/stubs.go b/vendor/github.com/bytedance/sonic/ast/stubs.go new file mode 100644 index 00000000..53bf3b8a --- /dev/null +++ b/vendor/github.com/bytedance/sonic/ast/stubs.go @@ -0,0 +1,142 @@ +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package ast + +import ( + "unicode/utf8" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +//go:noescape +//go:linkname memmove runtime.memmove +//goland:noinspection GoUnusedParameter +func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr) + +//go:linkname unsafe_NewArray reflect.unsafe_NewArray +//goland:noinspection GoUnusedParameter +func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer + +//go:nosplit +func mem2ptr(s []byte) unsafe.Pointer { + return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr +} + +var safeSet = [utf8.RuneSelf]bool{ + ' ': true, + '!': true, + '"': false, + '#': true, + '$': true, + '%': true, + '&': true, + '\'': true, + '(': true, + ')': true, + '*': true, + '+': true, + ',': true, + '-': true, + '.': true, + '/': true, + '0': true, + '1': true, + '2': true, + '3': true, + '4': true, + '5': true, + '6': true, + '7': true, + '8': true, + '9': true, + ':': true, + ';': true, + '<': true, + '=': true, + '>': true, + '?': true, + '@': true, + 'A': true, + 'B': true, + 'C': true, + 'D': true, + 'E': true, + 'F': true, + 'G': true, + 'H': true, + 'I': true, + 'J': true, + 'K': true, + 'L': true, + 'M': true, + 'N': true, + 'O': true, + 'P': true, + 'Q': true, + 'R': true, + 'S': true, + 'T': true, + 'U': true, + 'V': true, + 'W': true, + 'X': true, + 'Y': true, + 'Z': true, + '[': true, + '\\': false, + ']': true, + '^': true, + '_': true, + '`': true, + 'a': true, + 'b': true, + 'c': true, + 'd': true, + 'e': true, + 'f': true, + 'g': true, + 'h': true, + 'i': true, + 'j': true, + 'k': true, + 'l': true, + 'm': true, + 'n': true, + 'o': true, + 'p': true, + 'q': true, + 'r': true, + 's': true, + 't': true, + 'u': true, + 'v': true, + 'w': true, + 'x': true, + 'y': true, + 'z': true, + '{': true, + '|': true, + '}': true, + '~': true, + '\u007f': true, +} + +var hex = "0123456789abcdef" + +//go:linkname unquoteBytes encoding/json.unquoteBytes +func unquoteBytes(s []byte) (t []byte, ok bool) diff --git a/vendor/github.com/bytedance/sonic/ast/stubs_go115.go b/vendor/github.com/bytedance/sonic/ast/stubs_go115.go deleted file mode 100644 index 37b9451f..00000000 --- a/vendor/github.com/bytedance/sonic/ast/stubs_go115.go +++ /dev/null @@ -1,55 +0,0 @@ -// +build !go1.20 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package ast - -import ( - `unsafe` - `unicode/utf8` - - `github.com/bytedance/sonic/internal/rt` -) - -//go:noescape -//go:linkname memmove runtime.memmove -//goland:noinspection GoUnusedParameter -func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr) - -//go:linkname unsafe_NewArray reflect.unsafe_NewArray -//goland:noinspection GoUnusedParameter -func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer - -//go:linkname growslice runtime.growslice -//goland:noinspection GoUnusedParameter -func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice - -//go:nosplit -func mem2ptr(s []byte) unsafe.Pointer { - return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr -} - -var ( - //go:linkname safeSet encoding/json.safeSet - safeSet [utf8.RuneSelf]bool - - //go:linkname hex encoding/json.hex - hex string -) - -//go:linkname unquoteBytes encoding/json.unquoteBytes -func unquoteBytes(s []byte) (t []byte, ok bool) \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/ast/stubs_go120.go b/vendor/github.com/bytedance/sonic/ast/stubs_go120.go deleted file mode 100644 index 6f830529..00000000 --- a/vendor/github.com/bytedance/sonic/ast/stubs_go120.go +++ /dev/null @@ -1,55 +0,0 @@ -// +build go1.20 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package ast - -import ( - `unsafe` - `unicode/utf8` - - `github.com/bytedance/sonic/internal/rt` -) - -//go:noescape -//go:linkname memmove runtime.memmove -//goland:noinspection GoUnusedParameter -func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr) - -//go:linkname unsafe_NewArray reflect.unsafe_NewArray -//goland:noinspection GoUnusedParameter -func unsafe_NewArray(typ *rt.GoType, n int) unsafe.Pointer - -//go:linkname growslice reflect.growslice -//goland:noinspection GoUnusedParameter -func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice - -//go:nosplit -func mem2ptr(s []byte) unsafe.Pointer { - return (*rt.GoSlice)(unsafe.Pointer(&s)).Ptr -} - -var ( - //go:linkname safeSet encoding/json.safeSet - safeSet [utf8.RuneSelf]bool - - //go:linkname hex encoding/json.hex - hex string -) - -//go:linkname unquoteBytes encoding/json.unquoteBytes -func unquoteBytes(s []byte) (t []byte, ok bool) diff --git a/vendor/github.com/bytedance/sonic/ast/visitor.go b/vendor/github.com/bytedance/sonic/ast/visitor.go index d409509f..dc047851 100644 --- a/vendor/github.com/bytedance/sonic/ast/visitor.go +++ b/vendor/github.com/bytedance/sonic/ast/visitor.go @@ -18,6 +18,7 @@ package ast import ( `encoding/json` + `errors` `github.com/bytedance/sonic/internal/native/types` ) @@ -174,6 +175,19 @@ func (self *traverser) decodeArray() error { sp := self.parser.p ns := len(self.parser.s) + /* allocate array space and parse every element */ + if err := self.visitor.OnArrayBegin(_DEFAULT_NODE_CAP); err != nil { + if err == VisitOPSkip { + // NOTICE: for user needs to skip entiry object + self.parser.p -= 1 + if _, e := self.parser.skipFast(); e != 0 { + return e + } + return self.visitor.OnArrayEnd() + } + return err + } + /* check for EOF */ self.parser.p = self.parser.lspace(sp) if self.parser.p >= ns { @@ -183,16 +197,9 @@ func (self *traverser) decodeArray() error { /* check for empty array */ if self.parser.s[self.parser.p] == ']' { self.parser.p++ - if err := self.visitor.OnArrayBegin(0); err != nil { - return err - } return self.visitor.OnArrayEnd() } - /* allocate array space and parse every element */ - if err := self.visitor.OnArrayBegin(_DEFAULT_NODE_CAP); err != nil { - return err - } for { /* decode the value */ if err := self.decodeValue(); err != nil { @@ -223,6 +230,19 @@ func (self *traverser) decodeObject() error { sp := self.parser.p ns := len(self.parser.s) + /* allocate object space and decode each pair */ + if err := self.visitor.OnObjectBegin(_DEFAULT_NODE_CAP); err != nil { + if err == VisitOPSkip { + // NOTICE: for user needs to skip entiry object + self.parser.p -= 1 + if _, e := self.parser.skipFast(); e != 0 { + return e + } + return self.visitor.OnObjectEnd() + } + return err + } + /* check for EOF */ self.parser.p = self.parser.lspace(sp) if self.parser.p >= ns { @@ -232,16 +252,9 @@ func (self *traverser) decodeObject() error { /* check for empty object */ if self.parser.s[self.parser.p] == '}' { self.parser.p++ - if err := self.visitor.OnObjectBegin(0); err != nil { - return err - } return self.visitor.OnObjectEnd() } - /* allocate object space and decode each pair */ - if err := self.visitor.OnObjectBegin(_DEFAULT_NODE_CAP); err != nil { - return err - } for { var njs types.JsonState var err types.ParsingError @@ -313,3 +326,7 @@ func (self *traverser) decodeString(iv int64, ep int) error { } return self.visitor.OnString(out) } + +// If visitor return this error on `OnObjectBegin()` or `OnArrayBegin()`, +// the transverer will skip entiry object or 
array +var VisitOPSkip = errors.New("") diff --git a/vendor/github.com/bytedance/sonic/compat.go b/vendor/github.com/bytedance/sonic/compat.go index 728bc176..b32342a8 100644 --- a/vendor/github.com/bytedance/sonic/compat.go +++ b/vendor/github.com/bytedance/sonic/compat.go @@ -1,4 +1,4 @@ -// +build !amd64 !go1.16 go1.23 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2021 ByteDance Inc. @@ -27,6 +27,8 @@ import ( `github.com/bytedance/sonic/option` ) +const apiKind = UseStdJSON + type frozenConfig struct { Config } diff --git a/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go b/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go index 7883862c..b3e63418 100644 --- a/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go +++ b/vendor/github.com/bytedance/sonic/decoder/decoder_compat.go @@ -1,4 +1,4 @@ -// +build !amd64 !go1.16 go1.23 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2023 ByteDance Inc. @@ -30,7 +30,7 @@ import ( ) func init() { - println("WARNING: sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable") + println("WARNING: sonic/decoder only supports (Go1.17~1.23 && CPU amd64) or (go1.20~1.23 && CPU arm64), but your environment is not suitable") } const ( diff --git a/vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go b/vendor/github.com/bytedance/sonic/decoder/decoder_native.go similarity index 65% rename from vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go rename to vendor/github.com/bytedance/sonic/decoder/decoder_native.go index 346ebbce..9442d028 100644 --- a/vendor/github.com/bytedance/sonic/decoder/decoder_amd64.go +++ b/vendor/github.com/bytedance/sonic/decoder/decoder_native.go @@ -1,4 +1,6 @@ -// +build amd64,go1.16,!go1.23 +//go:build (amd64 && go1.17 && !go1.24) || (arm64 && go1.20 && !go1.24) +// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24 + /* * Copyright 2023 ByteDance Inc. @@ -19,50 +21,50 @@ package decoder import ( - `github.com/bytedance/sonic/internal/decoder` + `github.com/bytedance/sonic/internal/decoder/api` ) // Decoder is the decoder context object -type Decoder = decoder.Decoder +type Decoder = api.Decoder // SyntaxError represents json syntax error -type SyntaxError = decoder.SyntaxError +type SyntaxError = api.SyntaxError // MismatchTypeError represents dismatching between json and object -type MismatchTypeError = decoder.MismatchTypeError +type MismatchTypeError = api.MismatchTypeError // Options for decode. -type Options = decoder.Options +type Options = api.Options const ( - OptionUseInt64 Options = decoder.OptionUseInt64 - OptionUseNumber Options = decoder.OptionUseNumber - OptionUseUnicodeErrors Options = decoder.OptionUseUnicodeErrors - OptionDisableUnknown Options = decoder.OptionDisableUnknown - OptionCopyString Options = decoder.OptionCopyString - OptionValidateString Options = decoder.OptionValidateString + OptionUseInt64 Options = api.OptionUseInt64 + OptionUseNumber Options = api.OptionUseNumber + OptionUseUnicodeErrors Options = api.OptionUseUnicodeErrors + OptionDisableUnknown Options = api.OptionDisableUnknown + OptionCopyString Options = api.OptionCopyString + OptionValidateString Options = api.OptionValidateString ) // StreamDecoder is the decoder context object for streaming input. -type StreamDecoder = decoder.StreamDecoder +type StreamDecoder = api.StreamDecoder var ( // NewDecoder creates a new decoder instance. 
- NewDecoder = decoder.NewDecoder + NewDecoder = api.NewDecoder // NewStreamDecoder adapts to encoding/json.NewDecoder API. // // NewStreamDecoder returns a new decoder that reads from r. - NewStreamDecoder = decoder.NewStreamDecoder + NewStreamDecoder = api.NewStreamDecoder // Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in // order to reduce the first-hit latency. // // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is // a compile option to set the depth of recursive compile for the nested struct type. - Pretouch = decoder.Pretouch + Pretouch = api.Pretouch // Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid. // Otherwise, returns negative error code using start and invalid character position using end - Skip = decoder.Skip + Skip = api.Skip ) diff --git a/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go b/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go index 38761c45..254defa2 100644 --- a/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go +++ b/vendor/github.com/bytedance/sonic/encoder/encoder_compat.go @@ -1,4 +1,4 @@ -// +build !amd64 !go1.16 go1.23 +// +build !amd64,!arm64 go1.24 !go1.17 arm64,!go1.20 /* * Copyright 2023 ByteDance Inc. @@ -28,7 +28,7 @@ import ( ) func init() { - println("WARNING:(encoder) sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable") + println("WARNING:(encoder) sonic only supports (Go1.17~1.23 && CPU amd64) or (G01.20~1.23 && CPU arm64) , but your environment is not suitable") } // EnableFallback indicates if encoder use fallback diff --git a/vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go b/vendor/github.com/bytedance/sonic/encoder/encoder_native.go similarity index 95% rename from vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go rename to vendor/github.com/bytedance/sonic/encoder/encoder_native.go index 4dabec6c..b300ebf0 100644 --- a/vendor/github.com/bytedance/sonic/encoder/encoder_amd64.go +++ b/vendor/github.com/bytedance/sonic/encoder/encoder_native.go @@ -1,4 +1,4 @@ -// +build amd64,go1.16,!go1.23 +// +build amd64,go1.17,!go1.24 arm64,go1.20,!go1.24 /* * Copyright 2023 ByteDance Inc. @@ -70,6 +70,9 @@ const ( // CompatibleWithStd is used to be compatible with std encoder. CompatibleWithStd Options = encoder.CompatibleWithStd + + // Encode Infinity or Nan float into `null`, instead of returning an error. + EncodeNullForInfOrNan Options = encoder.EncodeNullForInfOrNan ) diff --git a/vendor/github.com/bytedance/sonic/go.work.sum b/vendor/github.com/bytedance/sonic/go.work.sum new file mode 100644 index 00000000..d5962587 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/go.work.sum @@ -0,0 +1 @@ +github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU= diff --git a/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go b/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go new file mode 100644 index 00000000..01f99f93 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/base64/b64_amd64.go @@ -0,0 +1,46 @@ +// +build amd64,go1.16 + +/** + * Copyright 2023 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package base64 + +import ( + "github.com/cloudwego/base64x" +) + +func DecodeBase64(src string) ([]byte, error) { + return base64x.StdEncoding.DecodeString(src) +} + +func EncodeBase64(buf []byte, src []byte) []byte { + if len(src) == 0 { + return append(buf, '"', '"') + } + buf = append(buf, '"') + need := base64x.StdEncoding.EncodedLen(len(src)) + if cap(buf) - len(buf) < need { + tmp := make([]byte, len(buf), len(buf) + need*2) + copy(tmp, buf) + buf = tmp + } + base64x.StdEncoding.Encode(buf[len(buf):cap(buf)], src) + buf = buf[:len(buf) + need] + buf = append(buf, '"') + return buf +} + + \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go b/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go new file mode 100644 index 00000000..ba8f8b56 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/base64/b64_compat.go @@ -0,0 +1,44 @@ +// +build !amd64 !go1.16 + +/* + * Copyright 2022 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package base64 + +import ( + "encoding/base64" +) + +func EncodeBase64(buf []byte, src []byte) []byte { + if len(src) == 0 { + return append(buf, '"', '"') + } + buf = append(buf, '"') + need := base64.StdEncoding.EncodedLen(len(src)) + if cap(buf) - len(buf) < need { + tmp := make([]byte, len(buf), len(buf) + need*2) + copy(tmp, buf) + buf = tmp + } + base64.StdEncoding.Encode(buf[len(buf):cap(buf)], src) + buf = buf[:len(buf) + need] + buf = append(buf, '"') + return buf +} + +func DecodeBase64(src string) ([]byte, error) { + return base64.StdEncoding.DecodeString(src) +} diff --git a/vendor/github.com/bytedance/sonic/internal/cpu/features.go b/vendor/github.com/bytedance/sonic/internal/cpu/features.go index f9ee3b8f..fd4dbda3 100644 --- a/vendor/github.com/bytedance/sonic/internal/cpu/features.go +++ b/vendor/github.com/bytedance/sonic/internal/cpu/features.go @@ -24,7 +24,6 @@ import ( ) var ( - HasAVX = cpuid.CPU.Has(cpuid.AVX) HasAVX2 = cpuid.CPU.Has(cpuid.AVX2) HasSSE = cpuid.CPU.Has(cpuid.SSE) ) @@ -33,7 +32,8 @@ func init() { switch v := os.Getenv("SONIC_MODE"); v { case "" : break case "auto" : break - case "noavx" : HasAVX = false; fallthrough + case "noavx" : HasAVX2 = false + // will also disable avx, act as `noavx`, we remain it to make sure forward compatibility case "noavx2" : HasAVX2 = false default : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v)) } diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go similarity index 57% rename from vendor/github.com/bytedance/sonic/internal/decoder/decoder.go rename to vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go index 8453db86..5e31198e 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/decoder.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder.go @@ -14,51 +14,51 @@ * limitations under the License. 
*/ -package decoder +package api import ( - `unsafe` - `encoding/json` `reflect` - `runtime` `github.com/bytedance/sonic/internal/native` `github.com/bytedance/sonic/internal/native/types` + `github.com/bytedance/sonic/internal/decoder/consts` + `github.com/bytedance/sonic/internal/decoder/errors` `github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/option` - `github.com/bytedance/sonic/utf8` ) const ( - _F_use_int64 = 0 - _F_disable_urc = 2 - _F_disable_unknown = 3 - _F_copy_string = 4 - - _F_use_number = types.B_USE_NUMBER - _F_validate_string = types.B_VALIDATE_STRING - _F_allow_control = types.B_ALLOW_CONTROL + _F_allow_control = consts.F_allow_control + _F_copy_string = consts.F_copy_string + _F_disable_unknown = consts.F_disable_unknown + _F_disable_urc = consts.F_disable_urc + _F_use_int64 = consts.F_use_int64 + _F_use_number = consts.F_use_number + _F_validate_string = consts.F_validate_string + + _MaxStack = consts.MaxStack + + OptionUseInt64 = consts.OptionUseInt64 + OptionUseNumber = consts.OptionUseNumber + OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors + OptionDisableUnknown = consts.OptionDisableUnknown + OptionCopyString = consts.OptionCopyString + OptionValidateString = consts.OptionValidateString ) -type Options uint64 - -const ( - OptionUseInt64 Options = 1 << _F_use_int64 - OptionUseNumber Options = 1 << _F_use_number - OptionUseUnicodeErrors Options = 1 << _F_disable_urc - OptionDisableUnknown Options = 1 << _F_disable_unknown - OptionCopyString Options = 1 << _F_copy_string - OptionValidateString Options = 1 << _F_validate_string +type ( + Options = consts.Options + MismatchTypeError = errors.MismatchTypeError + SyntaxError = errors.SyntaxError ) func (self *Decoder) SetOptions(opts Options) { - if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) { + if (opts & consts.OptionUseNumber != 0) && (opts & consts.OptionUseInt64 != 0) { panic("can't set OptionUseInt64 and OptionUseNumber both!") } self.f = uint64(opts) } - // Decoder is the decoder context object type Decoder struct { i int @@ -109,44 +109,7 @@ func (self *Decoder) CheckTrailings() error { // Decode parses the JSON-encoded data from current position and stores the result // in the value pointed to by val. 
func (self *Decoder) Decode(val interface{}) error { - /* validate json if needed */ - if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){ - dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd") - self.s = rt.Mem2Str(dbuf) - } - - vv := rt.UnpackEface(val) - vp := vv.Value - - /* check for nil type */ - if vv.Type == nil { - return &json.InvalidUnmarshalError{} - } - - /* must be a non-nil pointer */ - if vp == nil || vv.Type.Kind() != reflect.Ptr { - return &json.InvalidUnmarshalError{Type: vv.Type.Pack()} - } - - etp := rt.PtrElem(vv.Type) - - /* check the defined pointer type for issue 379 */ - if vv.Type.IsNamed() { - newp := vp - etp = vv.Type - vp = unsafe.Pointer(&newp) - } - - /* create a new stack, and call the decoder */ - sb := newStack() - nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f) - /* return the stack back */ - self.i = nb - freeStack(sb) - - /* avoid GC ahead */ - runtime.KeepAlive(vv) - return err + return decodeImpl(&self.s, &self.i, self.f, val) } // UseInt64 indicates the Decoder to unmarshal an integer into an interface{} as an @@ -194,53 +157,7 @@ func (self *Decoder) ValidateString() { // Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is // a compile option to set the depth of recursive compile for the nested struct type. func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { - cfg := option.DefaultCompileOptions() - for _, opt := range opts { - opt(&cfg) - } - return pretouchRec(map[reflect.Type]bool{vt:true}, cfg) -} - -func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) { - /* compile function */ - compiler := newCompiler().apply(opts) - decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { - if pp, err := compiler.compile(_vt); err != nil { - return nil, err - } else { - as := newAssembler(pp) - as.name = _vt.String() - return as.Load(), nil - } - } - - /* find or compile */ - vt := rt.UnpackType(_vt) - if val := programCache.Get(vt); val != nil { - return nil, nil - } else if _, err := programCache.Compute(vt, decoder); err == nil { - return compiler.rec, nil - } else { - return nil, err - } -} - -func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error { - if opts.RecursiveDepth < 0 || len(vtm) == 0 { - return nil - } - next := make(map[reflect.Type]bool) - for vt := range(vtm) { - sub, err := pretouchType(vt, opts) - if err != nil { - return err - } - for svt := range(sub) { - next[svt] = true - } - } - opts.RecursiveDepth -= 1 - return pretouchRec(next, opts) + return pretouchImpl(vt, opts...) } // Skip skips only one json value, and returns first non-blank character position and its ending position if it is valid. diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go new file mode 100644 index 00000000..4e1c3f42 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_amd64.go @@ -0,0 +1,38 @@ +//go:build go1.17 && !go1.24 +// +build go1.17,!go1.24 + +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package api
+
+import (
+    "github.com/bytedance/sonic/internal/envs"
+    "github.com/bytedance/sonic/internal/decoder/jitdec"
+    "github.com/bytedance/sonic/internal/decoder/optdec"
+)
+
+var (
+    pretouchImpl = jitdec.Pretouch
+    decodeImpl   = jitdec.Decode
+)
+
+func init() {
+    if envs.UseOptDec {
+        pretouchImpl = optdec.Pretouch
+        decodeImpl = optdec.Decode
+    }
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/native/avx/u64toa.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
similarity index 63%
rename from vendor/github.com/bytedance/sonic/internal/native/avx/u64toa.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
index 466402b6..65a9478b 100644
--- a/vendor/github.com/bytedance/sonic/internal/native/avx/u64toa.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/decoder_arm64.go
@@ -1,6 +1,4 @@
-// Code generated by Makefile, DO NOT EDIT.
-
-// Code generated by Makefile, DO NOT EDIT.
+// +build go1.17,!go1.24
 
 /*
  * Copyright 2021 ByteDance Inc.
@@ -18,19 +16,23 @@
  * limitations under the License.
  */
 
-package avx
+package api
 
 import (
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/internal/decoder/optdec`
+    `github.com/bytedance/sonic/internal/envs`
 )
 
-var F_u64toa func(out unsafe.Pointer, val uint64) (ret int)
+var (
+    pretouchImpl = optdec.Pretouch
+    decodeImpl   = optdec.Decode
+)
 
-var S_u64toa uintptr
-//go:nosplit
-func u64toa(out *byte, val uint64) (ret int) {
-    return F_u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
+func init() {
+    // when running on aarch64, enable all optimizations
+    envs.EnableOptDec()
+    envs.EnableFastMap()
 }
+
+
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
similarity index 90%
rename from vendor/github.com/bytedance/sonic/internal/decoder/stream.go
rename to vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
index 7eb8a695..8a8102dd 100644
--- a/vendor/github.com/bytedance/sonic/internal/decoder/stream.go
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/api/stream.go
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-package decoder
+package api
 
 import (
     `bytes`
@@ -47,6 +47,12 @@ var bufPool = sync.Pool{
     },
 }
 
+func freeBytes(buf []byte) {
+    if rt.CanSizeResue(cap(buf)) {
+        bufPool.Put(buf[:0])
+    }
+}
+
 // NewStreamDecoder adapts to encoding/json.NewDecoder API.
 //
 // NewStreamDecoder returns a new decoder that reads from r.
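The freeBytes helper above returns stream buffers to the package-level bufPool, but only when rt.CanSizeResue judges the capacity worth recycling, so oversized buffers are dropped instead of being pinned by the pool. A minimal, self-contained sketch of the same recycle-with-size-guard pattern follows; the 1 MiB threshold and the pool's initial capacity are illustrative assumptions, not sonic's actual rt.CanSizeResue policy:

package bufrecycle

import "sync"

// maxReusableCap is an assumed cut-off; sonic's real check lives in rt.CanSizeResue.
const maxReusableCap = 1 << 20 // 1 MiB

var bufPool = sync.Pool{
    New: func() interface{} { return make([]byte, 0, 512) },
}

// freeBytes recycles buf through the pool unless it has grown too large,
// so the pool never pins an unboundedly large allocation.
func freeBytes(buf []byte) {
    if cap(buf) <= maxReusableCap {
        bufPool.Put(buf[:0]) // reset length, keep capacity for the next user
    }
}

Callers pair this with bufPool.Get().([]byte) when they need a scratch buffer, much as the StreamDecoder.Decode hunk below recycles self.buf once a full JSON value has been consumed.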
@@ -61,25 +67,16 @@ func NewStreamDecoder(r io.Reader) *StreamDecoder { func (self *StreamDecoder) Decode(val interface{}) (err error) { // read more data into buf if self.More() { - // println(string(self.buf)) var s = self.scanp try_skip: var e = len(self.buf) - // println("s:", s, "e:", e, "scanned:",self.scanned, "scanp:",self.scanp, self.buf) var src = rt.Mem2Str(self.buf[s:e]) - // if len(src) > 5 { - // println(src[:5], src[len(src)-5:]) - // } else { - // println(src) - // } // try skip var x = 0; if y := native.SkipOneFast(&src, &x); y < 0 { if self.readMore() { - // println("more") goto try_skip } else { - // println("no more") err = SyntaxError{e, self.s, types.ParsingError(-s), ""} self.setErr(err) return @@ -89,7 +86,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) { e = x + s } - // println("decode: ", s, e) // must copy string here for safety self.Decoder.Reset(string(self.buf[s:e])) err = self.Decoder.Decode(val) @@ -101,13 +97,11 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) { self.scanp = e _, empty := self.scan() if empty { - // println("recycle") // no remain valid bytes, thus we just recycle buffer mem := self.buf self.buf = nil - bufPool.Put(mem[:0]) + freeBytes(mem) } else { - // println("keep") // remain undecoded bytes, move them onto head n := copy(self.buf, self.buf[self.scanp:]) self.buf = self.buf[:n] @@ -123,7 +117,6 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) { // InputOffset returns the input stream byte offset of the current decoder position. // The offset gives the location of the end of the most recently returned token and the beginning of the next token. func (self *StreamDecoder) InputOffset() int64 { - // println("input offset",self.scanned, self.scanp) return self.scanned + int64(self.scanp) } @@ -178,7 +171,7 @@ func (self *StreamDecoder) setErr(err error) { self.err = err mem := self.buf[:0] self.buf = nil - bufPool.Put(mem) + freeBytes(mem) } func (self *StreamDecoder) peek() (byte, error) { @@ -237,12 +230,10 @@ func realloc(buf *[]byte) bool { l := uint(len(*buf)) c := uint(cap(*buf)) if c == 0 { - // println("use pool!") *buf = bufPool.Get().([]byte) return true } if c - l <= c >> minLeftBufferShift { - // println("realloc!") e := l+(l>>minLeftBufferShift) if e <= c { e = c*2 diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go deleted file mode 100644 index 4c4c850a..00000000 --- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go116.go +++ /dev/null @@ -1,130 +0,0 @@ -// +build go1.16,!go1.17 - -// Copyright 2023 CloudWeGo Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package decoder - -import ( - `strconv` - _ `unsafe` - - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/rt` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` -) - -var _runtime_writeBarrier uintptr = rt.GcwbAddr() - -//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier -func gcWriteBarrierAX() - -var ( - _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier)) - - _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX) -) - -func (self *_Assembler) WritePtrAX(i int, rec obj.Addr, saveDI bool) { - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - if saveDI { - self.save(_DI) - } - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - if saveDI { - self.load(_DI) - } - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", _AX, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} - -func (self *_Assembler) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool, saveAX bool) { - if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - if saveAX { - self.Emit("XCHGQ", ptr, _AX) - } else { - self.Emit("MOVQ", ptr, _AX) - } - if saveDI { - self.save(_DI) - } - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - if saveDI { - self.load(_DI) - } - if saveAX { - self.Emit("XCHGQ", ptr, _AX) - } - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} - - -func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) { - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - if saveDI { - self.save(_DI) - } - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - if saveDI { - self.load(_DI) - } - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", _AX, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} - -func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) { - if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, _AX) - if saveDI { - self.save(_DI) - } - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - if saveDI { - self.load(_DI) - } - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} diff --git 
a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go deleted file mode 100644 index 9e2acc23..00000000 --- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_stkabi_amd64.go +++ /dev/null @@ -1,1950 +0,0 @@ -// +build go1.16,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package decoder - -import ( - `encoding/json` - `fmt` - `math` - `reflect` - `unsafe` - - `github.com/bytedance/sonic/internal/caching` - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/native/types` - `github.com/bytedance/sonic/internal/rt` - `github.com/twitchyliquid64/golang-asm/obj` -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %r12 : input pointer - * %r13 : input length - * %r14 : input cursor - * %r15 : value pointer - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (s string, ic int, vp unsafe.Pointer, sb *_Stack, fv uint64, sv string) (rc int, err error) - * - * s.buf : (FP) - * s.len : 8(FP) - * ic : 16(FP) - * vp : 24(FP) - * sb : 32(FP) - * fv : 40(FP) - * sv : 56(FP) - * err.vt : 72(FP) - * err.vp : 80(FP) - */ - -const ( - _FP_args = 96 // 96 bytes to pass arguments and return values for this function - _FP_fargs = 80 // 80 bytes for passing arguments to other Go functions - _FP_saves = 40 // 40 bytes for saving the registers before CALL instructions - _FP_locals = 144 // 144 bytes for local variables -) - -const ( - _FP_offs = _FP_fargs + _FP_saves + _FP_locals - _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_alse = 0x65736c61 // 'alse' ('false' without the 'f') -) - -const ( - _BM_space = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n') -) - -const ( - _MODE_JSON = 1 << 3 // base64 mode -) - -const ( - _LB_error = "_error" - _LB_im_error = "_im_error" - _LB_eof_error = "_eof_error" - _LB_type_error = "_type_error" - _LB_field_error = "_field_error" - _LB_range_error = "_range_error" - _LB_stack_error = "_stack_error" - _LB_base64_error = "_base64_error" - _LB_unquote_error = "_unquote_error" - _LB_parsing_error = "_parsing_error" - _LB_parsing_error_v = "_parsing_error_v" - _LB_mismatch_error = "_mismatch_error" -) - -const ( - _LB_char_0_error = "_char_0_error" - _LB_char_1_error = "_char_1_error" - _LB_char_2_error = "_char_2_error" - _LB_char_3_error = "_char_3_error" - _LB_char_4_error = "_char_4_error" - _LB_char_m2_error = "_char_m2_error" - _LB_char_m3_error = "_char_m3_error" -) - -const ( - _LB_skip_one = "_skip_one" - _LB_skip_key_value = "_skip_key_value" -) - -var ( - _AX = jit.Reg("AX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - 
_DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") - _R9 = jit.Reg("R9") - _X0 = jit.Reg("X0") - _X1 = jit.Reg("X1") -) - -var ( - _ST = jit.Reg("BX") - _IP = jit.Reg("R12") - _IL = jit.Reg("R13") - _IC = jit.Reg("R14") - _VP = jit.Reg("R15") -) - -var ( - _R10 = jit.Reg("R10") // used for gcWriteBarrier - _DF = jit.Reg("R10") // reuse R10 in generic decoder for flags - _ET = jit.Reg("R10") - _EP = jit.Reg("R11") -) - -var ( - _ARG_s = _ARG_sp - _ARG_sp = jit.Ptr(_SP, _FP_base) - _ARG_sl = jit.Ptr(_SP, _FP_base + 8) - _ARG_ic = jit.Ptr(_SP, _FP_base + 16) - _ARG_vp = jit.Ptr(_SP, _FP_base + 24) - _ARG_sb = jit.Ptr(_SP, _FP_base + 32) - _ARG_fv = jit.Ptr(_SP, _FP_base + 40) -) - -var ( - _VAR_sv = _VAR_sv_p - _VAR_sv_p = jit.Ptr(_SP, _FP_base + 48) - _VAR_sv_n = jit.Ptr(_SP, _FP_base + 56) - _VAR_vk = jit.Ptr(_SP, _FP_base + 64) -) - -var ( - _RET_rc = jit.Ptr(_SP, _FP_base + 72) - _RET_et = jit.Ptr(_SP, _FP_base + 80) - _RET_ep = jit.Ptr(_SP, _FP_base + 88) -) - -var ( - _VAR_st = _VAR_st_Vt - _VAR_sr = jit.Ptr(_SP, _FP_fargs + _FP_saves) -) - - -var ( - _VAR_st_Vt = jit.Ptr(_SP, _FP_fargs + _FP_saves + 0) - _VAR_st_Dv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_st_Iv = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) - _VAR_st_Ep = jit.Ptr(_SP, _FP_fargs + _FP_saves + 24) - _VAR_st_Db = jit.Ptr(_SP, _FP_fargs + _FP_saves + 32) - _VAR_st_Dc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 40) -) - -var ( - _VAR_ss_AX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 48) - _VAR_ss_CX = jit.Ptr(_SP, _FP_fargs + _FP_saves + 56) - _VAR_ss_SI = jit.Ptr(_SP, _FP_fargs + _FP_saves + 64) - _VAR_ss_R8 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 72) - _VAR_ss_R9 = jit.Ptr(_SP, _FP_fargs + _FP_saves + 80) -) - -var ( - _VAR_bs_p = jit.Ptr(_SP, _FP_fargs + _FP_saves + 88) - _VAR_bs_n = jit.Ptr(_SP, _FP_fargs + _FP_saves + 96) - _VAR_bs_LR = jit.Ptr(_SP, _FP_fargs + _FP_saves + 104) -) - -var _VAR_fl = jit.Ptr(_SP, _FP_fargs + _FP_saves + 112) - -var ( - _VAR_et = jit.Ptr(_SP, _FP_fargs + _FP_saves + 120) // save dismatched type - _VAR_ic = jit.Ptr(_SP, _FP_fargs + _FP_saves + 128) // save dismatched position - _VAR_pc = jit.Ptr(_SP, _FP_fargs + _FP_saves + 136) // save skip return pc -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** Assembler Interface **/ - -func (self *_Assembler) Load() _Decoder { - return ptodec(self.BaseAssembler.Load("decode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.copy_string() - self.escape_string() - self.escape_string_twice() - self.skip_one() - self.skip_key_value() - self.mismatch_error() - self.type_error() - self.field_error() - self.range_error() - self.stack_error() - self.base64_error() - self.parsing_error() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_any : (*_Assembler)._asm_OP_any, - _OP_dyn : (*_Assembler)._asm_OP_dyn, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_num : (*_Assembler)._asm_OP_num, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 
: (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_unquote : (*_Assembler)._asm_OP_unquote, - _OP_nil_1 : (*_Assembler)._asm_OP_nil_1, - _OP_nil_2 : (*_Assembler)._asm_OP_nil_2, - _OP_nil_3 : (*_Assembler)._asm_OP_nil_3, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_is_null : (*_Assembler)._asm_OP_is_null, - _OP_is_null_quote : (*_Assembler)._asm_OP_is_null_quote, - _OP_map_init : (*_Assembler)._asm_OP_map_init, - _OP_map_key_i8 : (*_Assembler)._asm_OP_map_key_i8, - _OP_map_key_i16 : (*_Assembler)._asm_OP_map_key_i16, - _OP_map_key_i32 : (*_Assembler)._asm_OP_map_key_i32, - _OP_map_key_i64 : (*_Assembler)._asm_OP_map_key_i64, - _OP_map_key_u8 : (*_Assembler)._asm_OP_map_key_u8, - _OP_map_key_u16 : (*_Assembler)._asm_OP_map_key_u16, - _OP_map_key_u32 : (*_Assembler)._asm_OP_map_key_u32, - _OP_map_key_u64 : (*_Assembler)._asm_OP_map_key_u64, - _OP_map_key_f32 : (*_Assembler)._asm_OP_map_key_f32, - _OP_map_key_f64 : (*_Assembler)._asm_OP_map_key_f64, - _OP_map_key_str : (*_Assembler)._asm_OP_map_key_str, - _OP_map_key_utext : (*_Assembler)._asm_OP_map_key_utext, - _OP_map_key_utext_p : (*_Assembler)._asm_OP_map_key_utext_p, - _OP_array_skip : (*_Assembler)._asm_OP_array_skip, - _OP_array_clear : (*_Assembler)._asm_OP_array_clear, - _OP_array_clear_p : (*_Assembler)._asm_OP_array_clear_p, - _OP_slice_init : (*_Assembler)._asm_OP_slice_init, - _OP_slice_append : (*_Assembler)._asm_OP_slice_append, - _OP_object_skip : (*_Assembler)._asm_OP_object_skip, - _OP_object_next : (*_Assembler)._asm_OP_object_next, - _OP_struct_field : (*_Assembler)._asm_OP_struct_field, - _OP_unmarshal : (*_Assembler)._asm_OP_unmarshal, - _OP_unmarshal_p : (*_Assembler)._asm_OP_unmarshal_p, - _OP_unmarshal_text : (*_Assembler)._asm_OP_unmarshal_text, - _OP_unmarshal_text_p : (*_Assembler)._asm_OP_unmarshal_text_p, - _OP_lspace : (*_Assembler)._asm_OP_lspace, - _OP_match_char : (*_Assembler)._asm_OP_match_char, - _OP_check_char : (*_Assembler)._asm_OP_check_char, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_switch : (*_Assembler)._asm_OP_switch, - _OP_check_char_0 : (*_Assembler)._asm_OP_check_char_0, - _OP_dismatch_err : (*_Assembler)._asm_OP_dismatch_err, - _OP_go_skip : (*_Assembler)._asm_OP_go_skip, - _OP_add : (*_Assembler)._asm_OP_add, - _OP_check_empty : (*_Assembler)._asm_OP_check_empty, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _EP, _EP) // XORL EP, EP - self.Emit("MOVQ", _VAR_et, _ET) // MOVQ VAR_et, ET - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ", _LB_mismatch_error) // JNZ _LB_mismatch_error - self.Link(_LB_error) // _error: - self.Emit("MOVQ", _IC, _RET_rc) // MOVQ IC, rc<>+40(FP) - self.Emit("MOVQ", _ET, _RET_et) // MOVQ ET, et<>+48(FP) - self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+56(FP) - 
self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.Emit("MOVQ", _ARG_sp, _IP) // MOVQ s.p<>+0(FP), IP - self.Emit("MOVQ", _ARG_sl, _IL) // MOVQ s.l<>+8(FP), IL - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC - self.Emit("MOVQ", _ARG_vp, _VP) // MOVQ vp<>+24(FP), VP - self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ vp<>+32(FP), ST - // initialize digital buffer first - self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_st_Dc) // MOVQ $_MaxDigitNums, ss.Dcap - self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX - self.Emit("MOVQ", _AX, _VAR_st_Db) // MOVQ AX, ss.Dbuf - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_et) // MOVQ AX, ss.Dp -} - -/** Function Calling Helpers **/ - -var _REG_go = []obj.Addr { - _ST, - _VP, - _IP, - _IL, - _IC, -} - -func (self *_Assembler) save(r ...obj.Addr) { - for i, v := range r { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) load(r ...obj.Addr) { - for i, v := range r { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) call(fn obj.Addr) { - self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX - self.Rjmp("CALL", _AX) // CALL AX -} - -func (self *_Assembler) call_go(fn obj.Addr) { - self.save(_REG_go...) // SAVE $REG_go - self.call(fn) // CALL ${fn} - self.load(_REG_go...) 
// LOAD $REG_go -} - -func (self *_Assembler) call_sf(fn obj.Addr) { - self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI - self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP) - self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI - self.Emit("LEAQ", jit.Ptr(_ST, _FsmOffset), _DX) // LEAQ _FsmOffset(ST), DX - self.Emit("MOVQ", _ARG_fv, _CX) - self.call(fn) // CALL ${fn} - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC -} - -func (self *_Assembler) call_vf(fn obj.Addr) { - self.Emit("LEAQ", _ARG_s, _DI) // LEAQ s<>+0(FP), DI - self.Emit("MOVQ", _IC, _ARG_ic) // MOVQ IC, ic<>+16(FP) - self.Emit("LEAQ", _ARG_ic, _SI) // LEAQ ic<>+16(FP), SI - self.Emit("LEAQ", _VAR_st, _DX) // LEAQ st, DX - self.call(fn) // CALL ${fn} - self.Emit("MOVQ", _ARG_ic, _IC) // MOVQ ic<>+16(FP), IC -} - -/** Assembler Error Handlers **/ - -var ( - _F_convT64 = jit.Func(convT64) - _F_error_wrap = jit.Func(error_wrap) - _F_error_type = jit.Func(error_type) - _F_error_field = jit.Func(error_field) - _F_error_value = jit.Func(error_value) - _F_error_mismatch = jit.Func(error_mismatch) -) - -var ( - _I_int8 , _T_int8 = rtype(reflect.TypeOf(int8(0))) - _I_int16 , _T_int16 = rtype(reflect.TypeOf(int16(0))) - _I_int32 , _T_int32 = rtype(reflect.TypeOf(int32(0))) - _I_uint8 , _T_uint8 = rtype(reflect.TypeOf(uint8(0))) - _I_uint16 , _T_uint16 = rtype(reflect.TypeOf(uint16(0))) - _I_uint32 , _T_uint32 = rtype(reflect.TypeOf(uint32(0))) - _I_float32 , _T_float32 = rtype(reflect.TypeOf(float32(0))) -) - -var ( - _T_error = rt.UnpackType(errorType) - _I_base64_CorruptInputError = jit.Itab(_T_error, base64CorruptInputError) -) - -var ( - _V_stackOverflow = jit.Imm(int64(uintptr(unsafe.Pointer(&stackOverflow)))) - _I_json_UnsupportedValueError = jit.Itab(_T_error, reflect.TypeOf(new(json.UnsupportedValueError))) - _I_json_MismatchTypeError = jit.Itab(_T_error, reflect.TypeOf(new(MismatchTypeError))) -) - -func (self *_Assembler) type_error() { - self.Link(_LB_type_error) // _type_error: - self.Emit("MOVQ", _ET, jit.Ptr(_SP, 0)) // MOVQ ET, (SP) - self.call_go(_F_error_type) // CALL_GO error_type - self.Emit("MOVQ", jit.Ptr(_SP, 8), _ET) // MOVQ 8(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 16), _EP) // MOVQ 16(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - - -func (self *_Assembler) mismatch_error() { - self.Link(_LB_mismatch_error) // _type_error: - self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET - self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP - self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX - self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX - self.Sjmp("JE" , _LB_error) // JE _LB_error - self.Emit("MOVQ", _ARG_sp, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", _ARG_sl, _CX) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ", _VAR_ic, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ", _VAR_et, _CX) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_go(_F_error_mismatch) // CALL_GO error_type - self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) _asm_OP_dismatch_err(p *_Instr) { - self.Emit("MOVQ", _IC, _VAR_ic) - self.Emit("MOVQ", jit.Type(p.vt()), _ET) - self.Emit("MOVQ", _ET, _VAR_et) -} - -func (self *_Assembler) _asm_OP_go_skip(p *_Instr) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - 
self.Xref(p.vi(), 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) // JMP _skip_one -} - -func (self *_Assembler) skip_one() { - self.Link(_LB_skip_one) // _skip: - self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9 - self.Rjmp("JMP" , _R9) // JMP (R9) -} - - -func (self *_Assembler) skip_key_value() { - self.Link(_LB_skip_key_value) // _skip: - // skip the key - self.Emit("MOVQ", _VAR_ic, _IC) // MOVQ _VAR_ic, IC - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - // match char ':' - self.lspace("_global_1") - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(':')) - self.Sjmp("JNE" , _LB_parsing_error_v) // JNE _parse_error_v - self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC - self.lspace("_global_2") - // skip the value - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - // jump back to specified address - self.Emit("MOVQ" , _VAR_pc, _R9) // MOVQ pc, R9 - self.Rjmp("JMP" , _R9) // JMP (R9) -} - -func (self *_Assembler) field_error() { - self.Link(_LB_field_error) // _field_error: - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.call_go(_F_error_field) // CALL_GO error_field - self.Emit("MOVQ" , jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) range_error() { - self.Link(_LB_range_error) // _range_error: - self.slice_from(_VAR_st_Ep, 0) // SLICE st.Ep, $0 - self.Emit("MOVQ", _DI, jit.Ptr(_SP, 0)) // MOVQ DI, (SP) - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP) - self.Emit("MOVQ", _ET, jit.Ptr(_SP, 16)) // MOVQ ET, 16(SP) - self.Emit("MOVQ", _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP) - self.call_go(_F_error_value) // CALL_GO error_value - self.Emit("MOVQ", jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) stack_error() { - self.Link(_LB_stack_error) // _stack_error: - self.Emit("MOVQ", _V_stackOverflow, _EP) // MOVQ ${_V_stackOverflow}, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ ${_I_json_UnsupportedValueError}, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) base64_error() { - self.Link(_LB_base64_error) - self.Emit("NEGQ", _AX) // NEGQ AX - self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.call_go(_F_convT64) // CALL_GO convT64 - self.Emit("MOVQ", jit.Ptr(_SP, 8), _EP) // MOVQ 8(SP), EP - self.Emit("MOVQ", _I_base64_CorruptInputError, _ET) // MOVQ ${itab(base64.CorruptInputError)}, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) parsing_error() { - self.Link(_LB_eof_error) // _eof_error: - self.Emit("MOVQ" , _IL, _IC) // MOVQ IL, IC - self.Emit("MOVL" , jit.Imm(int64(types.ERR_EOF)), _EP) // MOVL ${types.ERR_EOF}, EP - self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error - self.Link(_LB_unquote_error) // _unquote_error: - self.Emit("SUBQ" , _VAR_sr, _SI) // SUBQ 
sr, SI - self.Emit("SUBQ" , _SI, _IC) // SUBQ IL, IC - self.Link(_LB_parsing_error_v) // _parsing_error_v: - self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP - self.Emit("NEGQ" , _EP) // NEGQ EP - self.Sjmp("JMP" , _LB_parsing_error) // JMP _parsing_error - self.Link(_LB_char_m3_error) // _char_m3_error: - self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC - self.Link(_LB_char_m2_error) // _char_m2_error: - self.Emit("SUBQ" , jit.Imm(2), _IC) // SUBQ $2, IC - self.Sjmp("JMP" , _LB_char_0_error) // JMP _char_0_error - self.Link(_LB_im_error) // _im_error: - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPB CX, (IP)(IC) - self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error - self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 1)) // CMPB CX, 1(IP)(IC) - self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error - self.Emit("SHRL" , jit.Imm(8), _CX) // SHRL $8, CX - self.Emit("CMPB" , _CX, jit.Sib(_IP, _IC, 1, 2)) // CMPB CX, 2(IP)(IC) - self.Sjmp("JNE" , _LB_char_2_error) // JNE _char_2_error - self.Sjmp("JMP" , _LB_char_3_error) // JNE _char_3_error - self.Link(_LB_char_4_error) // _char_4_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_3_error) // _char_3_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_2_error) // _char_2_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_1_error) // _char_1_error: - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Link(_LB_char_0_error) // _char_0_error: - self.Emit("MOVL" , jit.Imm(int64(types.ERR_INVALID_CHAR)), _EP) // MOVL ${types.ERR_INVALID_CHAR}, EP - self.Link(_LB_parsing_error) // _parsing_error: - self.Emit("MOVOU", _ARG_s, _X0) // MOVOU s, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP) - self.Emit("MOVQ" , _EP, jit.Ptr(_SP, 24)) // MOVQ EP, 24(SP) - self.call_go(_F_error_wrap) // CALL_GO error_wrap - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** Memory Management Routines **/ - -var ( - _T_byte = jit.Type(byteType) - _F_mallocgc = jit.Func(mallocgc) -) - -func (self *_Assembler) malloc(nb obj.Addr, ret obj.Addr) { - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _T_byte, _CX) // MOVQ ${type(byte)}, CX - self.Emit("MOVQ", nb, jit.Ptr(_SP, 0)) // MOVQ ${nb}, (SP) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(_F_mallocgc) // CALL_GO mallocgc - self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret} -} - -func (self *_Assembler) valloc(vt reflect.Type, ret obj.Addr) { - self.Emit("MOVQ", jit.Imm(int64(vt.Size())), _AX) // MOVQ ${vt.Size()}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ ${vt}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVB", jit.Imm(1), jit.Ptr(_SP, 16)) // MOVB $1, 16(SP) - self.call_go(_F_mallocgc) // CALL_GO mallocgc - self.Emit("MOVQ", jit.Ptr(_SP, 24), ret) // MOVQ 24(SP), ${ret} -} - -func (self *_Assembler) vfollow(vt reflect.Type) { - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n} - self.valloc(vt, _AX) // VALLOC ${vt}, AX - self.WritePtrAX(1, 
jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_end_{n}") // _end_{n}: - self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP -} - -/** Value Parsing Routines **/ - -var ( - _F_vstring = jit.Imm(int64(native.S_vstring)) - _F_vnumber = jit.Imm(int64(native.S_vnumber)) - _F_vsigned = jit.Imm(int64(native.S_vsigned)) - _F_vunsigned = jit.Imm(int64(native.S_vunsigned)) -) - -func (self *_Assembler) check_err(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ" , _VAR_st_Vt, _AX) // MOVQ st.Vt, AX - self.Emit("TESTQ", _AX, _AX) // CMPQ AX, ${native.V_STRING} - // try to skip the value - if vt != nil { - self.Sjmp("JNS" , "_check_err_{n}") // JNE _parsing_error_v - self.Emit("MOVQ", jit.Type(vt), _ET) - self.Emit("MOVQ", _ET, _VAR_et) - if pin2 != -1 { - self.Emit("SUBQ", jit.Imm(1), _BP) - self.Emit("MOVQ", _BP, _VAR_ic) - self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9 - self.Xref(pin2, 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_key_value) - } else { - self.Emit("MOVQ", _BP, _VAR_ic) - self.Byte(0x4c , 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref(pin, 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - } - self.Link("_check_err_{n}") - } else { - self.Sjmp("JS" , _LB_parsing_error_v) // JNE _parsing_error_v - } -} - -func (self *_Assembler) check_eof(d int64) { - if d == 1 { - self.Emit("CMPQ", _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error - } else { - self.Emit("LEAQ", jit.Ptr(_IC, d), _AX) // LEAQ ${d}(IC), AX - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - } -} - -func (self *_Assembler) parse_string() { // parse_string has a validate flag params in the last - self.Emit("MOVQ", _ARG_fv, _CX) - self.call_vf(_F_vstring) - self.check_err(nil, "", -1) -} - -func (self *_Assembler) parse_number(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vnumber) // call vnumber - self.check_err(vt, pin, pin2) -} - -func (self *_Assembler) parse_signed(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vsigned) - self.check_err(vt, pin, pin2) -} - -func (self *_Assembler) parse_unsigned(vt reflect.Type, pin string, pin2 int) { - self.Emit("MOVQ", _IC, _BP) - self.call_vf(_F_vunsigned) - self.check_err(vt, pin, pin2) -} - -// Pointer: DI, Size: SI, Return: R9 -func (self *_Assembler) copy_string() { - self.Link("_copy_string") - self.Emit("MOVQ", _DI, _VAR_bs_p) - self.Emit("MOVQ", _SI, _VAR_bs_n) - self.Emit("MOVQ", _R9, _VAR_bs_LR) - self.malloc(_SI, _AX) - self.Emit("MOVQ", _AX, _VAR_sv_p) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) - self.Emit("MOVQ", _VAR_bs_p, _DI) - self.Emit("MOVQ", _DI, jit.Ptr(_SP, 8)) - self.Emit("MOVQ", _VAR_bs_n, _SI) - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 16)) - self.call_go(_F_memmove) - self.Emit("MOVQ", _VAR_sv_p, _DI) - self.Emit("MOVQ", _VAR_bs_n, _SI) - self.Emit("MOVQ", _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -// Pointer: DI, Size: SI, Return: R9 -func (self *_Assembler) escape_string() { - self.Link("_escape_string") - self.Emit("MOVQ" , _DI, _VAR_bs_p) - self.Emit("MOVQ" , _SI, _VAR_bs_n) - self.Emit("MOVQ" , _R9, _VAR_bs_LR) - self.malloc(_SI, _DX) // MALLOC SI, DX - self.Emit("MOVQ" , _DX, _VAR_sv_p) - self.Emit("MOVQ" , _VAR_bs_p, _DI) - self.Emit("MOVQ" , _VAR_bs_n, _SI) - self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX - self.Emit("XORL" , _R8, _R8) // XORL R8, R8 - self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, fv - 
self.Emit("SETCC", _R8) // SETCC R8 - self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8 - self.call(_F_unquote) // CALL unquote - self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI - self.Emit("ADDQ" , jit.Imm(1), _SI) // ADDQ $1, SI - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error - self.Emit("MOVQ" , _AX, _SI) - self.Emit("MOVQ" , _VAR_sv_p, _DI) - self.Emit("MOVQ" , _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -func (self *_Assembler) escape_string_twice() { - self.Link("_escape_string_twice") - self.Emit("MOVQ" , _DI, _VAR_bs_p) - self.Emit("MOVQ" , _SI, _VAR_bs_n) - self.Emit("MOVQ" , _R9, _VAR_bs_LR) - self.malloc(_SI, _DX) // MALLOC SI, DX - self.Emit("MOVQ" , _DX, _VAR_sv_p) - self.Emit("MOVQ" , _VAR_bs_p, _DI) - self.Emit("MOVQ" , _VAR_bs_n, _SI) - self.Emit("LEAQ" , _VAR_sr, _CX) // LEAQ sr, CX - self.Emit("MOVL" , jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - self.Emit("BTQ" , jit.Imm(_F_disable_urc), _ARG_fv) // BTQ ${_F_disable_urc}, AX - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("SETCC", _AX) // SETCC AX - self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _AX) // SHLQ ${types.B_UNICODE_REPLACE}, AX - self.Emit("ORQ" , _AX, _R8) // ORQ AX, R8 - self.call(_F_unquote) // CALL unquote - self.Emit("MOVQ" , _VAR_bs_n, _SI) // MOVQ ${n}, SI - self.Emit("ADDQ" , jit.Imm(3), _SI) // ADDQ $3, SI - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_unquote_error) // JS _unquote_error - self.Emit("MOVQ" , _AX, _SI) - self.Emit("MOVQ" , _VAR_sv_p, _DI) - self.Emit("MOVQ" , _VAR_bs_LR, _R9) - self.Rjmp("JMP", _R9) -} - -/** Range Checking Routines **/ - -var ( - _V_max_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_max_f32)))) - _V_min_f32 = jit.Imm(int64(uintptr(unsafe.Pointer(_Vp_min_f32)))) -) - -var ( - _Vp_max_f32 = new(float32) - _Vp_min_f32 = new(float32) -) - -func init() { - *_Vp_max_f32 = math.MaxFloat32 - *_Vp_min_f32 = -math.MaxFloat32 -} - -func (self *_Assembler) range_single() { - self.Emit("CVTSD2SS", _VAR_st_Dv, _X0) // CVTSD2SS st.Dv, X0 - self.Emit("MOVQ" , _V_max_f32, _AX) // MOVQ _max_f32, AX - self.Emit("MOVQ" , jit.Gitab(_I_float32), _ET) // MOVQ ${itab(float32)}, ET - self.Emit("MOVQ" , jit.Gtype(_T_float32), _EP) // MOVQ ${type(float32)}, EP - self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0 - self.Sjmp("JA" , _LB_range_error) // JA _range_error - self.Emit("MOVQ" , _V_min_f32, _AX) // MOVQ _min_f32, AX - self.Emit("UCOMISS" , jit.Ptr(_AX, 0), _X0) // UCOMISS (AX), X0 - self.Sjmp("JB" , _LB_range_error) // JB _range_error -} - -func (self *_Assembler) range_signed(i *rt.GoItab, t *rt.GoType, a int64, b int64) { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", jit.Gitab(i), _ET) // MOVQ ${i}, ET - self.Emit("MOVQ", jit.Gtype(t), _EP) // MOVQ ${t}, EP - self.Emit("CMPQ", _AX, jit.Imm(a)) // CMPQ AX, ${a} - self.Sjmp("JL" , _LB_range_error) // JL _range_error - self.Emit("CMPQ", _AX, jit.Imm(b)) // CMPQ AX, ${B} - self.Sjmp("JG" , _LB_range_error) // JG _range_error -} - -func (self *_Assembler) range_unsigned(i *rt.GoItab, t *rt.GoType, v uint64) { - self.Emit("MOVQ" , _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ" , jit.Gitab(i), _ET) // MOVQ ${i}, ET - self.Emit("MOVQ" , jit.Gtype(t), _EP) // MOVQ ${t}, EP - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_range_error) // JS _range_error - self.Emit("CMPQ" , _AX, 
jit.Imm(int64(v))) // CMPQ AX, ${a} - self.Sjmp("JA" , _LB_range_error) // JA _range_error -} - -/** String Manipulating Routines **/ - -var ( - _F_unquote = jit.Imm(int64(native.S_unquote)) -) - -func (self *_Assembler) slice_from(p obj.Addr, d int64) { - self.Emit("MOVQ", p, _SI) // MOVQ ${p}, SI - self.slice_from_r(_SI, d) // SLICE_R SI, ${d} -} - -func (self *_Assembler) slice_from_r(p obj.Addr, d int64) { - self.Emit("LEAQ", jit.Sib(_IP, p, 1, 0), _DI) // LEAQ (IP)(${p}), DI - self.Emit("NEGQ", p) // NEGQ ${p} - self.Emit("LEAQ", jit.Sib(_IC, p, 1, d), _SI) // LEAQ d(IC)(${p}), SI -} - -func (self *_Assembler) unquote_once(p obj.Addr, n obj.Addr, stack bool, copy bool) { - self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1 - self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1 - self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n} - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_once_write_{n}", 4) - self.Sjmp("JMP" , "_escape_string") - self.Link("_noescape_{n}") // _noescape_{n}: - if copy { - self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv) - self.Sjmp("JNC", "_unquote_once_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_once_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - } - self.Link("_unquote_once_write_{n}") - self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n} - if stack { - self.Emit("MOVQ", _DI, p) - } else { - self.WriteRecNotAX(10, _DI, p, false, false) - } -} - -func (self *_Assembler) unquote_twice(p obj.Addr, n obj.Addr, stack bool) { - self.Emit("CMPQ" , _VAR_st_Ep, jit.Imm(-1)) // CMPQ st.Ep, $-1 - self.Sjmp("JE" , _LB_eof_error) // JE _eof_error - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -3), jit.Imm('\\')) // CMPB -3(IP)(IC), $'\\' - self.Sjmp("JNE" , _LB_char_m3_error) // JNE _char_m3_error - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, -2), jit.Imm('"')) // CMPB -2(IP)(IC), $'"' - self.Sjmp("JNE" , _LB_char_m2_error) // JNE _char_m2_error - self.slice_from(_VAR_st_Iv, -3) // SLICE st.Iv, $-3 - self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX - self.Emit("ADDQ" , _VAR_st_Iv, _AX) // ADDQ st.Iv, AX - self.Emit("CMPQ" , _VAR_st_Ep, _AX) // CMPQ st.Ep, AX - self.Sjmp("JE" , "_noescape_{n}") // JE _noescape_{n} - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_twice_write_{n}", 4) - self.Sjmp("JMP" , "_escape_string_twice") - self.Link("_noescape_{n}") // _noescape_{n}: - self.Emit("BTQ" , jit.Imm(_F_copy_string), _ARG_fv) - self.Sjmp("JNC", "_unquote_twice_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_unquote_twice_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - self.Link("_unquote_twice_write_{n}") - self.Emit("MOVQ" , _SI, n) // MOVQ SI, ${n} - if stack { - self.Emit("MOVQ", _DI, p) - } else { - self.WriteRecNotAX(12, _DI, p, false, false) - } -} - -/** Memory Clearing Routines **/ - -var ( - _F_memclrHasPointers = jit.Func(memclrHasPointers) - _F_memclrNoHeapPointers = jit.Func(memclrNoHeapPointers) -) - -func (self *_Assembler) mem_clear_fn(ptrfree bool) { - if !ptrfree { - self.call_go(_F_memclrHasPointers) - } else { - self.call_go(_F_memclrNoHeapPointers) - } -} - -func (self *_Assembler) mem_clear_rem(size int64, ptrfree bool) { - self.Emit("MOVQ", jit.Imm(size), _CX) // MOVQ ${size}, CX - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _AX) // MOVQ (ST)(AX), AX - self.Emit("SUBQ", _VP, _AX) // SUBQ VP, AX - self.Emit("ADDQ", _AX, _CX) // ADDQ AX, CX - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 0)) // MOVQ VP, 
(SP) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.mem_clear_fn(ptrfree) // CALL_GO memclr{Has,NoHeap}Pointers -} - -/** Map Assigning Routines **/ - -var ( - _F_mapassign = jit.Func(mapassign) - _F_mapassign_fast32 = jit.Func(mapassign_fast32) - _F_mapassign_faststr = jit.Func(mapassign_faststr) - _F_mapassign_fast64ptr = jit.Func(mapassign_fast64ptr) -) - -var ( - _F_decodeJsonUnmarshaler obj.Addr - _F_decodeTextUnmarshaler obj.Addr -) - -func init() { - _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler) - _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler) -} - -func (self *_Assembler) mapaccess_ptr(t reflect.Type) { - if rt.MapType(rt.UnpackType(t)).IndirectElem() { - self.vfollow(t.Elem()) - } -} - -func (self *_Assembler) mapassign_std(t reflect.Type, v obj.Addr) { - self.Emit("LEAQ", v, _AX) // LEAQ ${v}, AX - self.mapassign_call(t, _F_mapassign) // MAPASSIGN ${t}, mapassign -} - -func (self *_Assembler) mapassign_str_fast(t reflect.Type, p obj.Addr, n obj.Addr) { - self.Emit("MOVQ", jit.Type(t), _AX) // MOVQ ${t}, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - self.Emit("MOVQ", p, jit.Ptr(_SP, 16)) // MOVQ ${p}, 16(SP) - self.Emit("MOVQ", n, jit.Ptr(_SP, 24)) // MOVQ ${n}, 24(SP) - self.call_go(_F_mapassign_faststr) // CALL_GO ${fn} - self.Emit("MOVQ", jit.Ptr(_SP, 32), _VP) // MOVQ 32(SP), VP - self.mapaccess_ptr(t) -} - -func (self *_Assembler) mapassign_call(t reflect.Type, fn obj.Addr) { - self.Emit("MOVQ", jit.Type(t), _SI) // MOVQ ${t}, SI - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP) - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(fn) // CALL_GO ${fn} - self.Emit("MOVQ", jit.Ptr(_SP, 24), _VP) // MOVQ 24(SP), VP -} - -func (self *_Assembler) mapassign_fastx(t reflect.Type, fn obj.Addr) { - self.mapassign_call(t, fn) - self.mapaccess_ptr(t) -} - -func (self *_Assembler) mapassign_utext(t reflect.Type, addressable bool) { - pv := false - vk := t.Key() - tk := t.Key() - - /* deref pointer if needed */ - if vk.Kind() == reflect.Ptr { - pv = true - vk = vk.Elem() - } - - /* addressable value with pointer receiver */ - if addressable { - pv = false - tk = reflect.PtrTo(tk) - } - - /* allocate the key, and call the unmarshaler */ - self.valloc(vk, _DI) // VALLOC ${vk}, DI - // must spill vk pointer since next call_go may invoke GC - self.Emit("MOVQ" , _DI, _VAR_vk) - self.Emit("MOVQ" , jit.Type(tk), _AX) // MOVQ ${tk}, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _DI, jit.Ptr(_SP, 8)) // MOVQ DI, 8(SP) - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP) - self.call_go(_F_decodeTextUnmarshaler) // CALL_GO decodeTextUnmarshaler - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.Emit("MOVQ" , _VAR_vk, _AX) - - /* select the correct assignment function */ - if !pv { - self.mapassign_call(t, _F_mapassign) - } else { - self.mapassign_fastx(t, _F_mapassign_fast64ptr) - } -} - -/** External Unmarshaler Routines **/ - -var ( - _F_skip_one = jit.Imm(int64(native.S_skip_one)) - _F_skip_number = jit.Imm(int64(native.S_skip_number)) -) - -func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) { - 
self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v - self.slice_from_r(_AX, 0) // SLICE_R AX, $0 - self.Emit("MOVQ" , _DI, _VAR_sv_p) // MOVQ DI, sv.p - self.Emit("MOVQ" , _SI, _VAR_sv_n) // MOVQ SI, sv.n - self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref} -} - -func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - self.unmarshal_func(t, _F_decodeTextUnmarshaler, deref) // UNMARSHAL text, ${t}, ${deref} -} - -func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) { - pt := t - vk := t.Kind() - - /* allocate the field if needed */ - if deref && vk == reflect.Ptr { - self.Emit("MOVQ" , _VP, _AX) // MOVQ VP, AX - self.Emit("MOVQ" , jit.Ptr(_AX, 0), _AX) // MOVQ (AX), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_deref_{n}") // JNZ _deref_{n} - self.valloc(t.Elem(), _AX) // VALLOC ${t.Elem()}, AX - self.WritePtrAX(3, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_deref_{n}") // _deref_{n}: - } - - /* set value type */ - self.Emit("MOVQ", jit.Type(pt), _CX) // MOVQ ${pt}, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 0)) // MOVQ CX, (SP) - - /* set value pointer */ - if deref && vk == reflect.Ptr { - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - } else { - self.Emit("MOVQ", _VP, jit.Ptr(_SP, 8)) // MOVQ VP, 8(SP) - } - - /* set the source string and call the unmarshaler */ - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 16)) // MOVOU X0, 16(SP) - self.call_go(fn) // CALL_GO ${fn} - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -/** Dynamic Decoding Routine **/ - -var ( - _F_decodeTypedPointer obj.Addr -) - -func init() { - _F_decodeTypedPointer = jit.Func(decodeTypedPointer) -} - -func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) { - self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX - self.Emit("MOVOU", _ARG_sp, _X0) // MOVOU sp, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 0)) // MOVOU X0, (SP) - self.Emit("MOVQ" , _IC, jit.Ptr(_SP, 16)) // MOVQ IC, 16(SP) - self.Emit("MOVQ" , vt, jit.Ptr(_SP, 24)) // MOVQ ${vt}, 24(SP) - self.Emit("MOVQ" , vp, jit.Ptr(_SP, 32)) // MOVQ ${vp}, 32(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 40)) // MOVQ ST, 40(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 48)) // MOVQ CX, 48(SP) - self.call_go(_F_decodeTypedPointer) // CALL_GO decodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 64), _ET) // MOVQ 64(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 72), _EP) // MOVQ 72(SP), EP - self.Emit("MOVQ" , jit.Ptr(_SP, 56), _IC) // MOVQ 56(SP), IC - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JE", "_decode_dynamic_end_{n}") // JE, _decode_dynamic_end_{n} - self.Emit("MOVQ", _I_json_MismatchTypeError, _AX) // MOVQ _I_json_MismatchTypeError, AX - self.Emit("CMPQ", _ET, _AX) // CMPQ ET, AX - self.Sjmp("JNE" , _LB_error) // JNE LB_error - self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic - self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et - self.Link("_decode_dynamic_end_{n}") - -} - -/** OpCode Assembler Functions **/ - -var ( - _F_memequal = jit.Func(memequal) - _F_memmove = jit.Func(memmove) - 
_F_growslice = jit.Func(growslice) - _F_makeslice = jit.Func(makeslice) - _F_makemap_small = jit.Func(makemap_small) - _F_mapassign_fast64 = jit.Func(mapassign_fast64) -) - -var ( - _F_lspace = jit.Imm(int64(native.S_lspace)) - _F_strhash = jit.Imm(int64(caching.S_strhash)) -) - -var ( - _F_b64decode = jit.Imm(int64(_subr__b64decode)) - _F_decodeValue = jit.Imm(int64(_subr_decode_value)) -) - -var ( - _F_skip_array = jit.Imm(int64(native.S_skip_array)) - _F_skip_object = jit.Imm(int64(native.S_skip_object)) -) - -var ( - _F_FieldMap_GetCaseInsensitive obj.Addr - _Empty_Slice = make([]byte, 0) - _Zero_Base = int64(uintptr(((*rt.GoSlice)(unsafe.Pointer(&_Empty_Slice))).Ptr)) -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -const ( - _Fe_ID = int64(unsafe.Offsetof(caching.FieldEntry{}.ID)) - _Fe_Name = int64(unsafe.Offsetof(caching.FieldEntry{}.Name)) - _Fe_Hash = int64(unsafe.Offsetof(caching.FieldEntry{}.Hash)) -) - -const ( - _Vk_Ptr = int64(reflect.Ptr) - _Gt_KindFlags = int64(unsafe.Offsetof(rt.GoType{}.KindFlags)) -) - -func init() { - _F_FieldMap_GetCaseInsensitive = jit.Func((*caching.FieldMap).GetCaseInsensitive) -} - -func (self *_Assembler) _asm_OP_any(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 8), _CX) // MOVQ 8(VP), CX - self.Emit("TESTQ" , _CX, _CX) // TESTQ CX, CX - self.Sjmp("JZ" , "_decode_{n}") // JZ _decode_{n} - self.Emit("CMPQ" , _CX, _VP) // CMPQ CX, VP - self.Sjmp("JE" , "_decode_{n}") // JE _decode_{n} - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX - self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX - self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr} - self.Sjmp("JNE" , "_decode_{n}") // JNE _decode_{n} - self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI - self.decode_dynamic(_AX, _DI) // DECODE AX, DI - self.Sjmp("JMP" , "_decode_end_{n}") // JMP _decode_end_{n} - self.Link("_decode_{n}") // _decode_{n}: - self.Emit("MOVQ" , _ARG_fv, _DF) // MOVQ fv, DF - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 0)) // MOVQ _ST, (SP) - self.call(_F_decodeValue) // CALL decodeValue - self.Emit("TESTQ" , _EP, _EP) // TESTQ EP, EP - self.Sjmp("JNZ" , _LB_parsing_error) // JNZ _parsing_error - self.Link("_decode_end_{n}") // _decode_end_{n}: -} - -func (self *_Assembler) _asm_OP_dyn(p *_Instr) { - self.Emit("MOVQ" , jit.Type(p.vt()), _ET) // MOVQ ${p.vt()}, ET - self.Emit("CMPQ" , jit.Ptr(_VP, 8), jit.Imm(0)) // CMPQ 8(VP), $0 - self.Sjmp("JE" , _LB_type_error) // JE _type_error - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX - self.Emit("MOVBLZX", jit.Ptr(_AX, _Gt_KindFlags), _DX) // MOVBLZX _Gt_KindFlags(AX), DX - self.Emit("ANDL" , jit.Imm(rt.F_kind_mask), _DX) // ANDL ${F_kind_mask}, DX - self.Emit("CMPL" , _DX, jit.Imm(_Vk_Ptr)) // CMPL DX, ${reflect.Ptr} - self.Sjmp("JNE" , _LB_type_error) // JNE _type_error - self.Emit("LEAQ" , jit.Ptr(_VP, 8), _DI) // LEAQ 8(VP), DI - self.decode_dynamic(_AX, _DI) // DECODE AX, DI - self.Link("_decode_end_{n}") // _decode_end_{n}: -} - -func (self *_Assembler) _asm_OP_str(_ *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false, true) // UNQUOTE once, (VP), 8(VP) -} - -func (self *_Assembler) _asm_OP_bin(_ *_Instr) { - self.parse_string() // PARSE STRING - self.slice_from(_VAR_st_Iv, -1) // SLICE st.Iv, $-1 - self.Emit("MOVQ" , _DI, jit.Ptr(_VP, 0)) // MOVQ DI, (VP) - 
self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP) - self.Emit("SHRQ" , jit.Imm(2), _SI) // SHRQ $2, SI - self.Emit("LEAQ" , jit.Sib(_SI, _SI, 2, 0), _SI) // LEAQ (SI)(SI*2), SI - self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP) - self.malloc(_SI, _SI) // MALLOC SI, SI - - // TODO: due to base64x's bug, only use AVX mode now - self.Emit("MOVL", jit.Imm(_MODE_JSON), _CX) // MOVL $_MODE_JSON, CX - - /* call the decoder */ - self.Emit("XORL" , _DX, _DX) // XORL DX, DX - self.Emit("MOVQ" , _VP, _DI) // MOVQ VP, DI - - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _R9) // MOVQ SI, (VP) - self.WriteRecNotAX(4, _SI, jit.Ptr(_VP, 0), true, false) // XCHGQ SI, (VP) - self.Emit("MOVQ" , _R9, _SI) - - self.Emit("XCHGQ", _DX, jit.Ptr(_VP, 8)) // XCHGQ DX, 8(VP) - self.call(_F_b64decode) // CALL b64decode - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_base64_error) // JS _base64_error - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) -} - -func (self *_Assembler) _asm_OP_bool(_ *_Instr) { - self.Emit("LEAQ", jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('f')) // CMPB (IP)(IC), $'f' - self.Sjmp("JE" , "_false_{n}") // JE _false_{n} - self.Emit("MOVL", jit.Imm(_IM_true), _CX) // MOVL $"true", CX - self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC) - self.Sjmp("JE" , "_bool_true_{n}") - - // try to skip the value - self.Emit("MOVQ", _IC, _VAR_ic) - self.Emit("MOVQ", _T_bool, _ET) - self.Emit("MOVQ", _ET, _VAR_et) - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - self.Sref("_end_{n}", 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - - self.Link("_bool_true_{n}") - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.Emit("MOVB", jit.Imm(1), jit.Ptr(_VP, 0)) // MOVB $1, (VP) - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.Emit("ADDQ", jit.Imm(1), _AX) // ADDQ $1, AX - self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC - self.Emit("CMPQ", _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , _LB_eof_error) // JA _eof_error - self.Emit("MOVL", jit.Imm(_IM_alse), _CX) // MOVL $"alse", CX - self.Emit("CMPL", _CX, jit.Sib(_IP, _IC, 1, 0)) // CMPL CX, (IP)(IC) - self.Sjmp("JNE" , _LB_im_error) // JNE _im_error - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_num(_ *_Instr) { - self.Emit("MOVQ", jit.Imm(0), _VAR_fl) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"')) - self.Emit("MOVQ", _IC, _BP) - self.Sjmp("JNE", "_skip_number_{n}") - self.Emit("MOVQ", jit.Imm(1), _VAR_fl) - self.Emit("ADDQ", jit.Imm(1), _IC) - self.Link("_skip_number_{n}") - - /* call skip_number */ - self.call_sf(_F_skip_number) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNS" , "_num_next_{n}") - - /* call skip one */ - self.Emit("MOVQ", _BP, _VAR_ic) - self.Emit("MOVQ", _T_number, _ET) - self.Emit("MOVQ", _ET, _VAR_et) - self.Byte(0x4c, 0x8d, 0x0d) - self.Sref("_num_end_{n}", 4) - self.Emit("MOVQ", _R9, _VAR_pc) - self.Sjmp("JMP" , _LB_skip_one) - - /* assgin string */ - self.Link("_num_next_{n}") - self.slice_from_r(_AX, 0) - self.Emit("BTQ", jit.Imm(_F_copy_string), _ARG_fv) - self.Sjmp("JNC", "_num_write_{n}") - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ (PC), R9 - 
self.Sref("_num_write_{n}", 4) - self.Sjmp("JMP", "_copy_string") - self.Link("_num_write_{n}") - self.Emit("MOVQ", _SI, jit.Ptr(_VP, 8)) // MOVQ SI, 8(VP) - self.WriteRecNotAX(13, _DI, jit.Ptr(_VP, 0), false, false) - - /* check if quoted */ - self.Emit("CMPQ", _VAR_fl, jit.Imm(1)) - self.Sjmp("JNE", "_num_end_{n}") - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('"')) - self.Sjmp("JNE", _LB_char_0_error) - self.Emit("ADDQ", jit.Imm(1), _IC) - self.Link("_num_end_{n}") -} - -func (self *_Assembler) _asm_OP_i8(ins *_Instr) { - var pin = "_i8_end_{n}" - self.parse_signed(int8Type, pin, -1) // PARSE int8 - self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8 - self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i16(ins *_Instr) { - var pin = "_i16_end_{n}" - self.parse_signed(int16Type, pin, -1) // PARSE int16 - self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16 - self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i32(ins *_Instr) { - var pin = "_i32_end_{n}" - self.parse_signed(int32Type, pin, -1) // PARSE int32 - self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32 - self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_i64(ins *_Instr) { - var pin = "_i64_end_{n}" - self.parse_signed(int64Type, pin, -1) // PARSE int64 - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u8(ins *_Instr) { - var pin = "_u8_end_{n}" - self.parse_unsigned(uint8Type, pin, -1) // PARSE uint8 - self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8 - self.Emit("MOVB", _AX, jit.Ptr(_VP, 0)) // MOVB AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u16(ins *_Instr) { - var pin = "_u16_end_{n}" - self.parse_unsigned(uint16Type, pin, -1) // PARSE uint16 - self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16 - self.Emit("MOVW", _AX, jit.Ptr(_VP, 0)) // MOVW AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u32(ins *_Instr) { - var pin = "_u32_end_{n}" - self.parse_unsigned(uint32Type, pin, -1) // PARSE uint32 - self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32 - self.Emit("MOVL", _AX, jit.Ptr(_VP, 0)) // MOVL AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_u64(ins *_Instr) { - var pin = "_u64_end_{n}" - self.parse_unsigned(uint64Type, pin, -1) // PARSE uint64 - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_f32(ins *_Instr) { - var pin = "_f32_end_{n}" - self.parse_number(float32Type, pin, -1) // PARSE NUMBER - self.range_single() // RANGE float32 - self.Emit("MOVSS", _X0, jit.Ptr(_VP, 0)) // MOVSS X0, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_f64(ins *_Instr) { - var pin = "_f64_end_{n}" - self.parse_number(float64Type, pin, -1) // PARSE NUMBER - self.Emit("MOVSD", _VAR_st_Dv, _X0) // MOVSD st.Dv, X0 - self.Emit("MOVSD", _X0, jit.Ptr(_VP, 0)) // MOVSD X0, (VP) - self.Link(pin) -} - -func (self *_Assembler) _asm_OP_unquote(ins *_Instr) { - self.check_eof(2) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm('\\')) // CMPB (IP)(IC), $'\\' - self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error - 
self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 1), jit.Imm('"')) // CMPB 1(IP)(IC), $'"' - self.Sjmp("JNE" , _LB_char_1_error) // JNE _char_1_error - self.Emit("ADDQ", jit.Imm(2), _IC) // ADDQ $2, IC - self.parse_string() // PARSE STRING - self.unquote_twice(jit.Ptr(_VP, 0), jit.Ptr(_VP, 8), false) // UNQUOTE twice, (VP), 8(VP) -} - -func (self *_Assembler) _asm_OP_nil_1(_ *_Instr) { - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, jit.Ptr(_VP, 0)) // MOVQ AX, (VP) -} - -func (self *_Assembler) _asm_OP_nil_2(_ *_Instr) { - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP) -} - -func (self *_Assembler) _asm_OP_nil_3(_ *_Instr) { - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP) - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 16)) // MOVOU X0, 16(VP) -} - -func (self *_Assembler) _asm_OP_deref(p *_Instr) { - self.vfollow(p.vt()) -} - -func (self *_Assembler) _asm_OP_index(p *_Instr) { - self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ ${p.vi()}, AX - self.Emit("ADDQ", _AX, _VP) // ADDQ _AX, _VP -} - -func (self *_Assembler) _asm_OP_is_null(p *_Instr) { - self.Emit("LEAQ" , jit.Ptr(_IC, 4), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , "_not_null_{n}") // JA _not_null_{n} - self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null" - self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC - self.Xjmp("JE" , p.vi()) // JE {p.vi()} - self.Link("_not_null_{n}") // _not_null_{n}: -} - -func (self *_Assembler) _asm_OP_is_null_quote(p *_Instr) { - self.Emit("LEAQ" , jit.Ptr(_IC, 5), _AX) // LEAQ 4(IC), AX - self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL - self.Sjmp("JA" , "_not_null_quote_{n}") // JA _not_null_quote_{n} - self.Emit("CMPL" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(_IM_null)) // CMPL (IP)(IC), $"null" - self.Sjmp("JNE" , "_not_null_quote_{n}") // JNE _not_null_quote_{n} - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 4), jit.Imm('"')) // CMPB 4(IP)(IC), $'"' - self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC - self.Xjmp("JE" , p.vi()) // JE {p.vi()} - self.Link("_not_null_quote_{n}") // _not_null_quote_{n}: -} - -func (self *_Assembler) _asm_OP_map_init(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _AX) // MOVQ (VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_end_{n}") // JNZ _end_{n} - self.call_go(_F_makemap_small) // CALL_GO makemap_small - self.Emit("MOVQ" , jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX - self.WritePtrAX(6, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_end_{n}") // _end_{n}: - self.Emit("MOVQ" , _AX, _VP) // MOVQ AX, VP -} - -func (self *_Assembler) _asm_OP_map_key_i8(p *_Instr) { - self.parse_signed(int8Type, "", p.vi()) // PARSE int8 - self.range_signed(_I_int8, _T_int8, math.MinInt8, math.MaxInt8) // RANGE int8 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int8, mapassign, st.Iv -} - -func (self *_Assembler) _asm_OP_map_key_i16(p *_Instr) { - self.parse_signed(int16Type, "", p.vi()) // PARSE int16 - self.range_signed(_I_int16, _T_int16, math.MinInt16, math.MaxInt16) // RANGE int16 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN int16, mapassign, st.Iv -} - -func (self *_Assembler) _asm_OP_map_key_i32(p *_Instr) { - self.parse_signed(int32Type, "", p.vi()) // PARSE int32 - self.range_signed(_I_int32, _T_int32, math.MinInt32, math.MaxInt32) // RANGE int32 - 
self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int32, mapassign, st.Iv - } else { - self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN int32, mapassign_fast32 - } -} - -func (self *_Assembler) _asm_OP_map_key_i64(p *_Instr) { - self.parse_signed(int64Type, "", p.vi()) // PARSE int64 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN int64, mapassign, st.Iv - } else { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN int64, mapassign_fast64 - } -} - -func (self *_Assembler) _asm_OP_map_key_u8(p *_Instr) { - self.parse_unsigned(uint8Type, "", p.vi()) // PARSE uint8 - self.range_unsigned(_I_uint8, _T_uint8, math.MaxUint8) // RANGE uint8 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint8, vt.Iv -} - -func (self *_Assembler) _asm_OP_map_key_u16(p *_Instr) { - self.parse_unsigned(uint16Type, "", p.vi()) // PARSE uint16 - self.range_unsigned(_I_uint16, _T_uint16, math.MaxUint16) // RANGE uint16 - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Iv) // MAPASSIGN uint16, vt.Iv -} - -func (self *_Assembler) _asm_OP_map_key_u32(p *_Instr) { - self.parse_unsigned(uint32Type, "", p.vi()) // PARSE uint32 - self.range_unsigned(_I_uint32, _T_uint32, math.MaxUint32) // RANGE uint32 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint32, vt.Iv - } else { - self.mapassign_fastx(vt, _F_mapassign_fast32) // MAPASSIGN uint32, mapassign_fast32 - } -} - -func (self *_Assembler) _asm_OP_map_key_u64(p *_Instr) { - self.parse_unsigned(uint64Type, "", p.vi()) // PARSE uint64 - self.match_char('"') - if vt := p.vt(); !mapfast(vt) { - self.mapassign_std(vt, _VAR_st_Iv) // MAPASSIGN uint64, vt.Iv - } else { - self.Emit("MOVQ", _VAR_st_Iv, _AX) // MOVQ st.Iv, AX - self.mapassign_fastx(vt, _F_mapassign_fast64) // MAPASSIGN uint64, mapassign_fast64 - } -} - -func (self *_Assembler) _asm_OP_map_key_f32(p *_Instr) { - self.parse_number(float32Type, "", p.vi()) // PARSE NUMBER - self.range_single() // RANGE float32 - self.Emit("MOVSS", _X0, _VAR_st_Dv) // MOVSS X0, st.Dv - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv -} - -func (self *_Assembler) _asm_OP_map_key_f64(p *_Instr) { - self.parse_number(float64Type, "", p.vi()) // PARSE NUMBER - self.match_char('"') - self.mapassign_std(p.vt(), _VAR_st_Dv) // MAPASSIGN ${p.vt()}, mapassign, st.Dv -} - -func (self *_Assembler) _asm_OP_map_key_str(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - if vt := p.vt(); !mapfast(vt) { - self.valloc(vt.Key(), _DI) - self.Emit("MOVOU", _VAR_sv, _X0) - self.Emit("MOVOU", _X0, jit.Ptr(_DI, 0)) - self.mapassign_std(vt, jit.Ptr(_DI, 0)) - } else { - self.Emit("MOVQ", _VAR_sv_p, _DI) // MOVQ sv.p, DI - self.Emit("MOVQ", _VAR_sv_n, _SI) // MOVQ sv.n, SI - self.mapassign_str_fast(vt, _DI, _SI) // MAPASSIGN string, DI, SI - } -} - -func (self *_Assembler) _asm_OP_map_key_utext(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, true) // UNQUOTE once, sv.p, sv.n - self.mapassign_utext(p.vt(), false) // MAPASSIGN utext, ${p.vt()}, false -} - -func (self *_Assembler) _asm_OP_map_key_utext_p(p *_Instr) { - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, 
true, false) // UNQUOTE once, sv.p, sv.n - self.mapassign_utext(p.vt(), true) // MAPASSIGN utext, ${p.vt()}, true -} - -func (self *_Assembler) _asm_OP_array_skip(_ *_Instr) { - self.call_sf(_F_skip_array) // CALL_SF skip_array - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v -} - -func (self *_Assembler) _asm_OP_array_clear(p *_Instr) { - self.mem_clear_rem(p.i64(), true) -} - -func (self *_Assembler) _asm_OP_array_clear_p(p *_Instr) { - self.mem_clear_rem(p.i64(), false) -} - -func (self *_Assembler) _asm_OP_slice_init(p *_Instr) { - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) - self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_done_{n}") // JNZ _done_{n} - self.Emit("MOVQ" , jit.Imm(_MinSlice), _CX) // MOVQ ${_MinSlice}, CX - self.Emit("MOVQ" , _CX, jit.Ptr(_VP, 16)) // MOVQ CX, 16(VP) - self.Emit("MOVQ" , jit.Type(p.vt()), _DX) // MOVQ ${p.vt()}, DX - self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP) - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP) - self.call_go(_F_makeslice) // CALL_GO makeslice - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX - self.WritePtrAX(7, jit.Ptr(_VP, 0), false) // MOVQ AX, (VP) - self.Link("_done_{n}") // _done_{n}: - self.Emit("XORL" , _AX, _AX) // XORL AX, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) -} - -func (self *_Assembler) _asm_OP_check_empty(p *_Instr) { - rbracket := p.vb() - if rbracket == ']' { - self.check_eof(1) - self.Emit("LEAQ", jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(rbracket))) // CMPB (IP)(IC), ']' - self.Sjmp("JNE" , "_not_empty_array_{n}") // JNE _not_empty_array_{n} - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - self.Emit("MOVQ", jit.Imm(_Zero_Base), _AX) - self.WritePtrAX(9, jit.Ptr(_VP, 0), false) - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 8)) // MOVOU X0, 8(VP) - self.Xjmp("JMP" , p.vi()) // JMP {p.vi()} - self.Link("_not_empty_array_{n}") - } else { - panic("only implement check empty array here!") - } -} - -func (self *_Assembler) _asm_OP_slice_append(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_VP, 8), _AX) // MOVQ 8(VP), AX - self.Emit("CMPQ" , _AX, jit.Ptr(_VP, 16)) // CMPQ AX, 16(VP) - self.Sjmp("JB" , "_index_{n}") // JB _index_{n} - self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVOU", jit.Ptr(_VP, 0), _X0) // MOVOU (VP), X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.Emit("MOVQ" , jit.Ptr(_VP, 16), _AX) // MOVQ 16(VP), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP) - self.Emit("SHLQ" , jit.Imm(1), _AX) // SHLQ $1, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_go(_F_growslice) // CALL_GO growslice - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVQ 40(SP), DI - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _AX) // MOVQ 48(SP), AX - self.Emit("MOVQ" , jit.Ptr(_SP, 56), _SI) // MOVQ 56(SP), SI - self.WriteRecNotAX(8, _DI, jit.Ptr(_VP, 0), true, true)// MOVQ DI, (VP) - self.Emit("MOVQ" , _AX, jit.Ptr(_VP, 8)) // MOVQ AX, 8(VP) - self.Emit("MOVQ" , _SI, jit.Ptr(_VP, 16)) // MOVQ SI, 16(VP) - - // because growslice does not zero the memory {oldcap, newlen} when the appended element type has no ptrdata, - // but we should zero it to avoid decoding it as random values. - if rt.UnpackType(p.vt()).PtrData == 0 { - self.Emit("SUBQ" , _AX, _SI) // SUBQ AX, SI - - self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP) - self.Emit("MOVQ" , _DI, _VP) // MOVQ DI, VP - self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX - self.From("MULQ" , _CX) // MULQ CX - self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP - - self.Emit("MOVQ" , _SI, _AX) // MOVQ SI, AX - self.From("MULQ" , _CX) // MULQ CX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - - self.Emit("MOVQ" , _VP, jit.Ptr(_SP, 0)) // MOVQ VP, (SP) - self.mem_clear_fn(true) // CALL_GO memclr{Has,NoHeap} - self.Sjmp("JMP", "_append_slice_end_{n}") // JMP _append_slice_end_{n} - } - - self.Link("_index_{n}") // _index_{n}: - self.Emit("ADDQ" , jit.Imm(1), jit.Ptr(_VP, 8)) // ADDQ $1, 8(VP) - self.Emit("MOVQ" , jit.Ptr(_VP, 0), _VP) // MOVQ (VP), VP - self.Emit("MOVQ" , jit.Imm(int64(p.vlen())), _CX) // MOVQ ${p.vlen()}, CX - self.From("MULQ" , _CX) // MULQ CX - self.Emit("ADDQ" , _AX, _VP) // ADDQ AX, VP - self.Link("_append_slice_end_{n}") -} - -func (self *_Assembler) _asm_OP_object_skip(_ *_Instr) { - self.call_sf(_F_skip_object) // CALL_SF skip_object - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v -} - -func (self *_Assembler) _asm_OP_object_next(_ *_Instr) { - self.call_sf(_F_skip_one) // CALL_SF skip_one - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v -} - -func (self *_Assembler) _asm_OP_struct_field(p *_Instr) { - assert_eq(caching.FieldEntrySize, 32, "invalid field entry size") - self.Emit("MOVQ" , jit.Imm(-1), _AX) // MOVQ $-1, AX - self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, sr - self.parse_string() // PARSE STRING - self.unquote_once(_VAR_sv_p, _VAR_sv_n, true, false) // UNQUOTE once, sv.p, sv.n - self.Emit("LEAQ" , _VAR_sv, _AX) // LEAQ sv, AX - self.Emit("XORL" , _CX, _CX) // XORL CX, CX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.call_go(_F_strhash) // CALL_GO strhash - self.Emit("MOVQ" , jit.Ptr(_SP, 16), _AX) // MOVQ 16(SP), AX - self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9 - self.Emit("MOVQ" , jit.Imm(freezeFields(p.vf())), _CX) // MOVQ ${p.vf()}, CX - self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_b), _SI) // MOVQ FieldMap.b(CX), SI - self.Emit("MOVQ" , jit.Ptr(_CX, caching.FieldMap_N), _CX) // MOVQ FieldMap.N(CX), CX - self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX - self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n} - self.Link("_loop_{n}") // _loop_{n}: - self.Emit("XORL" , _DX, _DX) // XORL DX, DX - self.From("DIVQ" , _CX) // DIVQ CX - self.Emit("LEAQ" , jit.Ptr(_DX, 1), _AX) // LEAQ 1(DX), AX - self.Emit("SHLQ" , jit.Imm(5), _DX) // SHLQ $5, DX - self.Emit("LEAQ" , jit.Sib(_SI, _DX, 1, 0), _DI) // LEAQ (SI)(DX), DI - self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Hash), _R8) // MOVQ FieldEntry.Hash(DI), R8 - self.Emit("TESTQ", _R8, _R8) // TESTQ R8, R8 - self.Sjmp("JZ" , "_try_lowercase_{n}") // JZ _try_lowercase_{n} - self.Emit("CMPQ" , _R8, _R9) // CMPQ R8, R9 - self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n} - self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name + 8), _DX) // MOVQ FieldEntry.Name+8(DI), DX - self.Emit("CMPQ" , _DX, _VAR_sv_n) // CMPQ DX, sv.n - self.Sjmp("JNE" , "_loop_{n}") // JNE _loop_{n} - self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_ID), _R8) // MOVQ FieldEntry.ID(DI), R8
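The loop being emitted at this point is a hash probe over the struct's field table: hash the key with strhash, reduce it modulo the table size (the DIVQ), then walk entries until an empty slot or a match on hash, length, and bytes (memequal). A rough Go model of the same lookup, with a simplified stand-in for sonic's caching.FieldEntry:

```go
package main

import "fmt"

// fieldEntry is a simplified stand-in for caching.FieldEntry.
type fieldEntry struct {
	ID   int64
	Name string
	Hash uint64
}

// lookupField mirrors the emitted probe loop: open addressing keyed by
// the string hash, where an empty slot (Hash == 0) means "not found"
// and the caller falls back to the case-insensitive path. The table is
// assumed to always contain at least one empty slot, as the real one does.
func lookupField(tab []fieldEntry, name string, hash uint64) int64 {
	n := uint64(len(tab))
	for i := hash % n; ; i = (i + 1) % n {
		e := &tab[i]
		if e.Hash == 0 {
			return -1
		}
		if e.Hash == hash && e.Name == name {
			return e.ID
		}
	}
}

func main() {
	tab := make([]fieldEntry, 4)
	tab[1] = fieldEntry{ID: 0, Name: "id", Hash: 5}
	fmt.Println(lookupField(tab, "id", 5)) // 0
}
```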
- self.Emit("MOVQ" , _AX, _VAR_ss_AX) // MOVQ AX, ss.AX - self.Emit("MOVQ" , _CX, _VAR_ss_CX) // MOVQ CX, ss.CX - self.Emit("MOVQ" , _SI, _VAR_ss_SI) // MOVQ SI, ss.SI - self.Emit("MOVQ" , _R8, _VAR_ss_R8) // MOVQ R8, ss.R8 - self.Emit("MOVQ" , _R9, _VAR_ss_R9) // MOVQ R9, ss.R9 - self.Emit("MOVQ" , _VAR_sv_p, _AX) // MOVQ _VAR_sv_p, AX - self.Emit("MOVQ" , jit.Ptr(_DI, _Fe_Name), _CX) // MOVQ FieldEntry.Name(DI), CX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 16)) // MOVQ DX, 16(SP) - self.call_go(_F_memequal) // CALL_GO memequal - self.Emit("MOVQ" , _VAR_ss_AX, _AX) // MOVQ ss.AX, AX - self.Emit("MOVQ" , _VAR_ss_CX, _CX) // MOVQ ss.CX, CX - self.Emit("MOVQ" , _VAR_ss_SI, _SI) // MOVQ ss.SI, SI - self.Emit("MOVQ" , _VAR_ss_R9, _R9) // MOVQ ss.R9, R9 - self.Emit("MOVB" , jit.Ptr(_SP, 24), _DX) // MOVB 24(SP), DX - self.Emit("TESTB", _DX, _DX) // TESTB DX, DX - self.Sjmp("JZ" , "_loop_{n}") // JZ _loop_{n} - self.Emit("MOVQ" , _VAR_ss_R8, _R8) // MOVQ ss.R8, R8 - self.Emit("MOVQ" , _R8, _VAR_sr) // MOVQ R8, sr - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_try_lowercase_{n}") // _try_lowercase_{n}: - self.Emit("MOVQ" , jit.Imm(referenceFields(p.vf())), _AX) // MOVQ ${p.vf()}, AX - self.Emit("MOVOU", _VAR_sv, _X0) // MOVOU sv, X0 - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.call_go(_F_FieldMap_GetCaseInsensitive) // CALL_GO FieldMap::GetCaseInsensitive - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _AX) // MOVQ 24(SP), AX - self.Emit("MOVQ" , _AX, _VAR_sr) // MOVQ AX, _VAR_sr - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNS" , "_end_{n}") // JNS _end_{n} - self.Emit("BTQ" , jit.Imm(_F_disable_unknown), _ARG_fv) // BTQ ${_F_disable_unknown}, fv - self.Sjmp("JC" , _LB_field_error) // JC _field_error - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) { - self.unmarshal_json(p.vt(), true) -} - -func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) { - self.unmarshal_json(p.vt(), false) -} - -func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) { - self.unmarshal_text(p.vt(), true) -} - -func (self *_Assembler) _asm_OP_unmarshal_text_p(p *_Instr) { - self.unmarshal_text(p.vt(), false) -} - -func (self *_Assembler) _asm_OP_lspace(_ *_Instr) { - self.lspace("_{n}") -} - -func (self *_Assembler) lspace(subfix string) { - var label = "_lspace" + subfix - - self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error - self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX - self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX - self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' ' - self.Sjmp("JA" , label) // JA _nospace_{n} - self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX - self.Sjmp("JNC" , label) // JNC _nospace_{n} - - /* test up to 4 characters */ - for i := 0; i < 3; i++ { - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error - self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX - self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' ' - self.Sjmp("JA" , label) // JA _nospace_{n} - self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX - self.Sjmp("JNC" , label) // JNC _nospace_{n} - } - - /* handle over to the native function */ - self.Emit("MOVQ" , _IP, 
_DI) // MOVQ IP, DI - self.Emit("MOVQ" , _IL, _SI) // MOVQ IL, SI - self.Emit("MOVQ" , _IC, _DX) // MOVQ IC, DX - self.call(_F_lspace) // CALL lspace - self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , _LB_parsing_error_v) // JS _parsing_error_v - self.Emit("CMPQ" , _AX, _IL) // CMPQ AX, IL - self.Sjmp("JAE" , _LB_eof_error) // JAE _eof_error - self.Emit("MOVQ" , _AX, _IC) // MOVQ AX, IC - self.Link(label) // _nospace_{n}: -} - -func (self *_Assembler) _asm_OP_match_char(p *_Instr) { - self.match_char(p.vb()) -} - -func (self *_Assembler) match_char(char byte) { - self.check_eof(1) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(char))) // CMPB (IP)(IC), ${p.vb()} - self.Sjmp("JNE" , _LB_char_0_error) // JNE _char_0_error - self.Emit("ADDQ", jit.Imm(1), _IC) // ADDQ $1, IC -} - -func (self *_Assembler) _asm_OP_check_char(p *_Instr) { - self.check_eof(1) - self.Emit("LEAQ" , jit.Ptr(_IC, 1), _AX) // LEAQ 1(IC), AX - self.Emit("CMPB" , jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()} - self.Emit("CMOVQEQ", _AX, _IC) // CMOVQEQ AX, IC - self.Xjmp("JE" , p.vi()) // JE {p.vi()} -} - -func (self *_Assembler) _asm_OP_check_char_0(p *_Instr) { - self.check_eof(1) - self.Emit("CMPB", jit.Sib(_IP, _IC, 1, 0), jit.Imm(int64(p.vb()))) // CMPB (IP)(IC), ${p.vb()} - self.Xjmp("JE" , p.vi()) // JE {p.vi()} -} - -func (self *_Assembler) _asm_OP_add(p *_Instr) { - self.Emit("ADDQ", jit.Imm(int64(p.vi())), _IC) // ADDQ ${p.vi()}, IC -} - -func (self *_Assembler) _asm_OP_load(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _VP) // MOVQ (ST)(AX), VP -} - -func (self *_Assembler) _asm_OP_save(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("CMPQ", _CX, jit.Imm(_MaxStackBytes)) // CMPQ CX, ${_MaxStackBytes} - self.Sjmp("JAE" , _LB_stack_error) // JA _stack_error - self.WriteRecNotAX(0 , _VP, jit.Sib(_ST, _CX, 1, 8), false, false) // MOVQ VP, 8(ST)(CX) - self.Emit("ADDQ", jit.Imm(8), _CX) // ADDQ $8, CX - self.Emit("MOVQ", _CX, jit.Ptr(_ST, 0)) // MOVQ CX, (ST) -} - -func (self *_Assembler) _asm_OP_drop(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ", jit.Imm(8), _AX) // SUBQ $8, AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP - self.Emit("MOVQ", _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("XORL", _ET, _ET) // XORL ET, ET - self.Emit("MOVQ", _ET, jit.Sib(_ST, _AX, 1, 8)) // MOVQ ET, 8(ST)(AX) -} - -func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ" , jit.Imm(16), _AX) // SUBQ $16, AX - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _VP) // MOVQ 8(ST)(AX), VP - self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) -} - -func (self *_Assembler) _asm_OP_recurse(p *_Instr) { - self.Emit("MOVQ", jit.Type(p.vt()), _AX) // MOVQ ${p.vt()}, AX - self.decode_dynamic(_AX, _VP) // DECODE AX, VP -} - -func (self *_Assembler) _asm_OP_goto(p *_Instr) { - self.Xjmp("JMP", p.vi()) -} - -func (self *_Assembler) _asm_OP_switch(p *_Instr) { - self.Emit("MOVQ", _VAR_sr, _AX) // MOVQ sr, AX - self.Emit("CMPQ", _AX, jit.Imm(p.i64())) // CMPQ AX, ${len(p.vs())} - self.Sjmp("JAE" , "_default_{n}") // JAE _default_{n} - - /* jump table selector */ - self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI - 
self.Sref("_switch_table_{n}", 4) // .... &_switch_table_{n} - self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX - self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX - self.Rjmp("JMP" , _AX) // JMP AX - self.Link("_switch_table_{n}") // _switch_table_{n}: - - /* generate the jump table */ - for i, v := range p.vs() { - self.Xref(v, int64(-i) * 4) - } - - /* default case */ - self.Link("_default_{n}") - self.NOP() -} - -func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) { - self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP) - self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP) - self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP) - self.call_go(_F_println) -} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go new file mode 100644 index 00000000..f916f650 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/consts/option.go @@ -0,0 +1,33 @@ + +package consts + +import ( + `github.com/bytedance/sonic/internal/native/types` +) + + +const ( + F_use_int64 = 0 + F_disable_urc = 2 + F_disable_unknown = 3 + F_copy_string = 4 + + F_use_number = types.B_USE_NUMBER + F_validate_string = types.B_VALIDATE_STRING + F_allow_control = types.B_ALLOW_CONTROL +) + +type Options uint64 + +const ( + OptionUseInt64 Options = 1 << F_use_int64 + OptionUseNumber Options = 1 << F_use_number + OptionUseUnicodeErrors Options = 1 << F_disable_urc + OptionDisableUnknown Options = 1 << F_disable_unknown + OptionCopyString Options = 1 << F_copy_string + OptionValidateString Options = 1 << F_validate_string +) + +const ( + MaxStack = 4096 +) \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go similarity index 90% rename from vendor/github.com/bytedance/sonic/internal/decoder/errors.go rename to vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go index 4453f5cf..9f05e8b6 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/errors.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/errors/errors.go @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package decoder +package errors import ( `encoding/json` @@ -46,7 +46,7 @@ func (self SyntaxError) Description() string { func (self SyntaxError) description() string { /* check for empty source */ if self.Src == "" { - return fmt.Sprintf("no sources available: %#v", self) + return fmt.Sprintf("no sources available, the input json is empty: %#v", self) } p, x, q, y := calcBounds(len(self.Src), self.Pos) @@ -112,12 +112,12 @@ func clamp_zero(v int) int { /** JIT Error Helpers **/ -var stackOverflow = &json.UnsupportedValueError { +var StackOverflow = &json.UnsupportedValueError { Str : "Value nesting too deep", Value : reflect.ValueOf("..."), } -func error_wrap(src string, pos int, code types.ParsingError) error { +func ErrorWrap(src string, pos int, code types.ParsingError) error { return *error_wrap_heap(src, pos, code) } @@ -130,7 +130,7 @@ func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError } } -func error_type(vt *rt.GoType) error { +func ErrorType(vt *rt.GoType) error { return &json.UnmarshalTypeError{Type: vt.Pack()} } @@ -171,7 +171,7 @@ func (self MismatchTypeError) Description() string { return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description()) } -func error_mismatch(src string, pos int, vt *rt.GoType) error { +func ErrorMismatch(src string, pos int, vt *rt.GoType) error { return &MismatchTypeError { Pos : pos, Src : src, @@ -179,11 +179,11 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error { } } -func error_field(name string) error { +func ErrorField(name string) error { return errors.New("json: unknown field " + strconv.Quote(name)) } -func error_value(value string, vtype reflect.Type) error { +func ErrorValue(value string, vtype reflect.Type) error { return &json.UnmarshalTypeError { Type : vtype, Value : value, diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go deleted file mode 100644 index 8ce5c292..00000000 --- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64.go +++ /dev/null @@ -1,733 +0,0 @@ -// +build go1.16,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package decoder - -import ( - `encoding/json` - `fmt` - `reflect` - - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/native/types` - `github.com/twitchyliquid64/golang-asm/obj` -) - -/** Crucial Registers: - * - * ST(BX) : ro, decoder stack - * DF(R10) : ro, decoder flags - * EP(R11) : wo, error pointer - * IP(R12) : ro, input pointer - * IL(R13) : ro, input length - * IC(R14) : rw, input cursor - * VP(R15) : ro, value pointer (to an interface{}) - */ - -const ( - _VD_args = 8 // 8 bytes for passing arguments to this function - _VD_fargs = 64 // 64 bytes for passing arguments to other Go functions - _VD_saves = 40 // 40 bytes for saving the registers before CALL instructions - _VD_locals = 88 // 88 bytes for local variables -) - -const ( - _VD_offs = _VD_fargs + _VD_saves + _VD_locals - _VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer -) - -var ( - _VAR_ss = _VAR_ss_Vt - _VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves) -) - -var ( - _VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8) - _VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16) - _VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24) - _VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32) - _VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40) - _VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48) -) - -var ( - _VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56) - _VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64) - _VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72) - _VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80) -) - -type _ValueDecoder struct { - jit.BaseAssembler -} - -func (self *_ValueDecoder) build() uintptr { - self.Init(self.compile) - return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic)) -} - -/** Function Calling Helpers **/ - -func (self *_ValueDecoder) save(r ...obj.Addr) { - for i, v := range r { - if i > _VD_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8)) - } - } -} - -func (self *_ValueDecoder) load(r ...obj.Addr) { - for i, v := range r { - if i > _VD_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v) - } - } -} - -func (self *_ValueDecoder) call(fn obj.Addr) { - self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX - self.Rjmp("CALL", _AX) // CALL AX -} - -func (self *_ValueDecoder) call_go(fn obj.Addr) { - self.save(_REG_go...) // SAVE $REG_go - self.call(fn) // CALL ${fn} - self.load(_REG_go...)
// LOAD $REG_go -} - -/** Decoder Assembler **/ - -const ( - _S_val = iota + 1 - _S_arr - _S_arr_0 - _S_obj - _S_obj_0 - _S_obj_delim - _S_obj_sep -) - -const ( - _S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep) - _S_omask_end = (1 << _S_obj_0) | (1 << _S_obj) - _S_vmask = (1 << _S_val) | (1 << _S_arr_0) -) - -const ( - _A_init_len = 1 - _A_init_cap = 16 -) - -const ( - _ST_Sp = 0 - _ST_Vt = _PtrBytes - _ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1) -) - -var ( - _V_true = jit.Imm(int64(pbool(true))) - _V_false = jit.Imm(int64(pbool(false))) - _F_value = jit.Imm(int64(native.S_value)) -) - -var ( - _V_max = jit.Imm(int64(types.V_MAX)) - _E_eof = jit.Imm(int64(types.ERR_EOF)) - _E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR)) - _E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX)) -) - -var ( - _F_convTslice = jit.Func(convTslice) - _F_convTstring = jit.Func(convTstring) - _F_invalid_vtype = jit.Func(invalid_vtype) -) - -var ( - _T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil))) - _T_bool = jit.Type(reflect.TypeOf(false)) - _T_int64 = jit.Type(reflect.TypeOf(int64(0))) - _T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem()) - _T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil))) - _T_string = jit.Type(reflect.TypeOf("")) - _T_number = jit.Type(reflect.TypeOf(json.Number(""))) - _T_float64 = jit.Type(reflect.TypeOf(float64(0))) -) - -var _R_tab = map[int]string { - '[': "_decode_V_ARRAY", - '{': "_decode_V_OBJECT", - ':': "_decode_V_KEY_SEP", - ',': "_decode_V_ELEM_SEP", - ']': "_decode_V_ARRAY_END", - '}': "_decode_V_OBJECT_END", -} - -func (self *_ValueDecoder) compile() { - self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP - - /* initialize the state machine */ - self.Emit("XORL", _CX, _CX) // XORL CX, CX - self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df - /* initialize digital buffer first */ - self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap - self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX - self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf - /* add ST offset */ - self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST - self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp - self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0] - self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0] - self.Sjmp("JMP" , "_next") // JMP _next - - /* set the value from previous round */ - self.Link("_set_value") // _set_value: - self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX - self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX - self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error - self.Emit("XORL" , _SI, _SI) // XORL SI, SI - self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp - self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI - self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI) - self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI) - - /* check for value stack */ - self.Link("_next") // _next: - self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_return") // JS _return - 
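The _S_* constants above define the generic decoder's state machine, and masks such as _S_vmask let a single BTQ instruction ask whether the current state belongs to a set. A minimal sketch of the same test in Go:

```go
package main

import "fmt"

// State numbering mirrors the _S_* constants above.
const (
	sVal = iota + 1
	sArr
	sArr0
	sObj
	sObj0
	sObjDelim
	sObjSep
)

// vmask answers "may a JSON value start in this state?", just like the
// BTQ AX, DX test against _S_vmask in the emitted code.
const vmask = 1<<sVal | 1<<sArr0

func stateAcceptsValue(s uint) bool {
	return vmask&(1<<s) != 0
}

func main() {
	fmt.Println(stateAcceptsValue(sVal), stateAcceptsValue(sObj)) // true false
}
```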
- /* fast path: test up to 4 characters manually */ - self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF - self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX - self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX - self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' ' - self.Sjmp("JA" , "_decode_fast") // JA _decode_fast - self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX - self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - - /* at least 1 to 3 spaces */ - for i := 0; i < 3; i++ { - self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF - self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX - self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' ' - self.Sjmp("JA" , "_decode_fast") // JA _decode_fast - self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX - self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - } - - /* at least 4 spaces */ - self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL - self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF - self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX - - /* fast path: use lookup table to select decoder */ - self.Link("_decode_fast") // _decode_fast: - self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI - self.Sref("_decode_tab", 4) // .... &_decode_tab - self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX - self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_decode_native") // JZ _decode_native - self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC - self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX - self.Rjmp("JMP" , _AX) // JMP AX - - /* decode with native decoder */ - self.Link("_decode_native") // _decode_native: - self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI - self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI - self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX - self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX - self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8 - self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8 - self.call(_F_value) // CALL value - self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC - - /* check for errors */ - self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_parsing_error") - self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype - self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max - self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype - - /* jump table selector */ - self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI - self.Sref("_switch_table", 4) // .... &_switch_table - self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX - self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX - self.Rjmp("JMP" , _AX) // JMP AX - - /** V_EOF **/ - self.Link("_decode_V_EOF") // _decode_V_EOF: - self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP - self.Sjmp("JMP" , "_error") // JMP _error - - /** V_NULL **/ - self.Link("_decode_V_NULL") // _decode_V_NULL: - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - self.Emit("XORL", _R9, _R9) // XORL R9, R9 - self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /** V_TRUE **/ - self.Link("_decode_V_TRUE") // _decode_V_TRUE: - self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8 - // TODO: maybe modified by users? 
- self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9 - self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /** V_FALSE **/ - self.Link("_decode_V_FALSE") // _decode_V_FALSE: - self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8 - self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9 - self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /** V_ARRAY **/ - self.Link("_decode_V_ARRAY") // _decode_V_ARRAY - self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX - self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char - - /* create a new array */ - self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP) - self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP) - self.call_go(_F_makeslice) // CALL_GO runtime.makeslice - self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX - - /* pack into an interface */ - self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP) - self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP) - self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP) - self.call_go(_F_convTslice) // CALL_GO runtime.convTslice - self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8 - - /* replace current state with an array */ - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI - self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX] - self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI) - self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI) - - /* add a new slot for the first element */ - self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX - self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE} - self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow - self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX - self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp - self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX] - self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX] - self.Sjmp("JMP" , "_next") // JMP _next - - /** V_OBJECT **/ - self.Link("_decode_V_OBJECT") // _decode_V_OBJECT: - self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX - self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char - self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small - self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj, ST.Vt[CX] - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI - self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX - self.Emit("MOVQ", _DX, 
jit.Ptr(_SI, 0)) // MOVQ DX, (SI) - self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI) - self.Sjmp("JMP" , "_next") // JMP _next - - /** V_STRING **/ - self.Link("_decode_V_STRING") // _decode_V_STRING: - self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX - self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX - self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX - - /* check for escapes */ - self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1 - self.Sjmp("JNE" , "_unquote") // JNE _unquote - self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX - self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8 - self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI - self.Sref("_copy_string_end", 4) - self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df) - self.Sjmp("JC", "copy_string") - self.Link("_copy_string_end") - self.Emit("XORL", _DX, _DX) // XORL DX, DX - /* strings with no escape sequences */ - self.Link("_noescape") // _noescape: - self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask_key, DI - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI - self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI - self.Sjmp("JC" , "_object_key") // JC _object_key - - /* check for pre-packed strings, avoid 1 allocation */ - self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX - self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str - self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP) - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.call_go(_F_convTstring) // CALL_GO runtime.convTstring - self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9 - - /* packed string already in R9 */ - self.Link("_packed_str") // _packed_str: - self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8 - self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI - self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /* the string is an object key, get the map */ - self.Link("_object_key") - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI - self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI - - /* add a new delimiter */ - self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX - self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE} - self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow - self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp - self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX] - - /* add a new slot in the map */ - self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX - self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP) - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP) - self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R8, 16(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP) - self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr - self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX - - /* add to the pointer stack */ - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX] - self.Sjmp("JMP" , "_next") // JMP _next - - /* allocate memory to store the string header and unquoted result */ - self.Link("_unquote") // _unquote: - self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX - self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX -
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP) - self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc - self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9 - - /* prepare the unquoting parameters */ - self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX - self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI - self.Emit("NEGQ" , _CX) // NEGQ CX - self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI - self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX - self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX - self.Emit("XORL" , _R8, _R8) // XORL R8, R8 - self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv - self.Emit("SETCC", _R8) // SETCC R8 - self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8 - - /* unquote the string, with R9 been preserved */ - self.save(_R9) // SAVE R9 - self.call(_F_unquote) // CALL unquote - self.load(_R9) // LOAD R9 - - /* check for errors */ - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_unquote_error") // JS _unquote_error - self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX - self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8 - self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9) - self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9) - self.Sjmp("JMP" , "_noescape") // JMP _noescape - - /** V_DOUBLE **/ - self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE: - self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df - self.Sjmp("JC" , "_use_number") // JC _use_number - self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0 - self.Sjmp("JMP" , "_use_float64") // JMP _use_float64 - - /** V_INTEGER **/ - self.Link("_decode_V_INTEGER") // _decode_V_INTEGER: - self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df - self.Sjmp("JC" , "_use_number") // JC _use_number - self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df - self.Sjmp("JC" , "_use_int64") // JC _use_int64 - self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX - self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0 - - /* represent numbers as `float64` */ - self.Link("_use_float64") // _use_float64: - self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP) - self.call_go(_F_convT64) // CALL_GO runtime.convT64 - self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8 - self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9 - self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /* represent numbers as `json.Number` */ - self.Link("_use_number") // _use_number - self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX - self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI - self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX - self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX - self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP) - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.call_go(_F_convTstring) // CALL_GO runtime.convTstring - self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8 - self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9 - self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /* represent numbers as `int64` */ - self.Link("_use_int64") // _use_int64: - self.Emit("MOVQ", _VAR_ss_Iv, _AX) 
// MOVQ ss.Iv, AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.call_go(_F_convT64) // CALL_GO runtime.convT64 - self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8 - self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9 - self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI - self.Sjmp("JMP" , "_set_value") // JMP _set_value - - /** V_KEY_SEP **/ - self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP: - // self.Byte(0xcc) - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim - self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char - self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX] - self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1] - self.Sjmp("JMP" , "_next") // JMP _next - - /** V_ELEM_SEP **/ - self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP: - self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr - self.Sjmp("JE" , "_array_sep") // JZ _next - self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr - self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char - self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt)) - self.Sjmp("JMP" , "_next") // JMP _next - - /* arrays */ - self.Link("_array_sep") - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI - self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI - self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX - self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI) - self.Sjmp("JAE" , "_array_more") // JAE _array_more - - /* add a slot for the new element */ - self.Link("_array_append") // _array_append: - self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI) - self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI - self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX - self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE} - self.Sjmp("JAE" , "_stack_overflow") - self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX - self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI - self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp - self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX] - self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX} - self.Sjmp("JMP" , "_next") // JMP _next - - /** V_ARRAY_END **/ - self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END: - self.Emit("XORL", _DX, _DX) // XORL DX, DX - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0 - self.Sjmp("JE" , "_first_item") // JE _first_item - self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr - self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char - self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp - self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX] - self.Sjmp("JMP" , "_next") // JMP _next - - /* first element of an array */ - self.Link("_first_item") // _first_item: - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("SUBQ", 
jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI - self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI - self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1] - self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX] - self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI) - self.Sjmp("JMP" , "_next") // JMP _next - - /** V_OBJECT_END **/ - self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END: - self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX - self.Emit("BTQ" , _AX, _DX) - self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp - self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX] - self.Sjmp("JMP" , "_next") // JMP _next - - /* return from decoder */ - self.Link("_return") // _return: - self.Emit("XORL", _EP, _EP) // XORL EP, EP - self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0] - self.Link("_epilogue") // _epilogue: - self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST - self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP - self.Emit("RET") // RET - - /* array expand */ - self.Link("_array_more") // _array_more: - self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX - self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0 - self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP) - self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX - self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP) - self.call_go(_F_growslice) // CALL_GO runtime.growslice - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX - self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX - - /* update the slice */ - self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX - self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI - self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI - self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI) - self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX) - self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI) - self.Sjmp("JMP" , "_array_append") // JMP _array_append - - /* copy string */ - self.Link("copy_string") // pointer: R8, length: AX, return addr: DI - // self.Byte(0xcc) - self.Emit("MOVQ", _R8, _VAR_cs_p) - self.Emit("MOVQ", _AX, _VAR_cs_n) - self.Emit("MOVQ", _DI, _VAR_cs_LR) - self.Emit("MOVQ", _T_byte, _R8) - self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0)) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) - self.call_go(_F_makeslice) - self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) - self.Emit("MOVQ", _R8, _VAR_cs_d) - self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0)) - self.Emit("MOVQ", _VAR_cs_p, _R8) - self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8)) - self.Emit("MOVQ", _VAR_cs_n, _AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) - self.call_go(_F_memmove) - self.Emit("MOVQ", 
_VAR_cs_d, _R8) - self.Emit("MOVQ", _VAR_cs_n, _AX) - self.Emit("MOVQ", _VAR_cs_LR, _DI) - // self.Byte(0xcc) - self.Rjmp("JMP", _DI) - - /* error handlers */ - self.Link("_stack_overflow") - self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP - self.Sjmp("JMP" , "_error") // JMP _error - self.Link("_vtype_error") // _vtype_error: - self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC - self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP - self.Sjmp("JMP" , "_error") // JMP _error - self.Link("_invalid_char") // _invalid_char: - self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC - self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP - self.Sjmp("JMP" , "_error") // JMP _error - self.Link("_unquote_error") // _unquote_error: - self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC - self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC - self.Link("_parsing_error") // _parsing_error: - self.Emit("NEGQ" , _AX) // NEGQ AX - self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP - self.Link("_error") // _error: - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP) - self.Sjmp("JMP" , "_epilogue") // JMP _epilogue - - /* invalid value type, never returns */ - self.Link("_invalid_vtype") - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.call(_F_invalid_vtype) // CALL invalid_type - self.Emit("UD2") // UD2 - - /* switch jump table */ - self.Link("_switch_table") // _switch_table: - self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0 - self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4 - self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8 - self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12 - self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16 - self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20 - self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24 - self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28 - self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32 - self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36 - self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40 - self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44 - self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48 - - /* fast character lookup table */ - self.Link("_decode_tab") // _decode_tab: - self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0 - - /* generate rest of the tabs */ - for i := 1; i < 256; i++ { - if to, ok := _R_tab[i]; ok { - self.Sref(to, -int64(i) * 4) - } else { - self.Byte(0x00, 0x00, 0x00, 0x00) - } - } -} - -/** Generic Decoder **/ - -var ( - _subr_decode_value = new(_ValueDecoder).build() -) - -//go:nosplit -func invalid_vtype(vt types.ValueType) { - throw(fmt.Sprintf("invalid value type: %d", vt)) -} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s b/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s deleted file mode 100644 index 4e509c2f..00000000 --- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_stkabi_amd64_test.s +++ /dev/null @@ -1,37 +0,0 @@ -// +build go1.16,!go1.17 - -// -// Copyright 2021 ByteDance Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -#include "go_asm.h" -#include "funcdata.h" -#include "textflag.h" - -TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72 - NO_LOCAL_POINTERS - PXOR X0, X0 - MOVOU X0, rv+48(FP) - MOVQ st+0(FP), BX - MOVQ sp+8(FP), R12 - MOVQ sn+16(FP), R13 - MOVQ ic+24(FP), R14 - MOVQ vp+32(FP), R15 - MOVQ df+40(FP), R10 - MOVQ ·_subr_decode_value(SB), AX - CALL AX - MOVQ R14, rp+48(FP) - MOVQ R11, ex+56(FP) - RET diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s similarity index 100% rename from vendor/github.com/bytedance/sonic/internal/decoder/asm.s rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm.s diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go similarity index 99% rename from vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go index b0125a79..48f73e5b 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go117.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go117.go @@ -14,7 +14,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package decoder +package jitdec import ( `strconv` diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go similarity index 99% rename from vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go index 6adeac0c..cbec3d24 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/asm_stubs_amd64_go121.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/asm_stubs_amd64_go121.go @@ -1,4 +1,4 @@ -// +build go1.21,!go1.23 +// +build go1.21,!go1.24 // Copyright 2023 CloudWeGo Authors // @@ -14,7 +14,7 @@ // See the License for the specific language governing permissions and // limitations under the License. -package decoder +package jitdec import ( `strconv` diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go similarity index 98% rename from vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go index 6c6fde91..8a43d868 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/assembler_regabi_amd64.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/assembler_regabi_amd64.go @@ -1,4 +1,4 @@ -// +build go1.17,!go1.23 +// +build go1.17,!go1.24 /* * Copyright 2021 ByteDance Inc. @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package decoder +package jitdec import ( `encoding/json` @@ -385,7 +385,7 @@ func (self *_Assembler) prologue() { var ( _REG_go = []obj.Addr { _ST, _VP, _IP, _IL, _IC } - _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC, _IL } + _REG_rt = []obj.Addr { _ST, _VP, _IP, _IL, _IC } ) func (self *_Assembler) save(r ...obj.Addr) { @@ -492,9 +492,9 @@ func (self *_Assembler) type_error() { func (self *_Assembler) mismatch_error() { self.Link(_LB_mismatch_error) // _type_error: self.Emit("MOVQ", _VAR_et, _ET) // MOVQ _VAR_et, ET - self.Emit("MOVQ", _VAR_ic, _EP) // MOVQ _VAR_ic, EP self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchType, CX self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX + self.Emit("MOVQ", jit.Ptr(_ST, _EpOffset), _EP) // MOVQ stack.Ep, EP self.Sjmp("JE" , _LB_error) // JE _LB_error self.Emit("MOVQ", _ARG_sp, _AX) self.Emit("MOVQ", _ARG_sl, _BX) @@ -972,11 +972,13 @@ var ( var ( _F_decodeJsonUnmarshaler obj.Addr + _F_decodeJsonUnmarshalerQuoted obj.Addr _F_decodeTextUnmarshaler obj.Addr ) func init() { _F_decodeJsonUnmarshaler = jit.Func(decodeJsonUnmarshaler) + _F_decodeJsonUnmarshalerQuoted = jit.Func(decodeJsonUnmarshalerQuoted) _F_decodeTextUnmarshaler = jit.Func(decodeTextUnmarshaler) } @@ -1061,14 +1063,15 @@ var ( _F_skip_number = jit.Imm(int64(native.S_skip_number)) ) -func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool) { +func (self *_Assembler) unmarshal_json(t reflect.Type, deref bool, f obj.Addr) { self.call_sf(_F_skip_one) // CALL_SF skip_one self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX self.Sjmp("JS" , _LB_parsing_error_v) // JS _parse_error_v + self.Emit("MOVQ", _IC, _VAR_ic) // store for mismatche error skip self.slice_from_r(_AX, 0) // SLICE_R AX, $0 self.Emit("MOVQ" , _DI, _ARG_sv_p) // MOVQ DI, sv.p self.Emit("MOVQ" , _SI, _ARG_sv_n) // MOVQ SI, sv.n - self.unmarshal_func(t, _F_decodeJsonUnmarshaler, deref) // UNMARSHAL json, ${t}, ${deref} + self.unmarshal_func(t, f, deref) // UNMARSHAL json, ${t}, ${deref} } func (self *_Assembler) unmarshal_text(t reflect.Type, deref bool) { @@ -1103,7 +1106,15 @@ func (self *_Assembler) unmarshal_func(t reflect.Type, fn obj.Addr, deref bool) self.Emit("MOVQ" , _ARG_sv_n, _DI) // MOVQ sv.n, DI self.call_go(fn) // CALL_GO ${fn} self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error + self.Sjmp("JZ" , "_unmarshal_func_end_{n}") // JNZ _error + self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ ET, VAR.et + self.Emit("CMPQ", _ET, _CX) // check if MismatchedError + self.Sjmp("JNE" , _LB_error) + self.Emit("MOVQ", jit.Type(t), _CX) // store current type + self.Emit("MOVQ", _CX, _VAR_et) // store current type + self.Emit("MOVQ", _VAR_ic, _IC) // recover the pos + self.Emit("XORL", _ET, _ET) + self.Link("_unmarshal_func_end_{n}") } /** Dynamic Decoding Routine **/ @@ -1136,8 +1147,8 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) { self.Emit("MOVQ", _I_json_MismatchTypeError, _CX) // MOVQ _I_json_MismatchTypeError, CX self.Emit("CMPQ", _ET, _CX) // CMPQ ET, CX self.Sjmp("JNE", _LB_error) // JNE LB_error - self.Emit("MOVQ", _EP, _VAR_ic) // MOVQ EP, VAR_ic self.Emit("MOVQ", _ET, _VAR_et) // MOVQ ET, VAR_et + self.WriteRecNotAX(14, _EP, jit.Ptr(_ST, _EpOffset), false, false) // MOVQ EP, stack.Ep self.Link("_decode_dynamic_end_{n}") } @@ -1146,7 +1157,7 @@ func (self *_Assembler) decode_dynamic(vt obj.Addr, vp obj.Addr) { var ( _F_memequal = jit.Func(memequal) _F_memmove = jit.Func(memmove) - _F_growslice = jit.Func(growslice) + 
_F_growslice = jit.Func(rt.GrowSlice) _F_makeslice = jit.Func(makeslice) _F_makemap_small = jit.Func(makemap_small) _F_mapassign_fast64 = jit.Func(mapassign_fast64) @@ -1774,11 +1785,19 @@ func (self *_Assembler) _asm_OP_struct_field(p *_Instr) { } func (self *_Assembler) _asm_OP_unmarshal(p *_Instr) { - self.unmarshal_json(p.vt(), true) + if iv := p.i64(); iv != 0 { + self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshalerQuoted) + } else { + self.unmarshal_json(p.vt(), true, _F_decodeJsonUnmarshaler) + } } func (self *_Assembler) _asm_OP_unmarshal_p(p *_Instr) { - self.unmarshal_json(p.vt(), false) + if iv := p.i64(); iv != 0 { + self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshalerQuoted) + } else { + self.unmarshal_json(p.vt(), false, _F_decodeJsonUnmarshaler) + } } func (self *_Assembler) _asm_OP_unmarshal_text(p *_Instr) { diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go similarity index 94% rename from vendor/github.com/bytedance/sonic/internal/decoder/compiler.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go index b350c046..2ad3f6d8 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/compiler.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/compiler.go @@ -14,7 +14,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `encoding/json` @@ -271,6 +271,13 @@ func newInsVt(op _Op, vt reflect.Type) _Instr { } } +func newInsVtI(op _Op, vt reflect.Type, iv int) _Instr { + return _Instr { + u: packOp(op) | rt.PackInt(iv), + p: unsafe.Pointer(rt.UnpackType(vt)), + } +} + func newInsVf(op _Op, vf *caching.FieldMap) _Instr { return _Instr { u: packOp(op), @@ -452,6 +459,10 @@ func (self *_Program) rtt(op _Op, vt reflect.Type) { *self = append(*self, newInsVt(op, vt)) } +func (self *_Program) rtti(op _Op, vt reflect.Type, iv int) { + *self = append(*self, newInsVtI(op, vt, iv)) +} + func (self *_Program) fmv(op _Op, vf *caching.FieldMap) { *self = append(*self, newInsVf(op, vf)) } @@ -527,35 +538,54 @@ func (self *_Compiler) compile(vt reflect.Type) (ret _Program, err error) { return } -func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type) bool { +const ( + checkMarshalerFlags_quoted = 1 +) + +func (self *_Compiler) checkMarshaler(p *_Program, vt reflect.Type, flags int, exec bool) bool { pt := reflect.PtrTo(vt) /* check for `json.Unmarshaler` with pointer receiver */ if pt.Implements(jsonUnmarshalerType) { - p.rtt(_OP_unmarshal_p, pt) + if exec { + p.add(_OP_lspace) + p.rtti(_OP_unmarshal_p, pt, flags) + } return true } /* check for `json.Unmarshaler` */ if vt.Implements(jsonUnmarshalerType) { - p.add(_OP_lspace) - self.compileUnmarshalJson(p, vt) + if exec { + p.add(_OP_lspace) + self.compileUnmarshalJson(p, vt, flags) + } return true } + if flags == checkMarshalerFlags_quoted { + // text marshaler shouldn't be supported for quoted string + return false + } + /* check for `encoding.TextMarshaler` with pointer receiver */ if pt.Implements(encodingTextUnmarshalerType) { - p.add(_OP_lspace) - self.compileUnmarshalTextPtr(p, pt) + if exec { + p.add(_OP_lspace) + self.compileUnmarshalTextPtr(p, pt, flags) + } return true } /* check for `encoding.TextUnmarshaler` */ if vt.Implements(encodingTextUnmarshalerType) { - p.add(_OP_lspace) - self.compileUnmarshalText(p, vt) + if exec { + p.add(_OP_lspace) + self.compileUnmarshalText(p, vt, flags) + } return true } + return false } @@ -567,7 
+597,7 @@ func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type) { return } - if self.checkMarshaler(p, vt) { + if self.checkMarshaler(p, vt, 0, true) { return } @@ -690,7 +720,7 @@ func (self *_Compiler) compilePtr(p *_Program, sp int, et reflect.Type) { /* dereference all the way down */ for et.Kind() == reflect.Ptr { - if self.checkMarshaler(p, et) { + if self.checkMarshaler(p, et, 0, true) { return } et = et.Elem() @@ -938,7 +968,22 @@ end_of_object: p.pin(skip) } +func (self *_Compiler) compileStructFieldStrUnmarshal(p *_Program, vt reflect.Type) { + p.add(_OP_lspace) + n0 := p.pc() + p.add(_OP_is_null) + self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, true) + p.pin(n0) +} + func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) { + // according to std, json.Unmarshaler should be called before stringize + // see https://github.com/bytedance/sonic/issues/670 + if self.checkMarshaler(p, vt, checkMarshalerFlags_quoted, false) { + self.compileStructFieldStrUnmarshal(p, vt) + return + } + n1 := -1 ft := vt sv := false @@ -1106,7 +1151,7 @@ func (self *_Compiler) compileUnmarshalEnd(p *_Program, vt reflect.Type, i int) p.pin(j) } -func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) { +func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type, flags int) { i := p.pc() v := _OP_unmarshal p.add(_OP_is_null) @@ -1117,11 +1162,11 @@ func (self *_Compiler) compileUnmarshalJson(p *_Program, vt reflect.Type) { } /* call the unmarshaler */ - p.rtt(v, vt) + p.rtti(v, vt, flags) self.compileUnmarshalEnd(p, vt, i) } -func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) { +func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type, iv int) { i := p.pc() v := _OP_unmarshal_text p.add(_OP_is_null) @@ -1134,15 +1179,15 @@ func (self *_Compiler) compileUnmarshalText(p *_Program, vt reflect.Type) { } /* call the unmarshaler */ - p.rtt(v, vt) + p.rtti(v, vt, iv) self.compileUnmarshalEnd(p, vt, i) } -func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type) { +func (self *_Compiler) compileUnmarshalTextPtr(p *_Program, vt reflect.Type, iv int) { i := p.pc() p.add(_OP_is_null) p.chr(_OP_match_char, '"') - p.rtt(_OP_unmarshal_text_p, vt) + p.rtti(_OP_unmarshal_text_p, vt, iv) p.pin(i) } diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go similarity index 99% rename from vendor/github.com/bytedance/sonic/internal/decoder/debug.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go index d5537ed9..b59a3e57 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/debug.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/debug.go @@ -14,7 +14,7 @@ * limitations under the License. 
 */

-package decoder
+package jitdec

 import (
     `os`
 )
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
new file mode 100644
index 00000000..ab716469
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/decoder.go
@@ -0,0 +1,139 @@
+package jitdec
+
+import (
+    `unsafe`
+    `encoding/json`
+    `reflect`
+    `runtime`
+
+    `github.com/bytedance/sonic/internal/decoder/consts`
+    `github.com/bytedance/sonic/internal/decoder/errors`
+    `github.com/bytedance/sonic/internal/rt`
+    `github.com/bytedance/sonic/utf8`
+    `github.com/bytedance/sonic/option`
+)
+
+type (
+    MismatchTypeError = errors.MismatchTypeError
+    SyntaxError       = errors.SyntaxError
+)
+
+const (
+    _F_allow_control   = consts.F_allow_control
+    _F_copy_string     = consts.F_copy_string
+    _F_disable_unknown = consts.F_disable_unknown
+    _F_disable_urc     = consts.F_disable_urc
+    _F_use_int64       = consts.F_use_int64
+    _F_use_number      = consts.F_use_number
+    _F_validate_string = consts.F_validate_string
+)
+
+var (
+    error_wrap     = errors.ErrorWrap
+    error_type     = errors.ErrorType
+    error_field    = errors.ErrorField
+    error_value    = errors.ErrorValue
+    error_mismatch = errors.ErrorMismatch
+    stackOverflow  = errors.StackOverflow
+)
+
+
+// Decode parses the JSON-encoded data from current position and stores the result
+// in the value pointed to by val.
+func Decode(s *string, i *int, f uint64, val interface{}) error {
+    /* validate json if needed */
+    if (f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(*s){
+        dbuf := utf8.CorrectWith(nil, rt.Str2Mem(*s), "\ufffd")
+        *s = rt.Mem2Str(dbuf)
+    }
+
+    vv := rt.UnpackEface(val)
+    vp := vv.Value
+
+    /* check for nil type */
+    if vv.Type == nil {
+        return &json.InvalidUnmarshalError{}
+    }
+
+    /* must be a non-nil pointer */
+    if vp == nil || vv.Type.Kind() != reflect.Ptr {
+        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+    }
+
+    etp := rt.PtrElem(vv.Type)
+
+    /* check the defined pointer type for issue 379 */
+    if vv.Type.IsNamed() {
+        newp := vp
+        etp = vv.Type
+        vp = unsafe.Pointer(&newp)
+    }
+
+    /* create a new stack, and call the decoder */
+    sb := newStack()
+    nb, err := decodeTypedPointer(*s, *i, etp, vp, sb, f)
+    /* return the stack back */
+    *i = nb
+    freeStack(sb)
+
+    /* avoid GC ahead */
+    runtime.KeepAlive(vv)
+    return err
+}
+
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
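+//
+// Illustrative call (reviewer sketch, not upstream text; `Payload` stands in for
+// any user struct, and external callers normally reach this through the public
+// sonic.Pretouch wrapper, which forwards the same options):
+//
+//	err := Pretouch(reflect.TypeOf(Payload{}), option.WithCompileRecursiveDepth(2))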
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { + cfg := option.DefaultCompileOptions() + for _, opt := range opts { + opt(&cfg) + } + return pretouchRec(map[reflect.Type]bool{vt:true}, cfg) +} + +func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) { + /* compile function */ + compiler := newCompiler().apply(opts) + decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { + if pp, err := compiler.compile(_vt); err != nil { + return nil, err + } else { + as := newAssembler(pp) + as.name = _vt.String() + return as.Load(), nil + } + } + + /* find or compile */ + vt := rt.UnpackType(_vt) + if val := programCache.Get(vt); val != nil { + return nil, nil + } else if _, err := programCache.Compute(vt, decoder); err == nil { + return compiler.rec, nil + } else { + return nil, err + } +} + +func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error { + if opts.RecursiveDepth < 0 || len(vtm) == 0 { + return nil + } + next := make(map[reflect.Type]bool) + for vt := range(vtm) { + sub, err := pretouchType(vt, opts) + if err != nil { + return err + } + for svt := range(sub) { + next[svt] = true + } + } + opts.RecursiveDepth -= 1 + return pretouchRec(next, opts) +} + diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go similarity index 99% rename from vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go index c7514cb4..e6d5e3e8 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64.go @@ -1,4 +1,4 @@ -// +build go1.17,!go1.23 +// +build go1.17,!go1.24 /* * Copyright 2021 ByteDance Inc. @@ -16,7 +16,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `encoding/json` diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s similarity index 97% rename from vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s index b4b0de18..19ed3752 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/generic_regabi_amd64_test.s +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/generic_regabi_amd64_test.s @@ -1,4 +1,4 @@ -// +build go1.17,!go1.23 +// +build go1.17,!go1.24 // // Copyright 2021 ByteDance Inc. diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go similarity index 97% rename from vendor/github.com/bytedance/sonic/internal/decoder/pools.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go index bcd14cc6..01868cb2 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/pools.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/pools.go @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package decoder +package jitdec import ( `sync` @@ -36,6 +36,7 @@ const ( _PtrBytes = _PTR_SIZE / 8 _FsmOffset = (_MaxStack + 1) * _PtrBytes _DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes + _EpOffset = _DbufOffset + _MaxDigitNums _StackSize = unsafe.Sizeof(_Stack{}) ) @@ -53,6 +54,7 @@ type _Stack struct { mm types.StateMachine vp [types.MAX_RECURSE]unsafe.Pointer dp [_MaxDigitNums]byte + ep unsafe.Pointer } type _Decoder func( diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go similarity index 84% rename from vendor/github.com/bytedance/sonic/internal/decoder/primitives.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go index 1c9ce1fa..5adfc038 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/primitives.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/primitives.go @@ -14,7 +14,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `encoding` @@ -39,6 +39,13 @@ func decodeJsonUnmarshaler(vv interface{}, s string) error { return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s)) } +func decodeJsonUnmarshalerQuoted(vv interface{}, s string) error { + if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { + return &MismatchTypeError{} + } + return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s[1:len(s)-1])) +} + func decodeTextUnmarshaler(vv interface{}, s string) error { return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s)) } diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go similarity index 94% rename from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go index b02eb2b8..8fa7c32f 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go116.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go116.go @@ -1,4 +1,4 @@ -// +build go1.16,!go1.20 +// +build go1.17,!go1.20 /* * Copyright 2021 ByteDance Inc. @@ -16,7 +16,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `unsafe` @@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer //goland:noinspection GoUnusedParameter func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer -//go:noescape -//go:linkname growslice runtime.growslice -//goland:noinspection GoUnusedParameter -func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice - //go:linkname makemap_small runtime.makemap_small func makemap_small() unsafe.Pointer diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go similarity index 95% rename from vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go index 870e2539..a6dad26d 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/stubs_go120.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/stubs_go120.go @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package decoder +package jitdec import ( `unsafe` @@ -72,11 +72,6 @@ func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer //goland:noinspection GoUnusedParameter func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer -//go:noescape -//go:linkname growslice reflect.growslice -//goland:noinspection GoUnusedParameter -func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice - //go:linkname makemap_small runtime.makemap_small func makemap_small() unsafe.Pointer diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go similarity index 99% rename from vendor/github.com/bytedance/sonic/internal/decoder/types.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go index 6fc0e706..c196eb5b 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/types.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/types.go @@ -14,7 +14,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `encoding` diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go similarity index 98% rename from vendor/github.com/bytedance/sonic/internal/decoder/utils.go rename to vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go index 23ee5d50..0a7a2028 100644 --- a/vendor/github.com/bytedance/sonic/internal/decoder/utils.go +++ b/vendor/github.com/bytedance/sonic/internal/decoder/jitdec/utils.go @@ -14,7 +14,7 @@ * limitations under the License. */ -package decoder +package jitdec import ( `unsafe` diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go new file mode 100644 index 00000000..713fb656 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compile_struct.go @@ -0,0 +1,174 @@ +package optdec + +import ( + "fmt" + "reflect" + + caching "github.com/bytedance/sonic/internal/optcaching" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/internal/resolver" +) + +const ( + _MAX_FIELDS = 50 // cutoff at 50 fields struct +) + +func (c *compiler) compileIntStringOption(vt reflect.Type) decFunc { + switch vt.Size() { + case 4: + switch vt.Kind() { + case reflect.Uint: + fallthrough + case reflect.Uintptr: + return &u32StringDecoder{} + case reflect.Int: + return &i32StringDecoder{} + } + case 8: + switch vt.Kind() { + case reflect.Uint: + fallthrough + case reflect.Uintptr: + return &u64StringDecoder{} + case reflect.Int: + return &i64StringDecoder{} + } + default: + panic("not supported pointer size: " + fmt.Sprint(vt.Size())) + } + panic("unreachable") +} + +func isInteger(vt reflect.Type) bool { + switch vt.Kind() { + case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint, reflect.Uintptr, reflect.Int: return true + default: return false + } +} + +func (c *compiler) assertStringOptTypes(vt reflect.Type) { + if c.depth > _CompileMaxDepth { + panic(*stackOverflow) + } + + c.depth += 1 + defer func () { + c.depth -= 1 + }() + + if isInteger(vt) { + return + } + + switch vt.Kind() { + case reflect.String, reflect.Bool, reflect.Float32, reflect.Float64: + return + case reflect.Ptr: c.assertStringOptTypes(vt.Elem()) + default: + panicForInvalidStrType(vt) + } +} + +func (c *compiler) 
compileFieldStringOption(vt reflect.Type) decFunc { + c.assertStringOptTypes(vt) + unmDec := c.tryCompilePtrUnmarshaler(vt, true) + if unmDec != nil { + return unmDec + } + + switch vt.Kind() { + case reflect.String: + if vt == jsonNumberType { + return &numberStringDecoder{} + } + return &strStringDecoder{} + case reflect.Bool: + return &boolStringDecoder{} + case reflect.Int8: + return &i8StringDecoder{} + case reflect.Int16: + return &i16StringDecoder{} + case reflect.Int32: + return &i32StringDecoder{} + case reflect.Int64: + return &i64StringDecoder{} + case reflect.Uint8: + return &u8StringDecoder{} + case reflect.Uint16: + return &u16StringDecoder{} + case reflect.Uint32: + return &u32StringDecoder{} + case reflect.Uint64: + return &u64StringDecoder{} + case reflect.Float32: + return &f32StringDecoder{} + case reflect.Float64: + return &f64StringDecoder{} + case reflect.Uint: + fallthrough + case reflect.Uintptr: + fallthrough + case reflect.Int: + return c.compileIntStringOption(vt) + case reflect.Ptr: + return &ptrStrDecoder{ + typ: rt.UnpackType(vt.Elem()), + deref: c.compileFieldStringOption(vt.Elem()), + } + default: + panicForInvalidStrType(vt) + return nil + } +} + +func (c *compiler) compileStruct(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + if c.namedPtr { + c.namedPtr = false + return c.compileStructBody(vt) + } + + if c.depth >= c.opts.MaxInlineDepth + 1 || (c.counts > 0 && vt.NumField() >= _MAX_FIELDS) { + return &recuriveDecoder{ + typ: rt.UnpackType(vt), + } + } else { + return c.compileStructBody(vt) + } +} + +func (c *compiler) compileStructBody(vt reflect.Type) decFunc { + fv := resolver.ResolveStruct(vt) + entries := make([]fieldEntry, 0, len(fv)) + + for _, f := range fv { + var dec decFunc + /* dealt with field tag options */ + if f.Opts&resolver.F_stringize != 0 { + dec = c.compileFieldStringOption(f.Type) + } else { + dec = c.compile(f.Type) + } + + /* deal with embedded pointer fields */ + if f.Path[0].Kind == resolver.F_deref { + dec = &embeddedFieldPtrDecoder{ + field: f, + fieldDec: dec, + fieldName: f.Name, + } + } + + entries = append(entries, fieldEntry{ + FieldMeta: f, + fieldDec: dec, + }) + } + return &structDecoder{ + fieldMap: caching.NewFieldLookup(fv), + fields: entries, + structName: vt.Name(), + typ: vt, + } +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go new file mode 100644 index 00000000..fd164af9 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/compiler.go @@ -0,0 +1,449 @@ +package optdec + +import ( + "encoding/json" + "fmt" + "reflect" + + "github.com/bytedance/sonic/option" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/internal/caching" +) + +var ( + programCache = caching.CreateProgramCache() +) + +func findOrCompile(vt *rt.GoType) (decFunc, error) { + makeDecoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { + ret, err := newCompiler().compileType(vt.Pack()) + return ret, err + } + if val := programCache.Get(vt); val != nil { + return val.(decFunc), nil + } else if ret, err := programCache.Compute(vt, makeDecoder); err == nil { + return ret.(decFunc), nil + } else { + return nil, err + } +} + +type compiler struct { + visited map[reflect.Type]bool + depth int + counts int + opts option.CompileOptions + namedPtr bool +} + +func newCompiler() *compiler { + return &compiler{ + visited: make(map[reflect.Type]bool), + opts: 
option.DefaultCompileOptions(), + } +} + +func (self *compiler) apply(opts option.CompileOptions) *compiler { + self.opts = opts + return self +} + +const _CompileMaxDepth = 4096 + +func (c *compiler) enter(vt reflect.Type) { + c.visited[vt] = true + c.depth += 1 + + if c.depth > _CompileMaxDepth { + panic(*stackOverflow) + } +} + +func (c *compiler) exit(vt reflect.Type) { + c.visited[vt] = false + c.depth -= 1 +} + +func (c *compiler) compileInt(vt reflect.Type) decFunc { + switch vt.Size() { + case 4: + switch vt.Kind() { + case reflect.Uint: + fallthrough + case reflect.Uintptr: + return &u32Decoder{} + case reflect.Int: + return &i32Decoder{} + } + case 8: + switch vt.Kind() { + case reflect.Uint: + fallthrough + case reflect.Uintptr: + return &u64Decoder{} + case reflect.Int: + return &i64Decoder{} + } + default: + panic("not supported pointer size: " + fmt.Sprint(vt.Size())) + } + panic("unreachable") +} + +func (c *compiler) rescue(ep *error) { + if val := recover(); val != nil { + if err, ok := val.(error); ok { + *ep = err + } else { + panic(val) + } + } +} + +func (c *compiler) compileType(vt reflect.Type) (rt decFunc, err error) { + defer c.rescue(&err) + rt = c.compile(vt) + return rt, err +} + +func (c *compiler) compile(vt reflect.Type) decFunc { + if c.visited[vt] { + return &recuriveDecoder{ + typ: rt.UnpackType(vt), + } + } + + dec := c.tryCompilePtrUnmarshaler(vt, false) + if dec != nil { + return dec + } + + return c.compileBasic(vt) +} + +func (c *compiler) compileBasic(vt reflect.Type) decFunc { + defer func() { + c.counts += 1 + }() + switch vt.Kind() { + case reflect.Bool: + return &boolDecoder{} + case reflect.Int8: + return &i8Decoder{} + case reflect.Int16: + return &i16Decoder{} + case reflect.Int32: + return &i32Decoder{} + case reflect.Int64: + return &i64Decoder{} + case reflect.Uint8: + return &u8Decoder{} + case reflect.Uint16: + return &u16Decoder{} + case reflect.Uint32: + return &u32Decoder{} + case reflect.Uint64: + return &u64Decoder{} + case reflect.Float32: + return &f32Decoder{} + case reflect.Float64: + return &f64Decoder{} + case reflect.Uint: + fallthrough + case reflect.Uintptr: + fallthrough + case reflect.Int: + return c.compileInt(vt) + case reflect.String: + return c.compileString(vt) + case reflect.Array: + return c.compileArray(vt) + case reflect.Interface: + return c.compileInterface(vt) + case reflect.Map: + return c.compileMap(vt) + case reflect.Ptr: + return c.compilePtr(vt) + case reflect.Slice: + return c.compileSlice(vt) + case reflect.Struct: + return c.compileStruct(vt) + default: + panic(&json.UnmarshalTypeError{Type: vt}) + } +} + +func (c *compiler) compilePtr(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + + // specail logic for Named Ptr, issue 379 + if reflect.PtrTo(vt.Elem()) != vt { + c.namedPtr = true + return &ptrDecoder{ + typ: rt.UnpackType(vt.Elem()), + deref: c.compileBasic(vt.Elem()), + } + } + + return &ptrDecoder{ + typ: rt.UnpackType(vt.Elem()), + deref: c.compile(vt.Elem()), + } +} + +func (c *compiler) compileArray(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + return &arrayDecoder{ + len: vt.Len(), + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } +} + +func (c *compiler) compileString(vt reflect.Type) decFunc { + if vt == jsonNumberType { + return &numberDecoder{} + } + return &stringDecoder{} + +} + +func (c *compiler) tryCompileSliceUnmarshaler(vt reflect.Type) decFunc { + pt := reflect.PtrTo(vt.Elem()) + if pt.Implements(jsonUnmarshalerType) 
{ + return &sliceDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + + if pt.Implements(encodingTextUnmarshalerType) { + return &sliceDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + return nil +} + +func (c *compiler) compileSlice(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + + // Some common slice, use a decoder, to avoid function calls + et := rt.UnpackType(vt.Elem()) + + /* first checking `[]byte` */ + if et.Kind() == reflect.Uint8 /* []byte */ { + return c.compileSliceBytes(vt) + } + + dec := c.tryCompileSliceUnmarshaler(vt) + if dec != nil { + return dec + } + + if vt == reflect.TypeOf([]interface{}{}) { + return &sliceEfaceDecoder{} + } + if et.IsInt32() { + return &sliceI32Decoder{} + } + if et.IsInt64() { + return &sliceI64Decoder{} + } + if et.IsUint32() { + return &sliceU32Decoder{} + } + if et.IsUint64() { + return &sliceU64Decoder{} + } + if et.Kind() == reflect.String { + return &sliceStringDecoder{} + } + + return &sliceDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } +} + +func (c *compiler) compileSliceBytes(vt reflect.Type) decFunc { + ep := reflect.PtrTo(vt.Elem()) + + if ep.Implements(jsonUnmarshalerType) { + return &sliceBytesUnmarshalerDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + + if ep.Implements(encodingTextUnmarshalerType) { + return &sliceBytesUnmarshalerDecoder{ + elemType: rt.UnpackType(vt.Elem()), + elemDec: c.compile(vt.Elem()), + typ: vt, + } + } + + return &sliceBytesDecoder{} +} + +func (c *compiler) compileInterface(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + if vt.NumMethod() == 0 { + return &efaceDecoder{} + } + + if vt.Implements(jsonUnmarshalerType) { + return &unmarshalJSONDecoder{ + typ: rt.UnpackType(vt), + } + } + + if vt.Implements(encodingTextUnmarshalerType) { + return &unmarshalTextDecoder{ + typ: rt.UnpackType(vt), + } + } + + return &ifaceDecoder{ + typ: rt.UnpackType(vt), + } +} + +func (c *compiler) compileMap(vt reflect.Type) decFunc { + c.enter(vt) + defer c.exit(vt) + // check the key unmarshaler at first + decKey := tryCompileKeyUnmarshaler(vt) + if decKey != nil { + return &mapDecoder{ + mapType: rt.MapType(rt.UnpackType(vt)), + keyDec: decKey, + elemDec: c.compile(vt.Elem()), + } + } + + // Most common map, use a decoder, to avoid function calls + if vt == reflect.TypeOf(map[string]interface{}{}) { + return &mapEfaceDecoder{} + } else if vt == reflect.TypeOf(map[string]string{}) { + return &mapStringDecoder{} + } + + // Some common integer map later + mt := rt.MapType(rt.UnpackType(vt)) + + if mt.Key.Kind() == reflect.String { + return &mapStrKeyDecoder{ + mapType: mt, + assign: rt.GetMapStrAssign(vt), + elemDec: c.compile(vt.Elem()), + } + } + + if mt.Key.IsInt64() { + return &mapI64KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap64Assign(vt), + } + } + + if mt.Key.IsInt32() { + return &mapI32KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap32Assign(vt), + } + } + + if mt.Key.IsUint64() { + return &mapU64KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap64Assign(vt), + } + } + + if mt.Key.IsUint32() { + return &mapU32KeyDecoder{ + mapType: mt, + elemDec: c.compile(vt.Elem()), + assign: rt.GetMap32Assign(vt), + } + } + + // Generic map + return &mapDecoder{ + mapType: mt, + keyDec: c.compileMapKey(vt), + elemDec: 
c.compile(vt.Elem()), + } +} + +func tryCompileKeyUnmarshaler(vt reflect.Type) decKey { + pt := reflect.PtrTo(vt.Key()) + + /* check for `encoding.TextUnmarshaler` with pointer receiver */ + if pt.Implements(encodingTextUnmarshalerType) { + return decodeKeyTextUnmarshaler + } + + /* not support map key with `json.Unmarshaler` */ + return nil +} + +func (c *compiler) compileMapKey(vt reflect.Type) decKey { + switch vt.Key().Kind() { + case reflect.Int8: + return decodeKeyI8 + case reflect.Int16: + return decodeKeyI16 + case reflect.Uint8: + return decodeKeyU8 + case reflect.Uint16: + return decodeKeyU16 + default: + panic(&json.UnmarshalTypeError{Type: vt}) + } +} + +// maybe vt is a named type, and not a pointer receiver, see issue 379 +func (c *compiler) tryCompilePtrUnmarshaler(vt reflect.Type, strOpt bool) decFunc { + pt := reflect.PtrTo(vt) + + /* check for `json.Unmarshaler` with pointer receiver */ + if pt.Implements(jsonUnmarshalerType) { + return &unmarshalJSONDecoder{ + typ: rt.UnpackType(pt), + strOpt: strOpt, + } + } + + /* check for `encoding.TextMarshaler` with pointer receiver */ + if pt.Implements(encodingTextUnmarshalerType) { + /* TextUnmarshal not support ,strig tag */ + if strOpt { + panicForInvalidStrType(vt) + } + return &unmarshalTextDecoder{ + typ: rt.UnpackType(pt), + } + } + + return nil +} + +func panicForInvalidStrType(vt reflect.Type) { + panic(error_type(rt.UnpackType(vt))) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go new file mode 100644 index 00000000..77879faf --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/const.go @@ -0,0 +1,60 @@ +package optdec + +import "math" + +/* +Copied from sonic-rs +// JSON Value Type +const NULL: u64 = 0; +const BOOL: u64 = 2; +const FALSE: u64 = BOOL; +const TRUE: u64 = (1 << 3) | BOOL; +const NUMBER: u64 = 3; +const UINT: u64 = NUMBER; +const SINT: u64 = (1 << 3) | NUMBER; +const REAL: u64 = (2 << 3) | NUMBER; +const RAWNUMBER: u64 = (3 << 3) | NUMBER; +const STRING: u64 = 4; +const STRING_COMMON: u64 = STRING; +const STRING_HASESCAPED: u64 = (1 << 3) | STRING; +const OBJECT: u64 = 6; +const ARRAY: u64 = 7; + +/// JSON Type Mask +const POS_MASK: u64 = (!0) << 32; +const POS_BITS: u64 = 32; +const TYPE_MASK: u64 = 0xFF; +const TYPE_BITS: u64 = 8; + +*/ + +const ( + // BasicType: 3 bits + KNull = 0 // xxxxx000 + KBool = 2 // xxxxx010 + KNumber = 3 // xxxxx011 + KString = 4 // xxxxx100 + KRaw = 5 // xxxxx101 + KObject = 6 // xxxxx110 + KArray = 7 // xxxxx111 + + // SubType: 2 bits + KFalse = (0 << 3) | KBool // xxx00_010, 2 + KTrue = (1 << 3) | KBool // xxx01_010, 10 + KUint = (0 << 3) | KNumber // xxx00_011, 3 + KSint = (1 << 3) | KNumber // xxx01_011, 11 + KReal = (2 << 3) | KNumber // xxx10_011, 19 + KRawNumber = (3 << 3) | KNumber // xxx11_011, 27 + KStringCommon = KString // xxx00_100, 4 + KStringEscaped = (1 << 3) | KString // xxx01_100, 12 +) + +const ( + PosMask = math.MaxUint64 << 32 + PosBits = 32 + TypeMask = 0xFF + TypeBits = 8 + + ConLenMask = uint64(math.MaxUint32) + ConLenBits = 32 +) diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go new file mode 100644 index 00000000..93ed9b7e --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/context.go @@ -0,0 +1,3 @@ +package optdec + +type context = Context diff --git 
a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go
new file mode 100644
index 00000000..81eed34e
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/decoder.go
@@ -0,0 +1,160 @@
+package optdec
+
+import (
+    "reflect"
+    "unsafe"
+
+    "encoding/json"
+    "github.com/bytedance/sonic/internal/rt"
+    "github.com/bytedance/sonic/option"
+    "github.com/bytedance/sonic/internal/decoder/errors"
+    "github.com/bytedance/sonic/internal/decoder/consts"
+)
+
+
+type (
+    MismatchTypeError = errors.MismatchTypeError
+    SyntaxError       = errors.SyntaxError
+)
+
+const (
+    _F_allow_control   = consts.F_allow_control
+    _F_copy_string     = consts.F_copy_string
+    _F_disable_unknown = consts.F_disable_unknown
+    _F_disable_urc     = consts.F_disable_urc
+    _F_use_int64       = consts.F_use_int64
+    _F_use_number      = consts.F_use_number
+    _F_validate_string = consts.F_validate_string
+)
+
+type Options = consts.Options
+
+const (
+    OptionUseInt64         = consts.OptionUseInt64
+    OptionUseNumber        = consts.OptionUseNumber
+    OptionUseUnicodeErrors = consts.OptionUseUnicodeErrors
+    OptionDisableUnknown   = consts.OptionDisableUnknown
+    OptionCopyString       = consts.OptionCopyString
+    OptionValidateString   = consts.OptionValidateString
+)
+
+
+func Decode(s *string, i *int, f uint64, val interface{}) error {
+    vv := rt.UnpackEface(val)
+    vp := vv.Value
+
+    /* check for nil type */
+    if vv.Type == nil {
+        return &json.InvalidUnmarshalError{}
+    }
+
+    /* must be a non-nil pointer */
+    if vp == nil || vv.Type.Kind() != reflect.Ptr {
+        return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
+    }
+
+    etp := rt.PtrElem(vv.Type)
+
+    /* check the defined pointer type for issue 379 */
+    if vv.Type.IsNamed() {
+        newp := vp
+        etp = vv.Type
+        vp = unsafe.Pointer(&newp)
+    }
+
+    dec, err := findOrCompile(etp)
+    if err != nil {
+        return err
+    }
+
+    /* parse into document */
+    ctx, err := NewContext(*s, *i, uint64(f), etp)
+    defer ctx.Delete()
+    if ctx.Parser.Utf8Inv {
+        *s = ctx.Parser.Json
+    }
+    if err != nil {
+        goto fix_error;
+    }
+    err = dec.FromDom(vp, ctx.Root(), &ctx)
+
+fix_error:
+    err = fix_error(*s, *i, err)
+
+    // update position at last
+    *i += ctx.Parser.Pos()
+    return err
+}
+
+func fix_error(json string, pos int, err error) error {
+    if e, ok := err.(SyntaxError); ok {
+        return SyntaxError{
+            Pos: int(e.Pos) + pos,
+            Src: json,
+            Msg: e.Msg,
+        }
+    }
+
+    if e, ok := err.(MismatchTypeError); ok {
+        return &MismatchTypeError {
+            Pos:  int(e.Pos) + pos,
+            Src:  json,
+            Type: e.Type,
+        }
+    }
+
+    return err
+}
+
+// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
+// order to reduce the first-hit latency.
+//
+// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
+// a compile option to set the depth of recursive compile for the nested struct type.
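+//
+// Illustrative call (reviewer sketch, not upstream text; `MyType` is a stand-in).
+// The compiled decoder lands in the programCache defined in compiler.go above, so
+// later Decode calls for the same type skip compilation entirely:
+//
+//	err := Pretouch(reflect.TypeOf(MyType{}), option.WithCompileRecursiveDepth(1))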
+func Pretouch(vt reflect.Type, opts ...option.CompileOption) error { + cfg := option.DefaultCompileOptions() + for _, opt := range opts { + opt(&cfg) + } + return pretouchRec(map[reflect.Type]bool{vt:true}, cfg) +} + +func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) { + /* compile function */ + compiler := newCompiler().apply(opts) + decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) { + if f, err := compiler.compileType(_vt); err != nil { + return nil, err + } else { + return f, nil + } + } + + /* find or compile */ + vt := rt.UnpackType(_vt) + if val := programCache.Get(vt); val != nil { + return nil, nil + } else if _, err := programCache.Compute(vt, decoder); err == nil { + return compiler.visited, nil + } else { + return nil, err + } +} + +func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error { + if opts.RecursiveDepth < 0 || len(vtm) == 0 { + return nil + } + next := make(map[reflect.Type]bool) + for vt := range(vtm) { + sub, err := pretouchType(vt, opts) + if err != nil { + return err + } + for svt := range(sub) { + next[svt] = true + } + } + opts.RecursiveDepth -= 1 + return pretouchRec(next, opts) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go new file mode 100644 index 00000000..db0af547 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/errors.go @@ -0,0 +1,73 @@ +/* + * Copyright 2021 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + package optdec + + import ( + "encoding/json" + "errors" + "reflect" + "strconv" + + "github.com/bytedance/sonic/internal/rt" + ) + + /** JIT Error Helpers **/ + + var stackOverflow = &json.UnsupportedValueError{ + Str: "Value nesting too deep", + Value: reflect.ValueOf("..."), + } + + func error_type(vt *rt.GoType) error { + return &json.UnmarshalTypeError{Type: vt.Pack()} + } + + func error_mismatch(node Node, ctx *context, typ reflect.Type) error { + return MismatchTypeError{ + Pos: node.Position(), + Src: ctx.Parser.Json, + Type: typ, + } + } + + func newUnmatched(pos int, vt *rt.GoType) error { + return MismatchTypeError{ + Pos: pos, + Src: "", + Type: vt.Pack(), + } + } + + func error_field(name string) error { + return errors.New("json: unknown field " + strconv.Quote(name)) + } + + func error_value(value string, vtype reflect.Type) error { + return &json.UnmarshalTypeError{ + Type: vtype, + Value: value, + } + } + + func error_syntax(pos int, src string, msg string) error { + return SyntaxError{ + Pos: pos, + Src: src, + Msg: msg, + } + } + \ No newline at end of file diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go new file mode 100644 index 00000000..2a0523d5 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/functor.go @@ -0,0 +1,281 @@ +package optdec + +import ( + "encoding/json" + "math" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/internal/resolver" +) + +type decFunc interface { + FromDom(vp unsafe.Pointer, node Node, ctx *context) error +} + +type ptrDecoder struct { + typ *rt.GoType + deref decFunc +} + +// Pointer Value is allocated in the Caller +func (d *ptrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + if *(*unsafe.Pointer)(vp) == nil { + *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true) + } + + return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx) +} + +type embeddedFieldPtrDecoder struct { + field resolver.FieldMeta + fieldDec decFunc + fieldName string +} + +// Pointer Value is allocated in the Caller +func (d *embeddedFieldPtrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + // seek into the pointer + vp = unsafe.Pointer(uintptr(vp) - uintptr(d.field.Path[0].Size)) + for _, f := range d.field.Path { + deref := rt.UnpackType(f.Type) + vp = unsafe.Pointer(uintptr(vp) + f.Size) + if f.Kind == resolver.F_deref { + if *(*unsafe.Pointer)(vp) == nil { + *(*unsafe.Pointer)(vp) = rt.Mallocgc(deref.Size, deref, true) + } + vp = *(*unsafe.Pointer)(vp) + } + } + return d.fieldDec.FromDom(vp, node, ctx) +} + +type i8Decoder struct{} + +func (d *i8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt8 || ret < math.MinInt8 { + return error_mismatch(node, ctx, int8Type) + } + + *(*int8)(vp) = int8(ret) + return nil +} + +type i16Decoder struct{} + +func (d *i16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt16 || ret < math.MinInt16 { + return error_mismatch(node, ctx, int16Type) + } + + *(*int16)(vp) = int16(ret) + return nil +} + +type i32Decoder struct{} + +func (d *i32Decoder) FromDom(vp 
unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok || ret > math.MaxInt32 || ret < math.MinInt32 { + return error_mismatch(node, ctx, int32Type) + } + + *(*int32)(vp) = int32(ret) + return nil +} + +type i64Decoder struct{} + +func (d *i64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsI64(ctx) + if !ok { + return error_mismatch(node, ctx, int64Type) + } + + *(*int64)(vp) = int64(ret) + return nil +} + +type u8Decoder struct{} + +func (d *u8Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint8 { + err := error_mismatch(node, ctx, uint8Type) + return err + } + + *(*uint8)(vp) = uint8(ret) + return nil +} + +type u16Decoder struct{} + +func (d *u16Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint16 { + return error_mismatch(node, ctx, uint16Type) + } + *(*uint16)(vp) = uint16(ret) + return nil +} + +type u32Decoder struct{} + +func (d *u32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok || ret > math.MaxUint32 { + return error_mismatch(node, ctx, uint32Type) + } + + *(*uint32)(vp) = uint32(ret) + return nil +} + +type u64Decoder struct{} + +func (d *u64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsU64(ctx) + if !ok { + return error_mismatch(node, ctx, uint64Type) + } + + *(*uint64)(vp) = uint64(ret) + return nil +} + +type f32Decoder struct{} + +func (d *f32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsF64(ctx) + if !ok || ret > math.MaxFloat32 || ret < -math.MaxFloat32 { + return error_mismatch(node, ctx, float32Type) + } + + *(*float32)(vp) = float32(ret) + return nil +} + +type f64Decoder struct{} + +func (d *f64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsF64(ctx) + if !ok { + return error_mismatch(node, ctx, float64Type) + } + + *(*float64)(vp) = float64(ret) + return nil +} + +type boolDecoder struct { +} + +func (d *boolDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsBool() + if !ok { + return error_mismatch(node, ctx, boolType) + } + + *(*bool)(vp) = bool(ret) + return nil +} + +type stringDecoder struct { +} + +func (d *stringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + ret, ok := node.AsStr(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + *(*string)(vp) = ret + return nil +} + +type numberDecoder struct { +} + +func (d *numberDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + num, ok := node.AsNumber(ctx) + if !ok { + return error_mismatch(node, ctx, jsonNumberType) + } + *(*json.Number)(vp) = num + return nil +} + +type recuriveDecoder struct { + typ *rt.GoType +} + +func (d *recuriveDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + dec, err := findOrCompile(d.typ) + if err != nil { + return err + } + return 
dec.FromDom(vp, node, ctx) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go new file mode 100644 index 00000000..1d76f805 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/helper.go @@ -0,0 +1,101 @@ +package optdec + +import ( + "encoding/json" + "strconv" + + "github.com/bytedance/sonic/internal/native" + "github.com/bytedance/sonic/internal/native/types" +) + + +func SkipNumberFast(json string, start int) (int, error) { + // find the number ending, we pasred in sonic-cpp, it alway valid + pos := start + for pos < len(json) && json[pos] != ']' && json[pos] != '}' && json[pos] != ',' { + if json[pos] >= '0' && json[pos] <= '9' || json[pos] == '.' || json[pos] == '-' || json[pos] == '+' || json[pos] == 'e' || json[pos] == 'E' { + pos += 1 + } else { + return pos, error_syntax(pos, json, "invalid number") + } + } + return pos, nil +} + +func ValidNumberFast(json string) error { + // find the number ending, we pasred in sonic-cpp, it alway valid + pos := 0 + for pos < len(json) && json[pos] != ']' && json[pos] != '}' && json[pos] != ',' { + if json[pos] >= '0' && json[pos] <= '9' || json[pos] == '.' || json[pos] == '-' || json[pos] == '+' || json[pos] == 'e' || json[pos] == 'E' { + pos += 1 + } else { + return error_syntax(pos, json, "invalid number") + } + } + + if pos == 0 { + return error_syntax(pos, json, "invalid number") + } + return nil +} + +func SkipOneFast2(json string, pos *int) (int, error) { + // find the number ending, we pasred in sonic-cpp, it alway valid + start := native.SkipOneFast(&json, pos) + if start < 0 { + return -1, error_syntax(*pos, json, types.ParsingError(-start).Error()) + } + return start, nil +} + +func SkipOneFast(json string, pos int) (string, error) { + // find the number ending, we pasred in sonic-cpp, it alway valid + start := native.SkipOneFast(&json, &pos) + if start < 0 { + // TODO: details error code + return "", error_syntax(pos, json, types.ParsingError(-start).Error()) + } + return json[start:pos], nil +} + +func ParseI64(raw string) (int64, error) { + i64, err := strconv.ParseInt(raw, 10, 64) + if err != nil { + return 0, err + } + return i64, nil +} + +func ParseBool(raw string) (bool, error) { + var b bool + err := json.Unmarshal([]byte(raw), &b) + if err != nil { + return false, err + } + return b, nil +} + +func ParseU64(raw string) (uint64, error) { + u64, err := strconv.ParseUint(raw, 10, 64) + if err != nil { + return 0, err + } + return u64, nil +} + +func ParseF64(raw string) (float64, error) { + f64, err := strconv.ParseFloat(raw, 64) + if err != nil { + return 0, err + } + return f64, nil +} + +func Unquote(raw string) (string, error) { + var u string + err := json.Unmarshal([]byte(raw), &u) + if err != nil { + return "", err + } + return u, nil +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go new file mode 100644 index 00000000..0c063d55 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/interface.go @@ -0,0 +1,169 @@ +package optdec + +import ( + "encoding" + "encoding/json" + "unsafe" + "reflect" + + "github.com/bytedance/sonic/internal/rt" +) + +type efaceDecoder struct { +} + +func (d *efaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*interface{})(vp) = interface{}(nil) + return nil + } + + eface := 
*(*rt.GoEface)(vp) + + // not pointer type, or nil pointer, or *interface{} + if eface.Value == nil || eface.Type.Kind() != reflect.Ptr || rt.PtrElem(eface.Type) == anyType { + ret, err := node.AsEface(ctx) + if err != nil { + return err + } + + *(*interface{})(vp) = ret + return nil + } + + etp := rt.PtrElem(eface.Type) + vp = eface.Value + + /* check the defined pointer type for issue 379 */ + if eface.Type.IsNamed() { + newp := vp + etp = eface.Type + vp = unsafe.Pointer(&newp) + } + + dec, err := findOrCompile(etp) + if err != nil { + return err + } + + return dec.FromDom(vp, node, ctx) +} + +type ifaceDecoder struct { + typ *rt.GoType +} + +func (d *ifaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + iface := *(*rt.GoIface)(vp) + if iface.Itab == nil { + return error_type(d.typ) + } + + vt := iface.Itab.Vt + + // not pointer type, or nil pointer, or *interface{} + if vp == nil || vt.Kind() != reflect.Ptr || rt.PtrElem(vt) == anyType { + ret, err := node.AsEface(ctx) + if err != nil { + return err + } + + *(*interface{})(vp) = ret + return nil + } + + + etp := rt.PtrElem(vt) + vp = iface.Value + + /* check the defined pointer type for issue 379 */ + if vt.IsNamed() { + newp := vp + etp = vt + vp = unsafe.Pointer(&newp) + } + + dec, err := findOrCompile(etp) + if err != nil { + return err + } + + return dec.FromDom(vp, node, ctx) +} + +type unmarshalTextDecoder struct { + typ *rt.GoType +} + +func (d *unmarshalTextDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + txt, ok := node.AsStringText(ctx) + if !ok { + return error_mismatch(node, ctx, d.typ.Pack()) + } + + v := *(*interface{})(unsafe.Pointer(&rt.GoEface{ + Type: d.typ, + Value: vp, + })) + + // fast path + if u, ok := v.(encoding.TextUnmarshaler); ok { + return u.UnmarshalText(txt) + } + + // slow path + rv := reflect.ValueOf(v) + if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return u.UnmarshalText(txt) + } + + return error_type(d.typ) +} + +type unmarshalJSONDecoder struct { + typ *rt.GoType + strOpt bool +} + +func (d *unmarshalJSONDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + v := *(*interface{})(unsafe.Pointer(&rt.GoEface{ + Type: d.typ, + Value: vp, + })) + + var input []byte + if d.strOpt && node.IsNull() { + input = []byte("null") + } else if d.strOpt { + s, ok := node.AsStringText(ctx) + if !ok { + return error_mismatch(node, ctx, d.typ.Pack()) + } + input = s + } else { + input = []byte(node.AsRaw(ctx)) + } + + // fast path + if u, ok := v.(json.Unmarshaler); ok { + return u.UnmarshalJSON((input)) + } + + // slow path + rv := reflect.ValueOf(v) + if u, ok := rv.Interface().(json.Unmarshaler); ok { + return u.UnmarshalJSON(input) + } + + return error_type(d.typ) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go new file mode 100644 index 00000000..1a2bda8f --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/map.go @@ -0,0 +1,430 @@ +package optdec + +import ( + "encoding" + "encoding/json" + "math" + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +/** Decoder for most common map types: map[string]interface{}, map[string]string **/ + +type mapEfaceDecoder struct { +} + +func (d *mapEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { 
+ if node.IsNull() { + *(*map[string]interface{})(vp) = nil + return nil + } + + return node.AsMapEface(ctx, vp) +} + +type mapStringDecoder struct { +} + +func (d *mapStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*map[string]string)(vp) = nil + return nil + } + + return node.AsMapString(ctx, vp) +} + +/** Decoder for map with string key **/ + +type mapStrKeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.MapStrAssign + typ reflect.Type +} + +func (d *mapStrKeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + key, _ := keyn.AsStr(ctx) + + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, key) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +/** Decoder for map with int32 or int64 key **/ + +type mapI32KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map32Assign +} + +func (d *mapI32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + next := obj.Children() + var gerr error + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + k, ok := keyn.ParseI64(ctx) + if !ok || k > math.MaxInt32 || k < math.MinInt32 { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + key := int32(k) + ku32 := *(*uint32)(unsafe.Pointer(&key)) + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, ku32) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +type mapI64KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map64Assign +} + +func (d *mapI64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + key, ok := keyn.ParseI64(ctx) + + if !ok { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + ku64 := *(*uint64)(unsafe.Pointer(&key)) + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, ku64) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +/** Decoder for map with unt32 or uint64 key **/ 
+ +type mapU32KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map32Assign +} + +func (d *mapU32KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + k, ok := keyn.ParseU64(ctx) + if !ok || k > math.MaxUint32 { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + key := uint32(k) + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, key) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +type mapU64KeyDecoder struct { + mapType *rt.GoMapType + elemDec decFunc + assign rt.Map64Assign +} + +func (d *mapU64KeyDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + var gerr error + next := obj.Children() + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + key, ok := keyn.ParseU64(ctx) + if !ok { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + valn := NewNode(PtrOffset(next, 1)) + valp := d.assign(d.mapType, m, key) + err := d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} + +/** Decoder for generic cases */ + +type decKey func(dec *mapDecoder, raw string, ctx *context) (interface{}, error) + +func decodeKeyU8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret, err := ParseU64(key) + if err != nil { + return nil, err + } + if ret > math.MaxUint8 { + return nil, error_value(key, dec.mapType.Key.Pack()) + } + return uint8(ret), nil +} + +func decodeKeyU16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret, err := ParseU64(key) + if err != nil { + return nil, err + } + if ret > math.MaxUint16 { + return nil, error_value(key, dec.mapType.Key.Pack()) + } + return uint16(ret), nil +} + +func decodeKeyI8(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret, err := ParseI64(key) + if err != nil { + return nil, err + } + if ret > math.MaxInt8 || ret < math.MinInt8 { + return nil, error_value(key, dec.mapType.Key.Pack()) + } + return int8(ret), nil +} + +func decodeKeyI16(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret, err := ParseI64(key) + if err != nil { + return nil, err + } + if ret > math.MaxInt16 || ret < math.MinInt16 { + return nil, error_value(key, dec.mapType.Key.Pack()) + } + 
return int16(ret), nil +} + +func decodeKeyJSONUnmarshaler(dec *mapDecoder, raw string, _ *context) (interface{}, error) { + ret := reflect.New(dec.mapType.Key.Pack()).Interface() + err := ret.(json.Unmarshaler).UnmarshalJSON([]byte(raw)) + if err != nil { + return nil, err + } + return ret, nil +} + +func decodeKeyTextUnmarshaler(dec *mapDecoder, raw string, ctx *context) (interface{}, error) { + key, err := Unquote(raw) + if err != nil { + return nil, err + } + ret := reflect.New(dec.mapType.Key.Pack()).Interface() + err = ret.(encoding.TextUnmarshaler).UnmarshalText([]byte(key)) + if err != nil { + return nil, err + } + return ret, nil +} + +type mapDecoder struct { + mapType *rt.GoMapType + keyDec decKey + elemDec decFunc +} + +func (d *mapDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + obj, ok := node.AsObj() + if !ok { + return error_mismatch(node, ctx, d.mapType.Pack()) + } + + // allocate map + m := *(*unsafe.Pointer)(vp) + if m == nil { + m = rt.Makemap(&d.mapType.GoType, obj.Len()) + } + + next := obj.Children() + var gerr error + for i := 0; i < obj.Len(); i++ { + keyn := NewNode(next) + raw := keyn.AsRaw(ctx) + key, err := d.keyDec(d, raw, ctx) + if err != nil { + if gerr == nil { + gerr = error_mismatch(keyn, ctx, d.mapType.Pack()) + } + valn := NewNode(PtrOffset(next, 1)) + next = valn.Next() + continue + } + + valn := NewNode(PtrOffset(next, 1)) + keyp := rt.UnpackEface(key).Value + valp := rt.Mapassign(d.mapType, m, keyp) + err = d.elemDec.FromDom(valp, valn, ctx) + if gerr == nil && err != nil { + gerr = err + } + + next = valn.Next() + } + + *(*unsafe.Pointer)(vp) = m + return gerr +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go new file mode 100644 index 00000000..29a0136a --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/native.go @@ -0,0 +1,269 @@ +package optdec + +import ( + "fmt" + "reflect" + "unsafe" + + "sync" + + "github.com/bytedance/sonic/internal/native" + "github.com/bytedance/sonic/internal/native/types" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/utf8" +) + + +type ErrorCode int + +const ( + SONIC_OK = 0; + SONIC_CONTROL_CHAR = 1; + SONIC_INVALID_ESCAPED = 2; + SONIC_INVALID_NUM = 3; + SONIC_FLOAT_INF = 4; + SONIC_EOF = 5; + SONIC_INVALID_CHAR = 6; + SONIC_EXPECT_KEY = 7; + SONIC_EXPECT_COLON = 8; + SONIC_EXPECT_OBJ_COMMA_OR_END = 9; + SONIC_EXPECT_ARR_COMMA_OR_END = 10; + SONIC_VISIT_FAILED = 11; + SONIC_INVALID_ESCAPED_UTF = 12; + SONIC_INVALID_LITERAL = 13; + SONIC_STACK_OVERFLOW = 14; +) + +var ParsingErrors = []string{ + SONIC_OK : "ok", + SONIC_CONTROL_CHAR : "control chars in string", + SONIC_INVALID_ESCAPED : "invalid escaped chars in string", + SONIC_INVALID_NUM : "invalid number", + SONIC_FLOAT_INF : "float infinity", + SONIC_EOF : "eof", + SONIC_INVALID_CHAR : "invalid chars", + SONIC_EXPECT_KEY : "expect a json key", + SONIC_EXPECT_COLON : "expect a `:`", + SONIC_EXPECT_OBJ_COMMA_OR_END : "expect a `,` or `}`", + SONIC_EXPECT_ARR_COMMA_OR_END : "expect a `,` or `]`", + SONIC_VISIT_FAILED : "failed in json visitor", + SONIC_INVALID_ESCAPED_UTF : "invalid escaped unicodes", + SONIC_INVALID_LITERAL : "invalid literal(true/false/null)", + SONIC_STACK_OVERFLOW : "json is exceeded max depth 4096, cause stack overflow", +} + +func (code ErrorCode) Error() string { + return ParsingErrors[code] +} + 
+type node struct { + typ uint64 + val uint64 +} + +// should consitent with native/parser.c +type _nospaceBlock struct { + _ [8]byte + _ [8]byte +} + +// should consitent with native/parser.c +type nodeBuf struct { + ncur uintptr + parent int64 + depth uint64 + nstart uintptr + nend uintptr + stat jsonStat +} + +func (self *nodeBuf) init(nodes []node) { + self.ncur = uintptr(unsafe.Pointer(&nodes[0])) + self.nstart = self.ncur + self.nend = self.ncur + uintptr(cap(nodes)) * unsafe.Sizeof(node{}) + self.parent = -1 +} + +// should consitent with native/parser.c +type Parser struct { + Json string + padded []byte + nodes []node + dbuf []byte + backup []node + + options uint64 + // JSON cursor + start uintptr + cur uintptr + end uintptr + _nbk _nospaceBlock + + // node buffer cursor + nbuf nodeBuf + Utf8Inv bool + isEface bool +} + +// only when parse non-empty object/array are needed. +type jsonStat struct { + object uint32 + array uint32 + str uint32 + number uint32 + array_elems uint32 + object_keys uint32 + max_depth uint32 +} + + +var ( + defaultJsonPaddedCap uintptr = 1 << 20 // 1 Mb + defaultNodesCap uintptr = (1 << 20) / unsafe.Sizeof(node{}) // 1 Mb +) + +var parsePool sync.Pool = sync.Pool { + New: func () interface{} { + return &Parser{ + options: 0, + padded: make([]byte, 0, defaultJsonPaddedCap), + nodes: make([]node, defaultNodesCap, defaultNodesCap), + dbuf: make([]byte, types.MaxDigitNums, types.MaxDigitNums), + } + }, +} + +var padding string = "x\"x\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + +func newParser(data string, pos int, opt uint64) *Parser { + p := parsePool.Get().(*Parser) + + /* validate json if needed */ + if (opt & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(data){ + dbuf := utf8.CorrectWith(nil, rt.Str2Mem(data[pos:]), "\ufffd") + dbuf = append(dbuf, padding...) + p.Json = rt.Mem2Str(dbuf[:len(dbuf) - len(padding)]) + p.Utf8Inv = true + p.start = uintptr((*rt.GoString)(unsafe.Pointer(&p.Json)).Ptr) + } else { + p.Json = data + // TODO: prevent too large JSON + p.padded = append(p.padded, data[pos:]...) + p.padded = append(p.padded, padding...) 
+ p.start = uintptr((*rt.GoSlice)(unsafe.Pointer(&p.padded)).Ptr) + } + + p.cur = p.start + p.end = p.cur + uintptr(len(p.Json)) + p.options = opt + p.nbuf.init(p.nodes) + return p +} + + +func (p *Parser) Pos() int { + return int(p.cur - p.start) +} + +func (p *Parser) JsonBytes() []byte { + if p.Utf8Inv { + return (rt.Str2Mem(p.Json)) + } else { + return p.padded + } +} + +var nodeType = rt.UnpackType(reflect.TypeOf(node{})) + +//go:inline +func calMaxNodeCap(jsonSize int) int { + return jsonSize / 2 + 2 +} + +func (p *Parser) parse() ErrorCode { + // when decode into struct, we should decode number as possible + old := p.options + if !p.isEface { + p.options &^= 1 << _F_use_number + } + + // fast path with limited node buffer + err := ErrorCode(native.ParseWithPadding(unsafe.Pointer(p))) + if err != SONIC_VISIT_FAILED { + p.options = old + return err + } + + // check OoB here + offset := p.nbuf.ncur - p.nbuf.nstart + curLen := offset / unsafe.Sizeof(node{}) + if curLen != uintptr(len(p.nodes)) { + panic(fmt.Sprintf("current len: %d, real len: %d cap: %d", curLen, len(p.nodes), cap(p.nodes))) + } + + // node buf is not enough, continue parse + // the maxCap is always meet all valid JSON + maxCap := calMaxNodeCap(len(p.Json)) + slice := rt.GoSlice{ + Ptr: rt.Mallocgc(uintptr(maxCap) * nodeType.Size, nodeType, false), + Len: maxCap, + Cap: maxCap, + } + rt.Memmove(unsafe.Pointer(slice.Ptr), unsafe.Pointer(&p.nodes[0]), offset) + p.backup = p.nodes + p.nodes = *(*[]node)(unsafe.Pointer(&slice)) + + // update node cursor + p.nbuf.nstart = uintptr(unsafe.Pointer(&p.nodes[0])) + p.nbuf.nend = p.nbuf.nstart + uintptr(cap(p.nodes)) * unsafe.Sizeof(node{}) + p.nbuf.ncur = p.nbuf.nstart + offset + + // continue parse json + err = ErrorCode(native.ParseWithPadding(unsafe.Pointer(p))) + p.options = old + return err +} + +func (p *Parser) reset() { + p.options = 0 + p.padded = p.padded[:0] + // nodes is too large here, we will not reset it and use small backup nodes buffer + if p.backup != nil { + p.nodes = p.backup + p.backup = nil + } + p.start = 0 + p.cur = 0 + p.end = 0 + p.Json = "" + p.nbuf = nodeBuf{} + p._nbk = _nospaceBlock{} + p.Utf8Inv = false + p.isEface = false +} + +func (p *Parser) free() { + p.reset() + parsePool.Put(p) +} + +//go:noinline +func (p *Parser) fixError(code ErrorCode) error { + if code == SONIC_OK { + return nil + } + + if p.Pos() == 0 { + code = SONIC_EOF; + } + + pos := p.Pos() - 1 + return error_syntax(pos, p.Json, ParsingErrors[code]) +} + +func Parse(data string, opt uint64) error { + p := newParser(data, 0, opt) + err := p.parse() + p.free() + return err +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go new file mode 100644 index 00000000..8b49ebb3 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/node.go @@ -0,0 +1,1279 @@ +package optdec + +import ( + "encoding/json" + "math" + "unsafe" + + "github.com/bytedance/sonic/internal/envs" + "github.com/bytedance/sonic/internal/rt" +) + +type Context struct { + Parser *Parser + efacePool *efacePool + Stack bounedStack + Utf8Inv bool +} + +func (ctx *Context) Options() uint64 { + return ctx.Parser.options +} + +/************************* Stack and Pool Helper *******************/ + +type parentStat struct { + con unsafe.Pointer + remain uint64 +} +type bounedStack struct { + stack []parentStat + index int +} + +func newStack(size int) bounedStack { + return bounedStack{ + stack: 
make([]parentStat, size + 2), + index: 0, + } +} + +//go:nosplit +func (s *bounedStack) Pop() (unsafe.Pointer, int, bool){ + s.index-- + con := s.stack[s.index].con + remain := s.stack[s.index].remain &^ (uint64(1) << 63) + isObj := (s.stack[s.index].remain & (uint64(1) << 63)) != 0 + s.stack[s.index].con = nil + s.stack[s.index].remain = 0 + return con, int(remain), isObj +} + +//go:nosplit +func (s *bounedStack) Push(p unsafe.Pointer, remain int, isObj bool) { + s.stack[s.index].con = p + s.stack[s.index].remain = uint64(remain) + if isObj { + s.stack[s.index].remain |= (uint64(1) << 63) + } + s.index++ +} + +type efacePool struct{ + t64 rt.T64Pool + tslice rt.TslicePool + tstring rt.TstringPool + efaceSlice rt.SlicePool +} + +func newEfacePool(stat *jsonStat, useNumber bool) *efacePool { + strs := int(stat.str) + nums := 0 + if useNumber { + strs += int(stat.number) + } else { + nums = int(stat.number) + } + + return &efacePool{ + t64: rt.NewT64Pool(nums), + tslice: rt.NewTslicePool(int(stat.array)), + tstring: rt.NewTstringPool(strs), + efaceSlice: rt.NewPool(rt.AnyType, int(stat.array_elems)), + } +} + +func (self *efacePool) GetMap(hint int) unsafe.Pointer { + m := make(map[string]interface{}, hint) + return *(*unsafe.Pointer)(unsafe.Pointer(&m)) +} + +func (self *efacePool) GetSlice(hint int) unsafe.Pointer { + return unsafe.Pointer(self.efaceSlice.GetSlice(hint)) +} + +func (self *efacePool) ConvTSlice(val rt.GoSlice, typ *rt.GoType, dst unsafe.Pointer) { + self.tslice.Conv(val, typ, (*interface{})(dst)) +} + +func (self *efacePool) ConvF64(val float64, dst unsafe.Pointer) { + self.t64.Conv(castU64(val), rt.Float64Type, (*interface{})(dst)) +} + +func (self *efacePool) ConvTstring(val string, dst unsafe.Pointer) { + self.tstring.Conv(val, (*interface{})(dst)) +} + +func (self *efacePool) ConvTnum(val json.Number, dst unsafe.Pointer) { + self.tstring.ConvNum(val, (*interface{})(dst)) +} + +/********************************************************/ + +func canUseFastMap( opts uint64, root *rt.GoType) bool { + return envs.UseFastMap && (opts & (1 << _F_copy_string)) == 0 && (opts & (1 << _F_use_int64)) == 0 && (root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType) +} + +func NewContext(json string, pos int, opts uint64, root *rt.GoType) (Context, error) { + ctx := Context{ + Parser: newParser(json, pos, opts), + } + if root == rt.AnyType || root == rt.MapEfaceType || root == rt.SliceEfaceType { + ctx.Parser.isEface = true + } + + ecode := ctx.Parser.parse() + + if ecode != 0 { + return ctx, ctx.Parser.fixError(ecode) + } + + useNumber := (opts & (1 << _F_use_number )) != 0 + if canUseFastMap(opts, root) { + ctx.efacePool = newEfacePool(&ctx.Parser.nbuf.stat, useNumber) + ctx.Stack = newStack(int(ctx.Parser.nbuf.stat.max_depth)) + } + + return ctx, nil +} + +func (ctx *Context) Delete() { + ctx.Parser.free() + ctx.Parser = nil +} + +type Node struct { + cptr uintptr +} + +func NewNode(cptr uintptr) Node { + return Node{cptr: cptr} +} + +type Dom struct { + cdom uintptr +} + +func (ctx *Context) Root() Node { + root := (uintptr)(((*rt.GoSlice)(unsafe.Pointer(&ctx.Parser.nodes))).Ptr) + return Node{cptr: root} +} + +type Array struct { + cptr uintptr +} + +type Object struct { + cptr uintptr +} + +func (obj Object) Len() int { + cobj := ptrCast(obj.cptr) + return int(uint64(cobj.val) & ConLenMask) +} + +func (arr Array) Len() int { + carr := ptrCast(arr.cptr) + return int(uint64(carr.val) & ConLenMask) +} + +// / Helper functions to eliminate CGO calls +func (val 
Node) Type() uint8 { + ctype := ptrCast(val.cptr) + return uint8(ctype.typ & TypeMask) +} + +func (val Node) Next() uintptr { + if val.Type() != KObject && val.Type() != KArray { + return PtrOffset(val.cptr, 1) + } + cobj := ptrCast(val.cptr) + offset := int64(uint64(cobj.val) >> ConLenBits) + return PtrOffset(val.cptr, offset) +} + +func (val *Node) next() { + *val = NewNode(val.Next()) +} + +type NodeIter struct { + next uintptr +} + +func NewNodeIter(node Node) NodeIter { + return NodeIter{next: node.cptr} +} + +func (iter *NodeIter) Next() Node { + ret := NewNode(iter.next) + iter.next = PtrOffset(iter.next, 1) + return ret +} + + +func (iter *NodeIter) Peek() Node { + return NewNode(iter.next) +} + +func (val Node) U64() uint64 { + cnum := ptrCast(val.cptr) + return *(*uint64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) I64() int64 { + cnum := ptrCast(val.cptr) + return *(*int64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) IsNull() bool { + return val.Type() == KNull +} + +func (val Node) IsNumber() bool { + return val.Type() & KNumber != 0 +} + +func (val Node) F64() float64 { + cnum := ptrCast(val.cptr) + return *(*float64)((unsafe.Pointer)(&(cnum.val))) +} + +func (val Node) Bool() bool { + return val.Type() == KTrue +} + +func (self Node) AsU64(ctx *Context) (uint64, bool) { + if self.Type() == KUint { + return self.U64(), true + } else if self.Type() == KRawNumber { + num, err := ParseU64(self.Raw(ctx)) + if err != nil { + return 0, false + } + return num, true + } else { + return 0, false + } +} + +func (val *Node) AsObj() (Object, bool) { + var ret Object + if val.Type() != KObject { + return ret, false + } + return Object{ + cptr: val.cptr, + }, true +} + +func (val Node) Obj() Object { + return Object{cptr: val.cptr} +} + +func (val Node) Arr() Array { + return Array{cptr: val.cptr} +} + +func (val *Node) AsArr() (Array, bool) { + var ret Array + if val.Type() != KArray { + return ret, false + } + return Array{ + cptr: val.cptr, + }, true +} + +func (self Node) AsI64(ctx *Context) (int64, bool) { + typ := self.Type() + if typ == KUint && self.U64() <= math.MaxInt64 { + return int64(self.U64()), true + } else if typ == KSint { + return self.I64(), true + } else if typ == KRawNumber { + val, err := self.Number(ctx).Int64() + if err != nil { + return 0, false + } + return val, true + } else { + return 0, false + } +} + +/********* Parse Node String into Value ***************/ + +func (val Node) ParseI64(ctx *Context) (int64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseI64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseBool(ctx *Context) (bool, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return false, false + } + + if s == "null" { + return false, true + } + + b, err := ParseBool(s) + if err != nil { + return false, false + } + return b, true +} + +func (val Node) ParseU64(ctx *Context) (uint64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseU64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseF64(ctx *Context) (float64, bool) { + s, ok := val.AsStrRef(ctx) + if !ok { + return 0, false + } + + if s == "null" { + return 0, true + } + + i, err := ParseF64(s) + if err != nil { + return 0, false + } + return i, true +} + +func (val Node) ParseString(ctx *Context) (string, bool) { + // shoud not use AsStrRef + s, ok 
:= val.AsStr(ctx) + if !ok { + return "", false + } + + if s == "null" { + return "", true + } + + s, err := Unquote(s) + if err != nil { + return "", false + } + return s, true +} + + +func (val Node) ParseNumber(ctx *Context) (json.Number, bool) { + // shoud not use AsStrRef + s, ok := val.AsStr(ctx) + if !ok { + return json.Number(""), false + } + + if s == "null" { + return json.Number(""), true + } + + end, err := SkipNumberFast(s, 0) + // has error or trailing chars + if err != nil || end != len(s) { + return json.Number(""), false + } + return json.Number(s), true +} + + + +func (val Node) AsF64(ctx *Context) (float64, bool) { + switch val.Type() { + case KUint: return float64(val.U64()), true + case KSint: return float64(val.I64()), true + case KReal: return float64(val.F64()), true + case KRawNumber: f, err := val.Number(ctx).Float64(); return f, err == nil + default: return 0, false + } +} + +func (val Node) AsBool() (bool, bool) { + switch val.Type() { + case KTrue: return true, true + case KFalse: return false, true + default: return false, false + } +} + +func (val Node) AsStr(ctx *Context) (string, bool) { + switch val.Type() { + case KStringCommon: + s := val.StringRef(ctx) + if (ctx.Options() & (1 << _F_copy_string) == 0) { + return s, true + } + return string(rt.Str2Mem(s)), true + case KStringEscaped: + return val.StringCopyEsc(ctx), true + default: return "", false + } +} + +func (val Node) AsStrRef(ctx *Context) (string, bool) { + switch val.Type() { + case KStringEscaped: + node := ptrCast(val.cptr) + offset := val.Position() + len := int(node.val) + return rt.Mem2Str(ctx.Parser.JsonBytes()[offset : offset + len]), true + case KStringCommon: + return val.StringRef(ctx), true + default: + return "", false + } +} + +func (val Node) AsBytesRef(ctx *Context) ([]byte, bool) { + switch val.Type() { + case KStringEscaped: + node := ptrCast(val.cptr) + offset := val.Position() + len := int(node.val) + return ctx.Parser.JsonBytes()[offset : offset + len], true + case KStringCommon: + return rt.Str2Mem(val.StringRef(ctx)), true + default: + return nil, false + } +} + +func (val Node) AsStringText(ctx *Context) ([]byte, bool) { + if !val.IsStr() { + return nil, false + } + + // clone to new bytes + s, b := val.AsStrRef(ctx) + return []byte(s), b +} + +func (val Node) IsStr() bool { + return (val.Type() == KStringCommon) || (val.Type() == KStringEscaped) +} + +func (val Node) IsRawNumber() bool { + return val.Type() == KRawNumber +} + +func (val Node) Number(ctx *Context) json.Number { + return json.Number(val.Raw(ctx)) +} + +func (val Node) Raw(ctx *Context) string { + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + return ctx.Parser.Json[offset:int(offset+len)] +} + +func (val Node) Position() int { + node := ptrCast(val.cptr) + return int(node.typ >> PosBits) +} + +func (val Node) AsNumber(ctx *Context) (json.Number, bool) { + // parse JSON string as number + if val.IsStr() { + s, _ := val.AsStr(ctx) + err := ValidNumberFast(s) + if err != nil { + return "", false + } + + return json.Number(s), true + } + + return val.NonstrAsNumber(ctx) +} + +func (val Node) NonstrAsNumber(ctx *Context) (json.Number, bool) { + // deal with raw number + if val.IsRawNumber() { + return val.Number(ctx), true + } + + // deal with parse number + if !val.IsNumber() { + return json.Number(""), false + } + + start := val.Position() + end, err := SkipNumberFast(ctx.Parser.Json, start) + if err != nil { + return "", false + } + return 
json.Number(ctx.Parser.Json[start:end]), true +} + +func (val Node) AsRaw(ctx *Context) string { + // fast path for unescaped strings + switch val.Type() { + case KNull: + return "null" + case KTrue: + return "true" + case KFalse: + return "false" + case KStringCommon: + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + // add start abd end quote + ref := rt.Str2Mem(ctx.Parser.Json)[offset-1 : offset+len+1] + return rt.Mem2Str(ref) + case KRawNumber: fallthrough + case KRaw: return val.Raw(ctx) + case KStringEscaped: + raw, _ := SkipOneFast(ctx.Parser.Json, val.Position() - 1) + return raw + default: + raw, err := SkipOneFast(ctx.Parser.Json, val.Position()) + if err != nil { + break + } + return raw + } + panic("should always be valid json here") +} + +// reference from the input JSON as possible +func (val Node) StringRef(ctx *Context) string { + return val.Raw(ctx) +} + +//go:nocheckptr +func ptrCast(p uintptr) *node { + return (*node)(unsafe.Pointer(p)) +} + +func (val Node) StringCopyEsc(ctx *Context) string { + // check whether there are in padded + node := ptrCast(val.cptr) + len := int(node.val) + offset := val.Position() + return string(ctx.Parser.JsonBytes()[offset : offset + len]) +} + +func (val Node) Object() Object { + return Object{cptr: val.cptr} +} + +func (val Node) Array() Array { + return Array{cptr: val.cptr} +} + +func (val *Array) Children() uintptr { + return PtrOffset(val.cptr, 1) +} + +func (val *Object) Children() uintptr { + return PtrOffset(val.cptr, 1) +} + +func (val *Node) Equal(ctx *Context, lhs string) bool { + // check whether escaped + cstr := ptrCast(val.cptr) + offset := int(val.Position()) + len := int(cstr.val) + return lhs == ctx.Parser.Json[offset:offset+len] +} + +func (node *Node) AsMapEface(ctx *Context, vp unsafe.Pointer) error { + if node.IsNull() { + return nil + } + + obj, ok := node.AsObj() + if !ok { + return newUnmatched(node.Position(), rt.MapEfaceType) + } + + var err, gerr error + size := obj.Len() + + var m map[string]interface{} + if *(*unsafe.Pointer)(vp) == nil { + if ctx.efacePool != nil { + p := ctx.efacePool.GetMap(size) + m = *(*map[string]interface{})(unsafe.Pointer(&p)) + } else { + m = make(map[string]interface{}, size) + } + } else { + m = *(*map[string]interface{})(vp) + } + + next := obj.Children() + for i := 0; i < size; i++ { + knode := NewNode(next) + key, _ := knode.AsStr(ctx) + val := NewNode(PtrOffset(next, 1)) + m[key], err = val.AsEface(ctx) + next = val.cptr + if gerr == nil && err != nil { + gerr = err + } + } + + *(*map[string]interface{})(vp) = m + return gerr +} + +func (node *Node) AsMapString(ctx *Context, vp unsafe.Pointer) error { + obj, ok := node.AsObj() + if !ok { + return newUnmatched(node.Position(), rt.MapStringType) + } + + size := obj.Len() + + var m map[string]string + if *(*unsafe.Pointer)(vp) == nil { + m = make(map[string]string, size) + } else { + m = *(*map[string]string)(vp) + } + + var gerr error + next := obj.Children() + for i := 0; i < size; i++ { + knode := NewNode(next) + key, _ := knode.AsStr(ctx) + val := NewNode(PtrOffset(next, 1)) + m[key], ok = val.AsStr(ctx) + if !ok { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.StringType) + } + next = val.Next() + } else { + next = PtrOffset(val.cptr, 1) + } + } + + *(*map[string]string)(vp) = m + return gerr +} + +func (node *Node) AsSliceEface(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceEfaceType) + } + + size := 
arr.Len() + var s []interface{} + if size != 0 && ctx.efacePool != nil { + slice := rt.GoSlice { + Ptr: ctx.efacePool.GetSlice(size), + Len: size, + Cap: size, + } + *(*rt.GoSlice)(unsafe.Pointer(&s)) = slice + } else { + s = *(*[]interface{})((unsafe.Pointer)(rt.MakeSlice(vp, rt.AnyType, size))) + } + + *node = NewNode(arr.Children()) + + var err, gerr error + for i := 0; i < size; i++ { + s[i], err = node.AsEface(ctx) + if gerr == nil && err != nil { + gerr = err + } + } + + *(*[]interface{})(vp) = s + return nil +} + +func (node *Node) AsSliceI32(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceI32Type) + } + + size := arr.Len() + s := *(*[]int32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int32Type, size))) + next := arr.Children() + + var gerr error + for i := 0; i < size; i++ { + val := NewNode(next) + ret, ok := val.AsI64(ctx) + if !ok || ret > math.MaxInt32 || ret < math.MinInt32 { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.Int32Type) + } + next = val.Next() + } else { + s[i] = int32(ret) + next = PtrOffset(val.cptr, 1) + } + } + + *(*[]int32)(vp) = s + return gerr +} + +func (node *Node) AsSliceI64(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceI64Type) + } + + size := arr.Len() + s := *(*[]int64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Int64Type, size))) + next := arr.Children() + + var gerr error + for i := 0; i < size; i++ { + val := NewNode(next) + + ret, ok := val.AsI64(ctx) + if !ok { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.Int64Type) + } + next = val.Next() + } else { + s[i] = ret + next = PtrOffset(val.cptr, 1) + } + } + + *(*[]int64)(vp) = s + return gerr +} + +func (node *Node) AsSliceU32(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceU32Type) + } + + size := arr.Len() + next := arr.Children() + s := *(*[]uint32)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint32Type, size))) + + var gerr error + for i := 0; i < size; i++ { + val := NewNode(next) + ret, ok := val.AsU64(ctx) + if !ok || ret > math.MaxUint32 { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.Uint32Type) + } + next = val.Next() + } else { + s[i] = uint32(ret) + next = PtrOffset(val.cptr, 1) + } + } + + *(*[]uint32)(vp) = s + return gerr +} + +func (node *Node) AsSliceU64(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceU64Type) + } + + size := arr.Len() + next := arr.Children() + + s := *(*[]uint64)((unsafe.Pointer)(rt.MakeSlice(vp, rt.Uint64Type, size))) + var gerr error + for i := 0; i < size; i++ { + val := NewNode(next) + ret, ok := val.AsU64(ctx) + if !ok { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.Uint64Type) + } + next = val.Next() + } else { + s[i] = ret + next = PtrOffset(val.cptr, 1) + } + } + + *(*[]uint64)(vp) = s + return gerr +} + +func (node *Node) AsSliceString(ctx *Context, vp unsafe.Pointer) error { + arr, ok := node.AsArr() + if !ok { + return newUnmatched(node.Position(), rt.SliceStringType) + } + + size := arr.Len() + next := arr.Children() + s := *(*[]string)((unsafe.Pointer)(rt.MakeSlice(vp, rt.StringType, size))) + + var gerr error + for i := 0; i < size; i++ { + val := NewNode(next) + ret, ok := val.AsStr(ctx) + if !ok { + if gerr == nil { + gerr = newUnmatched(val.Position(), rt.StringType) + } + next = val.Next() + } else { + 
s[i] = ret + next = PtrOffset(val.cptr, 1) + } + } + + *(*[]string)(vp) = s + return gerr +} + +func (node *Node) AsSliceBytes(ctx *Context) ([]byte, error) { + b, ok := node.AsBytesRef(ctx) + if !ok { + return nil, newUnmatched(node.Position(), rt.BytesType) + } + + b64, err := rt.DecodeBase64(b) + if err != nil { + return nil, newUnmatched(node.Position(), rt.BytesType) + } + return b64, nil +} + +// AsEface will always ok, because we have parse in native. +func (node *Node) AsEface(ctx *Context) (interface{}, error) { + if ctx.efacePool != nil { + iter := NewNodeIter(*node) + v := AsEfaceFast(&iter, ctx) + *node = iter.Peek() + return v, nil + } else { + return node.AsEfaceFallback(ctx) + } +} + +func parseSingleNode(node Node, ctx *Context) interface{} { + var v interface{} + switch node.Type() { + case KObject: v = map[string]interface{}{} + case KArray: v = []interface{}{} + case KStringCommon: v = node.StringRef(ctx) + case KStringEscaped: v = node.StringCopyEsc(ctx) + case KTrue: v = true + case KFalse: v = false + case KNull: v = nil + case KUint: v = float64(node.U64()) + case KSint: v = float64(node.I64()) + case KReal: v = float64(node.F64()) + case KRawNumber: v = node.Number(ctx) + default: panic("unreachable for as eface") + } + return v +} + +func castU64(val float64) uint64 { + return *((*uint64)(unsafe.Pointer((&val)))) +} + +func AsEfaceFast(iter *NodeIter, ctx *Context) interface{} { + var mp, sp, parent unsafe.Pointer // current container pointer + var node Node + var size int + var isObj bool + var slice rt.GoSlice + var val unsafe.Pointer + var vt **rt.GoType + var vp *unsafe.Pointer + var rootM unsafe.Pointer + var rootS rt.GoSlice + var root interface{} + var key string + + node = iter.Next() + + switch node.Type() { + case KObject: + size = node.Object().Len() + if size != 0 { + ctx.Stack.Push(nil, 0, true) + mp = ctx.efacePool.GetMap(size) + rootM = mp + isObj = true + goto _object_key + } else { + return rt.GoEface { + Type: rt.MapEfaceType, + Value: ctx.efacePool.GetMap(0), + }.Pack() + } + case KArray: + size = node.Array().Len() + if size != 0 { + ctx.Stack.Push(nil, 0, false) + sp = ctx.efacePool.GetSlice(size) + slice = rt.GoSlice { + Ptr: sp, + Len: size, + Cap: size, + } + rootS = slice + isObj = false + val = sp + goto _arr_val; + } else { + ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, unsafe.Pointer(&root)) + } + case KStringCommon: ctx.efacePool.ConvTstring(node.StringRef(ctx), unsafe.Pointer(&root)) + case KStringEscaped: ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), unsafe.Pointer(&root)) + case KTrue: root = true + case KFalse: root = false + case KNull: root = nil + case KUint: ctx.efacePool.ConvF64(float64(node.U64()), unsafe.Pointer(&root)) + case KSint: ctx.efacePool.ConvF64(float64(node.I64()), unsafe.Pointer(&root)) + case KReal: ctx.efacePool.ConvF64(node.F64(), unsafe.Pointer(&root)) + case KRawNumber: ctx.efacePool.ConvTnum(node.Number(ctx), unsafe.Pointer(&root)) + default: panic("unreachable for as eface") + } + return root + +_object_key: + node = iter.Next() + if node.Type() == KStringCommon { + key = node.StringRef(ctx) + } else { + key = node.StringCopyEsc(ctx) + } + + // interface{} slot in map bucket + val = rt.Mapassign_faststr(rt.MapEfaceMapType, mp, key) + vt = &(*rt.GoEface)(val).Type + vp = &(*rt.GoEface)(val).Value + + // parse value node + node = iter.Next() + switch node.Type() { + case KObject: + newSize := node.Object().Len() + newMp := ctx.efacePool.GetMap(newSize) + *vt = rt.MapEfaceType + *vp = newMp 
+ remain := size - 1 + isObj = true + if newSize != 0 { + if remain > 0 { + ctx.Stack.Push(mp, remain, true) + } + mp = newMp + size = newSize + goto _object_key; + } + case KArray: + newSize := node.Array().Len() + if newSize == 0 { + ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val) + break; + } + + newSp := ctx.efacePool.GetSlice(newSize) + // pack to []interface{} + ctx.efacePool.ConvTSlice(rt.GoSlice{ + Ptr: newSp, + Len: newSize, + Cap: newSize, + }, rt.SliceEfaceType, val) + remain := size - 1 + if remain > 0 { + ctx.Stack.Push(mp, remain, true) + } + val = newSp + isObj = false + size = newSize + goto _arr_val; + case KStringCommon: + ctx.efacePool.ConvTstring(node.StringRef(ctx), val) + case KStringEscaped: + ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val) + case KTrue: + rt.ConvTBool(true, (*interface{})(val)) + case KFalse: + rt.ConvTBool(false, (*interface{})(val)) + case KNull: /* skip */ + case KUint: + ctx.efacePool.ConvF64(float64(node.U64()), val) + case KSint: + ctx.efacePool.ConvF64(float64(node.I64()), val) + case KReal: + ctx.efacePool.ConvF64(node.F64(), val) + case KRawNumber: + ctx.efacePool.ConvTnum(node.Number(ctx), val) + default: + panic("unreachable for as eface") + } + + // check size + size -= 1 + if size != 0 { + goto _object_key; + } + + parent, size, isObj = ctx.Stack.Pop() + + // parent is empty + if parent == nil { + if isObj { + return rt.GoEface { + Type: rt.MapEfaceType, + Value: rootM, + }.Pack() + } else { + ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, (unsafe.Pointer)(&root)) + return root + } + } + + // continue to parse parent + if isObj { + mp = parent + goto _object_key; + } else { + val = rt.PtrAdd(parent, rt.AnyType.Size) + goto _arr_val; + } + +_arr_val: + // interface{} slot in slice + vt = &(*rt.GoEface)(val).Type + vp = &(*rt.GoEface)(val).Value + + // parse value node + node = iter.Next() + switch node.Type() { + case KObject: + newSize := node.Object().Len() + newMp := ctx.efacePool.GetMap(newSize) + *vt = rt.MapEfaceType + *vp = newMp + remain := size - 1 + if newSize != 0 { + // push next array elem into stack + if remain > 0 { + ctx.Stack.Push(val, remain, false) + } + mp = newMp + size = newSize + isObj = true + goto _object_key; + } + case KArray: + newSize := node.Array().Len() + if newSize == 0 { + ctx.efacePool.ConvTSlice(rt.EmptySlice, rt.SliceEfaceType, val) + break; + } + + newSp := ctx.efacePool.GetSlice(newSize) + // pack to []interface{} + ctx.efacePool.ConvTSlice(rt.GoSlice { + Ptr: newSp, + Len: newSize, + Cap: newSize, + }, rt.SliceEfaceType, val) + + remain := size - 1 + if remain > 0 { + ctx.Stack.Push(val, remain, false) + } + + val = newSp + isObj = false + size = newSize + goto _arr_val; + case KStringCommon: + ctx.efacePool.ConvTstring(node.StringRef(ctx), val) + case KStringEscaped: + ctx.efacePool.ConvTstring(node.StringCopyEsc(ctx), val) + case KTrue: + rt.ConvTBool(true, (*interface{})(val)) + case KFalse: + rt.ConvTBool(false, (*interface{})(val)) + case KNull: /* skip */ + case KUint: + ctx.efacePool.ConvF64(float64(node.U64()), val) + case KSint: + ctx.efacePool.ConvF64(float64(node.I64()), val) + case KReal: + ctx.efacePool.ConvF64(node.F64(), val) + case KRawNumber: + ctx.efacePool.ConvTnum(node.Number(ctx), val) + default: panic("unreachable for as eface") + } + + // check size + size -= 1 + if size != 0 { + val = rt.PtrAdd(val, rt.AnyType.Size) + goto _arr_val; + } + + + parent, size, isObj = ctx.Stack.Pop() + + // parent is empty + if parent == nil { + if isObj { + return 
rt.GoEface { + Type: rt.MapEfaceType, + Value: rootM, + }.Pack() + } else { + ctx.efacePool.ConvTSlice(rootS, rt.SliceEfaceType, unsafe.Pointer(&root)) + return root + } + } + + // continue to parse parent + if isObj { + mp = parent + goto _object_key; + } else { + val = rt.PtrAdd(parent, rt.AnyType.Size) + goto _arr_val; + } +} + +func (node *Node) AsEfaceFallback(ctx *Context) (interface{}, error) { + switch node.Type() { + case KObject: + obj := node.Object() + size := obj.Len() + m := make(map[string]interface{}, size) + *node = NewNode(obj.Children()) + var gerr, err error + for i := 0; i < size; i++ { + key, _ := node.AsStr(ctx) + *node = NewNode(PtrOffset(node.cptr, 1)) + m[key], err = node.AsEfaceFallback(ctx) + if gerr == nil && err != nil { + gerr = err + } + } + return m, gerr + case KArray: + arr := node.Array() + size := arr.Len() + a := make([]interface{}, size) + *node = NewNode(arr.Children()) + var gerr, err error + for i := 0; i < size; i++ { + a[i], err = node.AsEfaceFallback(ctx) + if gerr == nil && err != nil { + gerr = err + } + } + return a, gerr + case KStringCommon: + str, _ := node.AsStr(ctx) + *node = NewNode(PtrOffset(node.cptr, 1)) + return str, nil + case KStringEscaped: + str := node.StringCopyEsc(ctx) + *node = NewNode(PtrOffset(node.cptr, 1)) + return str, nil + case KTrue: + *node = NewNode(PtrOffset(node.cptr, 1)) + return true, nil + case KFalse: + *node = NewNode(PtrOffset(node.cptr, 1)) + return false, nil + case KNull: + *node = NewNode(PtrOffset(node.cptr, 1)) + return nil, nil + default: + // use float64 + if ctx.Parser.options & (1 << _F_use_number) != 0 { + num, ok := node.AsNumber(ctx) + if !ok { + // skip the unmacthed type + *node = NewNode(node.Next()) + return nil, newUnmatched(node.Position(), rt.JsonNumberType) + } else { + *node = NewNode(PtrOffset(node.cptr, 1)) + return num, nil + } + } else if ctx.Parser.options & (1 << _F_use_int64) != 0 { + // first try int64 + i, ok := node.AsI64(ctx) + if ok { + *node = NewNode(PtrOffset(node.cptr, 1)) + return i, nil + } + + // is not integer, then use float64 + f, ok := node.AsF64(ctx) + if ok { + *node = NewNode(PtrOffset(node.cptr, 1)) + return f, nil + } + + // skip the unmacthed type + *node = NewNode(node.Next()) + return nil, newUnmatched(node.Position(), rt.Int64Type) + } else { + num, ok := node.AsF64(ctx) + if !ok { + // skip the unmacthed type + *node = NewNode(node.Next()) + return nil, newUnmatched(node.Position(), rt.Float64Type) + } else { + *node = NewNode(PtrOffset(node.cptr, 1)) + return num, nil + } + } + } +} + +//go:nosplit +func PtrOffset(ptr uintptr, off int64) uintptr { + return uintptr(int64(ptr) + off * int64(unsafe.Sizeof(node{}))) +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go new file mode 100644 index 00000000..a94e422b --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/slice.go @@ -0,0 +1,224 @@ +package optdec + +import ( + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +type sliceDecoder struct { + elemType *rt.GoType + elemDec decFunc + typ reflect.Type +} + +var ( + emptyPtr = &struct{}{} +) + +func (d *sliceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + arr, ok := node.AsArr() + if !ok { + return error_mismatch(node, ctx, d.typ) + } + + slice := rt.MakeSlice(vp, d.elemType, arr.Len()) + elems := slice.Ptr + next 
:= arr.Children() + + var gerr error + for i := 0; i < arr.Len(); i++ { + val := NewNode(next) + elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size) + err := d.elemDec.FromDom(elem, val, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = val.Next() + } + + *(*rt.GoSlice)(vp) = *slice + return gerr +} + +type arrayDecoder struct { + len int + elemType *rt.GoType + elemDec decFunc + typ reflect.Type +} + +//go:nocheckptr +func (d *arrayDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + arr, ok := node.AsArr() + if !ok { + return error_mismatch(node, ctx, d.typ) + } + + next := arr.Children() + i := 0 + + var gerr error + for ; i < d.len && i < arr.Len(); i++ { + elem := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size) + val := NewNode(next) + err := d.elemDec.FromDom(elem, val, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = val.Next() + } + + /* zero rest of array */ + ptr := unsafe.Pointer(uintptr(vp) + uintptr(i)*d.elemType.Size) + n := uintptr(d.len-i) * d.elemType.Size + rt.ClearMemory(d.elemType, ptr, n) + return gerr +} + +type sliceEfaceDecoder struct { +} + +func (d *sliceEfaceDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceEface(ctx, vp) +} + +type sliceI32Decoder struct { +} + +func (d *sliceI32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceI32(ctx, vp) +} + +type sliceI64Decoder struct { +} + +func (d *sliceI64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceI64(ctx, vp) +} + +type sliceU32Decoder struct { +} + +func (d *sliceU32Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceU32(ctx, vp) +} + +type sliceU64Decoder struct { +} + +func (d *sliceU64Decoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceU64(ctx, vp) +} + +type sliceStringDecoder struct { +} + +func (d *sliceStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + return node.AsSliceString(ctx, vp) +} + +type sliceBytesDecoder struct { +} + +func (d *sliceBytesDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + s, err := node.AsSliceBytes(ctx) + if err != nil { + return err + } + + *(*[]byte)(vp) = s + return nil +} + +type sliceBytesUnmarshalerDecoder struct { + elemType *rt.GoType + elemDec decFunc + typ reflect.Type +} + +func (d *sliceBytesUnmarshalerDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*rt.GoSlice)(vp) = rt.GoSlice{} + return nil + } + + /* parse JSON string into `[]byte` */ + if node.IsStr() { + slice, err := node.AsSliceBytes(ctx) + if err != nil { + return err + } + *(*[]byte)(vp) = slice + return nil + } + + /* parse JSON array into `[]byte` */ + arr, ok := node.AsArr() + if !ok { + return error_mismatch(node, ctx, d.typ) + } + + slice := rt.MakeSlice(vp, d.elemType, arr.Len()) + 
elems := slice.Ptr + + var gerr error + next := arr.Children() + for i := 0; i < arr.Len(); i++ { + child := NewNode(next) + elem := unsafe.Pointer(uintptr(elems) + uintptr(i)*d.elemType.Size) + err := d.elemDec.FromDom(elem, child, ctx) + if gerr == nil && err != nil { + gerr = err + } + next = child.Next() + } + + *(*rt.GoSlice)(vp) = *slice + return gerr +} diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go new file mode 100644 index 00000000..627b5ebe --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/stringopts.go @@ -0,0 +1,360 @@ +package optdec + +import ( + "encoding/json" + "math" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +type ptrStrDecoder struct { + typ *rt.GoType + deref decFunc +} + +// Pointer Value is allocated in the Caller +func (d *ptrStrDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + *(*unsafe.Pointer)(vp) = nil + return nil + } + + if *(*unsafe.Pointer)(vp) == nil { + *(*unsafe.Pointer)(vp) = rt.Mallocgc(d.typ.Size, d.typ, true) + } + + return d.deref.FromDom(*(*unsafe.Pointer)(vp), node, ctx) +} + +type boolStringDecoder struct { +} + +func (d *boolStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + b, err := ParseBool(s) + if err != nil { + return error_mismatch(node, ctx, boolType) + } + + *(*bool)(vp) = b + return nil +} + +func parseI64(node Node, ctx *context) (int64, error, bool) { + if node.IsNull() { + return 0, nil, true + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return 0, error_mismatch(node, ctx, stringType), false + } + + if s == "null" { + return 0, nil, true + } + + ret, err := ParseI64(s) + return ret, err, false +} + +type i8StringDecoder struct{} + +func (d *i8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseI64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxInt8 || ret < math.MinInt8 { + return error_mismatch(node, ctx, int8Type) + } + + *(*int8)(vp) = int8(ret) + return nil +} + +type i16StringDecoder struct{} + +func (d *i16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseI64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxInt16 || ret < math.MinInt16 { + return error_mismatch(node, ctx, int16Type) + } + + *(*int16)(vp) = int16(ret) + return nil +} + +type i32StringDecoder struct{} + +func (d *i32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseI64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxInt32 || ret < math.MinInt32 { + return error_mismatch(node, ctx, int32Type) + } + + *(*int32)(vp) = int32(ret) + return nil +} + +type i64StringDecoder struct{} + +func (d *i64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseI64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + *(*int64)(vp) = int64(ret) + return nil +} + 
+func parseU64(node Node, ctx *context) (uint64, error, bool) { + if node.IsNull() { + return 0, nil, true + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return 0, error_mismatch(node, ctx, stringType), false + } + + if s == "null" { + return 0, nil, true + } + + ret, err := ParseU64(s) + return ret, err, false +} + +type u8StringDecoder struct{} + +func (d *u8StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint8 { + return error_mismatch(node, ctx, uint8Type) + } + + *(*uint8)(vp) = uint8(ret) + return nil +} + +type u16StringDecoder struct{} + +func (d *u16StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint16 { + return error_mismatch(node, ctx, uint16Type) + } + + *(*uint16)(vp) = uint16(ret) + return nil +} + +type u32StringDecoder struct{} + +func (d *u32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + if ret > math.MaxUint32 { + return error_mismatch(node, ctx, uint32Type) + } + + *(*uint32)(vp) = uint32(ret) + return nil +} + + +type u64StringDecoder struct{} + +func (d *u64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + ret, err, null := parseU64(node, ctx) + if null { + return nil + } + + if err != nil { + return err + } + + *(*uint64)(vp) = uint64(ret) + return nil +} + +type f32StringDecoder struct{} + +func (d *f32StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + ret, err := ParseF64(s) + if err != nil || ret > math.MaxFloat32 || ret < -math.MaxFloat32 { + return error_mismatch(node, ctx, float32Type) + } + + *(*float32)(vp) = float32(ret) + return nil +} + +type f64StringDecoder struct{} + +func (d *f64StringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + ret, err := ParseF64(s) + if err != nil { + return error_mismatch(node, ctx, float64Type) + } + + *(*float64)(vp) = float64(ret) + return nil +} + +/* parse string field with string options */ +type strStringDecoder struct{} + +func (d *strStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + s, err := Unquote(s) + if err != nil { + return error_mismatch(node, ctx, stringType) + } + + *(*string)(vp) = s + return nil +} + +type numberStringDecoder struct{} + +func (d *numberStringDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error { + if node.IsNull() { + return nil + } + + s, ok := node.AsStrRef(ctx) + if !ok { + return error_mismatch(node, ctx, stringType) + } + + if s == "null" { + return nil + } + + num, ok := node.ParseNumber(ctx) + if !ok { + return error_mismatch(node, ctx, jsonNumberType) + } + + end, err := SkipNumberFast(s, 0) + // has error or trailing 
chars
+ if err != nil || end != len(s) {
+ return error_mismatch(node, ctx, jsonNumberType)
+ }
+
+ *(*json.Number)(vp) = json.Number(num)
+ return nil
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go
new file mode 100644
index 00000000..bce2758f
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/structs.go
@@ -0,0 +1,61 @@
+package optdec
+
+import (
+ "reflect"
+ "unsafe"
+
+ caching "github.com/bytedance/sonic/internal/optcaching"
+ "github.com/bytedance/sonic/internal/resolver"
+)
+
+type fieldEntry struct {
+ resolver.FieldMeta
+ fieldDec decFunc
+}
+
+type structDecoder struct {
+ fieldMap caching.FieldLookup
+ fields []fieldEntry
+ structName string
+ typ reflect.Type
+}
+
+func (d *structDecoder) FromDom(vp unsafe.Pointer, node Node, ctx *context) error {
+ if node.IsNull() {
+ return nil
+ }
+
+ var gerr error
+ obj, ok := node.AsObj()
+ if !ok {
+ return error_mismatch(node, ctx, d.typ)
+ }
+
+ next := obj.Children()
+ for i := 0; i < obj.Len(); i++ {
+ key, _ := NewNode(next).AsStrRef(ctx)
+ val := NewNode(PtrOffset(next, 1))
+ next = val.Next()
+
+ // find the field index by key
+ idx := d.fieldMap.Get(key)
+ if idx == -1 {
+ if Options(ctx.Options())&OptionDisableUnknown != 0 {
+ return error_field(key)
+ }
+ continue
+ }
+
+ offset := d.fields[idx].Path[0].Size
+ elem := unsafe.Pointer(uintptr(vp) + offset)
+ err := d.fields[idx].fieldDec.FromDom(elem, val, ctx)
+
+ // record the first mismatched-type error, but keep decoding the rest
+ if gerr == nil && err != nil {
+ // TODO: better error info
+ gerr = err
+ }
+ }
+ return gerr
+}
+
diff --git a/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go
new file mode 100644
index 00000000..fe1433ee
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/decoder/optdec/types.go
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package optdec + +import ( + "encoding" + "encoding/base64" + "encoding/json" + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/rt" +) + +var ( + boolType = reflect.TypeOf(bool(false)) + byteType = reflect.TypeOf(byte(0)) + intType = reflect.TypeOf(int(0)) + int8Type = reflect.TypeOf(int8(0)) + int16Type = reflect.TypeOf(int16(0)) + int32Type = reflect.TypeOf(int32(0)) + int64Type = reflect.TypeOf(int64(0)) + uintType = reflect.TypeOf(uint(0)) + uint8Type = reflect.TypeOf(uint8(0)) + uint16Type = reflect.TypeOf(uint16(0)) + uint32Type = reflect.TypeOf(uint32(0)) + uint64Type = reflect.TypeOf(uint64(0)) + float32Type = reflect.TypeOf(float32(0)) + float64Type = reflect.TypeOf(float64(0)) + stringType = reflect.TypeOf("") + bytesType = reflect.TypeOf([]byte(nil)) + jsonNumberType = reflect.TypeOf(json.Number("")) + base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0)) + anyType = rt.UnpackType(reflect.TypeOf((*interface{})(nil)).Elem()) +) + +var ( + errorType = reflect.TypeOf((*error)(nil)).Elem() + jsonUnmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() + encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +) + +func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) { + p := (*rt.GoIface)(unsafe.Pointer(&t)) + return p.Itab, (*rt.GoType)(p.Value) +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go similarity index 52% rename from vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go rename to vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go index 8a322b3a..5d9956a9 100644 --- a/vendor/github.com/bytedance/sonic/internal/encoder/mapiter.go +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/mapiter.go @@ -14,15 +14,16 @@ * limitations under the License. 
*/ -package encoder +package alg import ( "encoding" "reflect" + "strconv" "sync" "unsafe" - "github.com/bytedance/sonic/internal/native" + "github.com/bytedance/sonic/internal/encoder/vars" "github.com/bytedance/sonic/internal/rt" ) @@ -32,8 +33,8 @@ type _MapPair struct { m [32]byte } -type _MapIterator struct { - it rt.GoMapIterator // must be the first field +type MapIterator struct { + It rt.GoMapIterator // must be the first field kv rt.GoSlice // slice of _MapPair ki int } @@ -44,43 +45,43 @@ var ( ) func init() { - if unsafe.Offsetof(_MapIterator{}.it) != 0 { + if unsafe.Offsetof(MapIterator{}.It) != 0 { panic("_MapIterator.it is not the first field") } } -func newIterator() *_MapIterator { +func newIterator() *MapIterator { if v := iteratorPool.Get(); v == nil { - return new(_MapIterator) + return new(MapIterator) } else { - return resetIterator(v.(*_MapIterator)) + return resetIterator(v.(*MapIterator)) } } -func resetIterator(p *_MapIterator) *_MapIterator { +func resetIterator(p *MapIterator) *MapIterator { p.ki = 0 - p.it = rt.GoMapIterator{} + p.It = rt.GoMapIterator{} p.kv.Len = 0 return p } -func (self *_MapIterator) at(i int) *_MapPair { +func (self *MapIterator) at(i int) *_MapPair { return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{}))) } -func (self *_MapIterator) add() (p *_MapPair) { +func (self *MapIterator) add() (p *_MapPair) { p = self.at(self.kv.Len) self.kv.Len++ return } -func (self *_MapIterator) data() (p []_MapPair) { +func (self *MapIterator) data() (p []_MapPair) { *(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv return } -func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) { +func (self *MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) { p := self.add() p.v = v @@ -94,26 +95,26 @@ func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointe return nil } -func (self *_MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error { +func (self *MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error { switch v { - case reflect.Int : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int)(k)))]) ; return nil - case reflect.Int8 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int8)(k)))]) ; return nil - case reflect.Int16 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int16)(k)))]) ; return nil - case reflect.Int32 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int32)(k)))]) ; return nil - case reflect.Int64 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], *(*int64)(k))]) ; return nil - case reflect.Uint : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint)(k)))]) ; return nil - case reflect.Uint8 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint8)(k)))]) ; return nil - case reflect.Uint16 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint16)(k)))]) ; return nil - case reflect.Uint32 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint32)(k)))]) ; return nil - case reflect.Uint64 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], *(*uint64)(k))]) ; return nil - case reflect.Uintptr : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uintptr)(k)))]) ; return nil + case reflect.Int : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int)(k)), 10)) ; return nil + case reflect.Int8 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int8)(k)), 10)) ; return nil + case reflect.Int16 : p.k 
= rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int16)(k)), 10)) ; return nil
+ case reflect.Int32 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int32)(k)), 10)) ; return nil
+ case reflect.Int64 : p.k = rt.Mem2Str(strconv.AppendInt(p.m[:0], int64(*(*int64)(k)), 10)) ; return nil
+ case reflect.Uint : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint)(k)), 10)) ; return nil
+ case reflect.Uint8 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint8)(k)), 10)) ; return nil
+ case reflect.Uint16 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint16)(k)), 10)) ; return nil
+ case reflect.Uint32 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint32)(k)), 10)) ; return nil
+ case reflect.Uint64 : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uint64)(k)), 10)) ; return nil
+ case reflect.Uintptr : p.k = rt.Mem2Str(strconv.AppendUint(p.m[:0], uint64(*(*uintptr)(k)), 10)) ; return nil
 case reflect.Interface : return self.appendInterface(p, t, k)
 case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k)
 default : panic("unexpected map key type")
 }
 }
 
-func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
 // compiler has already checked that the type implements the encoding.MarshalText interface
 if !t.Indirect() {
 k = *(*unsafe.Pointer)(k)
@@ -127,7 +128,7 @@ func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Poi
 return
 }
 
-func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
+func (self *MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
 if len(rt.IfaceType(t).Methods) == 0 {
 panic("unexpected map key type")
 } else if p.k, err = asText(k); err == nil {
@@ -137,17 +138,17 @@ func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Po
 }
 }
 
-func iteratorStop(p *_MapIterator) {
+func IteratorStop(p *MapIterator) {
 iteratorPool.Put(p)
 }
 
-func iteratorNext(p *_MapIterator) {
+func IteratorNext(p *MapIterator) {
 i := p.ki
- t := &p.it
+ t := &p.It
 
 /* check for unordered iteration */
 if i < 0 {
- mapiternext(t)
+ rt.Mapiternext(t)
 return
 }
 
@@ -164,25 +165,25 @@
 p.ki++
 }
 
-func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, error) {
+func IteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*MapIterator, error) {
 it := newIterator()
- mapiterinit(t, m, &it.it)
+ rt.Mapiterinit(t, m, &it.It)
 
 /* check for key-sorting, empty map don't need sorting */
- if m.Count == 0 || (fv & uint64(SortMapKeys)) == 0 {
+ if m.Count == 0 || (fv & (1<<BitSortMapKeys)) == 0 {
 it.ki = -1
 return it, nil
 }
 
 /* pre-allocate space if needed */
 if m.Count > it.kv.Cap {
- it.kv = growslice(iteratorPair, it.kv, m.Count)
+ it.kv = rt.GrowSlice(iteratorPair, it.kv, m.Count)
 }
 
 /* dump all the key-value pairs */
- for ; it.it.K != nil; mapiternext(&it.it) {
- if err := it.append(t.Key, it.it.K, it.it.V); err != nil {
- iteratorStop(it)
+ for ; it.It.K != nil; rt.Mapiternext(&it.It) {
+ if err := it.append(t.Key, it.It.K, it.It.V); err != nil {
+ IteratorStop(it)
 return nil, err
 }
 }
 
@@ -193,7 +194,13 @@ func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, erro
 /* load the first pair into iterator */
- it.it.V = it.at(0).v
- it.it.K = unsafe.Pointer(&it.at(0).k)
+ it.It.V = it.at(0).v
+ it.It.K = unsafe.Pointer(&it.at(0).k)
 return it, nil
 }
+
+func asText(v unsafe.Pointer) (string, error) {
+ text := 
rt.AssertI2I(rt.UnpackType(vars.EncodingTextMarshalerType), *(*rt.GoIface)(v)) + r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText() + return rt.Mem2Str(r), e +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go new file mode 100644 index 00000000..c19e2de4 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/opts.go @@ -0,0 +1,31 @@ +/** + * Copyright 2024 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +const ( + BitSortMapKeys = iota + BitEscapeHTML + BitCompactMarshaler + BitNoQuoteTextMarshaler + BitNoNullSliceOrMap + BitValidateString + BitNoValidateJSONMarshaler + BitNoEncoderNewline + BitEncodeNullForInfOrNan + + BitPointerValue = 63 +) diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go new file mode 100644 index 00000000..63fa0189 --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/primitives.go @@ -0,0 +1,95 @@ +/** + * Copyright 2024 ByteDance Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +import ( + "encoding" + "encoding/json" + + "github.com/bytedance/sonic/internal/encoder/vars" + "github.com/bytedance/sonic/internal/rt" +) + +func Compact(p *[]byte, v []byte) error { + buf := vars.NewBuffer() + err := json.Compact(buf, v) + + /* check for errors */ + if err != nil { + return err + } + + /* add to result */ + v = buf.Bytes() + *p = append(*p, v...) 
+ + /* return the buffer into pool */ + vars.FreeBuffer(buf) + return nil +} + +func EncodeNil(rb *[]byte) error { + *rb = append(*rb, 'n', 'u', 'l', 'l') + return nil +} + +// func Make_EncodeTypedPointer(computor func(*rt.GoType, ...interface{}) (interface{}, error)) func(*[]byte, *rt.GoType, *unsafe.Pointer, *vars.Stack, uint64) error { +// return func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error { +// if vt == nil { +// return EncodeNil(buf) +// } else if fn, err := vars.FindOrCompile(vt, (fv&(1< 0 { + // output buffer + dp := unsafe.Pointer(uintptr(b.Ptr) + uintptr(b.Len)) + dn := b.Cap - b.Len + // call native.Quote, dn is byte count it outputs + opts := uint64(0) + if double { + opts = types.F_DOUBLE_UNQUOTE + } + ret := native.Quote(sp, nb, dp, &dn, opts) + // update *buf length + b.Len += dn + + // no need more output + if ret >= 0 { + break + } + + // double buf size + *b = rt.GrowSlice(typeByte, *b, b.Cap*2) + // ret is the complement of consumed input + ret = ^ret + // update input buffer + nb -= ret + sp = unsafe.Pointer(uintptr(sp) + uintptr(ret)) + } + + runtime.KeepAlive(buf) + runtime.KeepAlive(sp) + if double { + buf = append(buf, `\""`...) + } else { + buf = append(buf, `"`...) + } + + return buf +} + +func HtmlEscape(dst []byte, src []byte) []byte { + var sidx int + + dst = append(dst, src[:0]...) // avoid check nil dst + sbuf := (*rt.GoSlice)(unsafe.Pointer(&src)) + dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst)) + + /* grow dst if it is shorter */ + if cap(dst)-len(dst) < len(src)+types.BufPaddingSize { + cap := len(src)*3/2 + types.BufPaddingSize + *dbuf = rt.GrowSlice(typeByte, *dbuf, cap) + } + + for sidx < sbuf.Len { + sp := rt.Add(sbuf.Ptr, uintptr(sidx)) + dp := rt.Add(dbuf.Ptr, uintptr(dbuf.Len)) + + sn := sbuf.Len - sidx + dn := dbuf.Cap - dbuf.Len + nb := native.HTMLEscape(sp, sn, dp, &dn) + + /* check for errors */ + if dbuf.Len += dn; nb >= 0 { + break + } + + /* not enough space, grow the slice and try again */ + sidx += ^nb + *dbuf = rt.GrowSlice(typeByte, *dbuf, dbuf.Cap*2) + } + return dst +} + +func F64toa(buf []byte, v float64) ([]byte) { + if v == 0 { + return append(buf, '0') + } + buf = rt.GuardSlice2(buf, 64) + ret := native.F64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func F32toa(buf []byte, v float32) ([]byte) { + if v == 0 { + return append(buf, '0') + } + buf = rt.GuardSlice2(buf, 64) + ret := native.F32toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func I64toa(buf []byte, v int64) ([]byte) { + buf = rt.GuardSlice2(buf, 32) + ret := native.I64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + +func U64toa(buf []byte, v uint64) ([]byte) { + buf = rt.GuardSlice2(buf, 32) + ret := native.U64toa((*byte)(rt.IndexByte(buf, len(buf))), v) + if ret > 0 { + return buf[:len(buf)+ret] + } else { + return buf + } +} + diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go new file mode 100644 index 00000000..c15cbf7d --- /dev/null +++ b/vendor/github.com/bytedance/sonic/internal/encoder/alg/spec_compat.go @@ -0,0 +1,148 @@ +// +build !amd64,!arm64 go1.24 !go1.16 arm64,!go1.20 + +/** + * Copyright 2024 ByteDance Inc. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package alg + +import ( + _ "unsafe" + "unicode/utf8" + "strconv" + "bytes" + "encoding/json" + + "github.com/bytedance/sonic/internal/rt" +) + +// Valid validates json and returns first non-blank character position, +// if it is only one valid json value. +// Otherwise returns invalid character position using start. +// +// Note: it does not check for the invalid UTF-8 characters. +func Valid(data []byte) (ok bool, start int) { + ok = json.Valid(data) + return ok, 0 +} + +var typeByte = rt.UnpackEface(byte(0)).Type + +func Quote(e []byte, s string, double bool) []byte { + if len(s) == 0 { + if double { + return append(e, `"\"\""`...) + } + return append(e, `""`...) + } + + b := e + ss := len(e) + e = append(e, '"') + start := 0 + + for i := 0; i < len(s); { + if b := s[i]; b < utf8.RuneSelf { + if rt.SafeSet[b] { + i++ + continue + } + if start < i { + e = append(e, s[start:i]...) + } + e = append(e, '\\') + switch b { + case '\\', '"': + e = append(e, b) + case '\n': + e = append(e, 'n') + case '\r': + e = append(e, 'r') + case '\t': + e = append(e, 't') + default: + // This encodes bytes < 0x20 except for \t, \n and \r. + // If escapeHTML is set, it also escapes <, >, and & + // because they can lead to security holes when + // user-controlled strings are rendered into JSON + // and served to some browsers. + e = append(e, `u00`...) + e = append(e, rt.Hex[b>>4]) + e = append(e, rt.Hex[b&0xF]) + } + i++ + start = i + continue + } + c, size := utf8.DecodeRuneInString(s[i:]) + // if correct && c == utf8.RuneError && size == 1 { + // if start < i { + // e = append(e, s[start:i]...) + // } + // e = append(e, `\ufffd`...) + // i += size + // start = i + // continue + // } + if c == '\u2028' || c == '\u2029' { + if start < i { + e = append(e, s[start:i]...) + } + e = append(e, `\u202`...) + e = append(e, rt.Hex[c&0xF]) + i += size + start = i + continue + } + i += size + } + + if start < len(s) { + e = append(e, s[start:]...) 
+ } + e = append(e, '"') + + if double { + return strconv.AppendQuote(b, string(e[ss:])) + } else { + return e + } +} + +func HtmlEscape(dst []byte, src []byte) []byte { + buf := bytes.NewBuffer(dst) + json.HTMLEscape(buf, src) + return buf.Bytes() +} + +func F64toa(buf []byte, v float64) ([]byte) { + bs := bytes.NewBuffer(buf) + _ = json.NewEncoder(bs).Encode(v) + return bs.Bytes() +} + +func F32toa(buf []byte, v float32) ([]byte) { + bs := bytes.NewBuffer(buf) + _ = json.NewEncoder(bs).Encode(v) + return bs.Bytes() +} + +func I64toa(buf []byte, v int64) ([]byte) { + return strconv.AppendInt(buf, int64(v), 10) +} + +func U64toa(buf []byte, v uint64) ([]byte) { + return strconv.AppendUint(buf, v, 10) +} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go b/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go deleted file mode 100644 index 0a99f30a..00000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/asm_stubs_amd64_go116.go +++ /dev/null @@ -1,51 +0,0 @@ -// +build go1.16,!go1.17 - -// Copyright 2023 CloudWeGo Authors -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package encoder - -import ( - `strconv` - - `github.com/bytedance/sonic/internal/jit` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` -) - -var ( - _V_writeBarrier = jit.Imm(int64(_runtime_writeBarrier)) - - _F_gcWriteBarrierAX = jit.Func(gcWriteBarrierAX) -) - -func (self *_Assembler) WritePtr(i int, ptr obj.Addr, rec obj.Addr) { - if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX { - panic("rec contains AX!") - } - self.Emit("MOVQ", _V_writeBarrier, _R10) - self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0)) - self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, _AX) - self.xsave(_DI) - self.Emit("LEAQ", rec, _DI) - self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX - self.Rjmp("CALL", _R10) - self.xload(_DI) - self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}") - self.Emit("MOVQ", ptr, rec) - self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}") -} - diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go deleted file mode 100644 index 330b6881..00000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_regabi_amd64.go +++ /dev/null @@ -1,1176 +0,0 @@ -// +build go1.17,!go1.23 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package encoder - -import ( - `fmt` - `reflect` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/cpu` - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native/types` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` - - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/rt` -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %rdi : result pointer - * %rsi : result length - * %rdx : result capacity - * %r12 : sp->p - * %r13 : sp->q - * %r14 : sp->x - * %r15 : sp->f - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error) - * - * buf : (FP) - * p : 8(FP) - * sb : 16(FP) - * fv : 24(FP) - * err.vt : 32(FP) - * err.vp : 40(FP) - */ - -const ( - _S_cond = iota - _S_init -) - -const ( - _FP_args = 32 // 32 bytes for spill registers of arguments - _FP_fargs = 40 // 40 bytes for passing arguments to other Go functions - _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions - _FP_locals = 24 // 24 bytes for local variables -) - -const ( - _FP_loffs = _FP_fargs + _FP_saves - _FP_offs = _FP_loffs + _FP_locals - // _FP_offs = _FP_loffs + _FP_locals + _FP_debug - _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _FM_exp32 = 0x7f800000 - _FM_exp64 = 0x7ff0000000000000 -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e') - _IM_open = 0x00225c22 // '"\"∅' - _IM_array = 0x5d5b // '[]' - _IM_object = 0x7d7b // '{}' - _IM_mulv = -0x5555555555555555 -) - -const ( - _LB_more_space = "_more_space" - _LB_more_space_return = "_more_space_return_" -) - -const ( - _LB_error = "_error" - _LB_error_too_deep = "_error_too_deep" - _LB_error_invalid_number = "_error_invalid_number" - _LB_error_nan_or_infinite = "_error_nan_or_infinite" - _LB_panic = "_panic" -) - -var ( - _AX = jit.Reg("AX") - _BX = jit.Reg("BX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - _DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") - _R9 = jit.Reg("R9") -) - -var ( - _X0 = jit.Reg("X0") - _X15 = jit.Reg("X15") - _Y0 = jit.Reg("Y0") -) - -var ( - _ST = jit.Reg("R15") // can't use R14 since it's always scratched by Go... 
- _RP = jit.Reg("DI") - _RL = jit.Reg("SI") - _RC = jit.Reg("DX") -) - -var ( - _LR = jit.Reg("R9") - _ET = jit.Reg("AX") - _EP = jit.Reg("BX") -) - -var ( - _SP_p = jit.Reg("R10") // saved on BX when call_c - _SP_q = jit.Reg("R11") // saved on BP when call_c - _SP_x = jit.Reg("R12") - _SP_f = jit.Reg("R13") -) - -var ( - _ARG_rb = jit.Ptr(_SP, _FP_base) - _ARG_vp = jit.Ptr(_SP, _FP_base + 8) - _ARG_sb = jit.Ptr(_SP, _FP_base + 16) - _ARG_fv = jit.Ptr(_SP, _FP_base + 24) -) - -var ( - _RET_et = _ET - _RET_ep = _EP -) - -var ( - _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves) - _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) -) - -var ( - _REG_ffi = []obj.Addr{ _RP, _RL, _RC, _SP_q} - _REG_b64 = []obj.Addr{_SP_p, _SP_q} - - _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} - _REG_ms = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR} - _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL} -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - x int - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** Assembler Interface **/ - -func (self *_Assembler) Load() _Encoder { - return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.builtins() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_null : (*_Assembler)._asm_OP_null, - _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr, - _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 : (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_quote : (*_Assembler)._asm_OP_quote, - _OP_number : (*_Assembler)._asm_OP_number, - _OP_eface : (*_Assembler)._asm_OP_eface, - _OP_iface : (*_Assembler)._asm_OP_iface, - _OP_byte : (*_Assembler)._asm_OP_byte, - _OP_text : (*_Assembler)._asm_OP_text, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_is_nil : (*_Assembler)._asm_OP_is_nil, - _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1, - _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1, - _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2, - _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4, - _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8, - _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_map_iter : (*_Assembler)._asm_OP_map_iter, - _OP_map_stop : (*_Assembler)._asm_OP_map_stop, - _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key, - _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key, - _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next, - _OP_slice_len : 
(*_Assembler)._asm_OP_slice_len, - _OP_slice_next : (*_Assembler)._asm_OP_slice_next, - _OP_marshal : (*_Assembler)._asm_OP_marshal, - _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p, - _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text, - _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p, - _OP_cond_set : (*_Assembler)._asm_OP_cond_set, - _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) builtins() { - self.more_space() - self.error_too_deep() - self.error_invalid_number() - self.error_nan_or_infinite() - self.go_panic() -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _ET, _ET) - self.Emit("XORL", _EP, _EP) - self.Link(_LB_error) - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", jit.Imm(0), _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", jit.Imm(0), _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.Emit("MOVQ", _AX, _ARG_rb) // MOVQ AX, rb<>+0(FP) - self.Emit("MOVQ", _BX, _ARG_vp) // MOVQ BX, vp<>+8(FP) - self.Emit("MOVQ", _CX, _ARG_sb) // MOVQ CX, sb<>+16(FP) - self.Emit("MOVQ", _DI, _ARG_fv) // MOVQ DI, rb<>+24(FP) - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX) , DI - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX) , SI - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), DX - self.Emit("MOVQ", _BX, _SP_p) // MOVQ BX, R10 - self.Emit("MOVQ", _CX, _ST) // MOVQ CX, R8 - self.Emit("XORL", _SP_x, _SP_x) // XORL R10, R12 - self.Emit("XORL", _SP_f, _SP_f) // XORL R11, R13 - self.Emit("XORL", _SP_q, _SP_q) // XORL R13, R11 -} - -/** Assembler Inline Functions **/ - -func (self *_Assembler) xsave(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) xload(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) rbuf_di() { - if _RP.Reg != x86.REG_DI { - panic("register allocation messed up: RP != DI") - } else { - self.Emit("ADDQ", _RL, _RP) - } -} - -func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) { - self.check_size(nd) - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI - self.call_c(fn) // CALL_C $fn - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) store_str(s string) { - i := 0 - m := rt.Str2Mem(s) - - /* 8-byte stores */ - for i <= len(m) - 8 { - self.Emit("MOVQ", 
jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX - self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL) - i += 8 - } - - /* 4-byte stores */ - if i <= len(m) - 4 { - self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL) - i += 4 - } - - /* 2-byte stores */ - if i <= len(m) - 2 { - self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL) - i += 2 - } - - /* last byte */ - if i < len(m) { - self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL) - } -} - -func (self *_Assembler) check_size(n int) { - self.check_size_rl(jit.Ptr(_RL, int64(n))) -} - -func (self *_Assembler) check_size_r(r obj.Addr, d int) { - self.check_size_rl(jit.Sib(_RL, r, 1, int64(d))) -} - -func (self *_Assembler) check_size_rl(v obj.Addr) { - idx := self.x - key := _LB_more_space_return + strconv.Itoa(idx) - - /* the following code relies on LR == R9 to work */ - if _LR.Reg != x86.REG_R9 { - panic("register allocation messed up: LR != R9") - } - - /* check for buffer capacity */ - self.x++ - self.Emit("LEAQ", v, _AX) // LEAQ $v, AX - self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC - self.Sjmp("JBE" , key) // JBE _more_space_return_{n} - self.slice_grow_ax(key) // GROW $key - self.Link(key) // _more_space_return_{n}: -} - -func (self *_Assembler) slice_grow_ax(ret string) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9 - self.Sref(ret, 4) // .... &ret - self.Sjmp("JMP" , _LB_more_space) // JMP _more_space -} - -/** State Stack Helpers **/ - -const ( - _StateSize = int64(unsafe.Sizeof(_State{})) - _StackLimit = _MaxStack * _StateSize -) - -func (self *_Assembler) save_state() { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R9) // LEAQ _StateSize(CX), R9 - self.Emit("CMPQ", _R9, jit.Imm(_StackLimit)) // CMPQ R9, $_StackLimit - self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep - self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX) - self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX) - self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX) - self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX) - self.Emit("MOVQ", _R9, jit.Ptr(_ST, 0)) // MOVQ R9, (ST) -} - -func (self *_Assembler) drop_state(decr int64) { - self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX) -} - -/** Buffer Helpers **/ - -func (self *_Assembler) add_char(ch byte) { - self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) add_long(ch uint32, n int64) { - self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL -} - -func 
(self *_Assembler) add_text(ss string) { - self.store_str(ss) // TEXT $ss - self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL -} - -// get *buf at AX -func (self *_Assembler) prep_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) -} - -func (self *_Assembler) save_buffer() { - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX) - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX) -} - -// get *buf at AX -func (self *_Assembler) load_buffer_AX() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC -} - -/** Function Interface Helpers **/ - -func (self *_Assembler) call(pc obj.Addr) { - self.Emit("MOVQ", pc, _LR) // MOVQ $pc, AX - self.Rjmp("CALL", _LR) // CALL AX -} - -func (self *_Assembler) save_c() { - self.xsave(_REG_ffi...) // SAVE $REG_ffi -} - -func (self *_Assembler) call_b64(pc obj.Addr) { - self.xsave(_REG_b64...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_b64...) // LOAD $REG_ffi -} - -func (self *_Assembler) call_c(pc obj.Addr) { - self.Emit("XCHGQ", _SP_p, _BX) - self.call(pc) // CALL $pc - self.xload(_REG_ffi...) // LOAD $REG_ffi - self.Emit("XCHGQ", _SP_p, _BX) - self.Emit("XORPS", _X15, _X15) -} - -func (self *_Assembler) call_go(pc obj.Addr) { - self.xsave(_REG_all...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_all...) // LOAD $REG_all -} - -func (self *_Assembler) call_more_space(pc obj.Addr) { - self.xsave(_REG_ms...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_ms...) // LOAD $REG_all -} - -func (self *_Assembler) call_encoder(pc obj.Addr) { - self.xsave(_REG_enc...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_enc...) 
// LOAD $REG_all -} - -func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) { - switch vt.Kind() { - case reflect.Interface : self.call_marshaler_i(fn, it) - case reflect.Ptr, reflect.Map : self.call_marshaler_v(fn, it, vt, true) - // struct/array of 1 direct iface type can be direct - default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect()) - } -} - -func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ" , _AX, _BX) // MOVQ AX, BX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX - self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX - self.call_go(_F_assertI2I) // CALL_GO assertI2I - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ", _BX, _CX) // MOVQ BX, CX - self.Emit("MOVQ", _AX, _BX) // MOVQ AX, BX - self.prep_buffer_AX() - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_null_{n}") // _null_{n}: - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) { - self.prep_buffer_AX() // MOVE {buf}, (SP) - self.Emit("MOVQ", jit.Itab(it, vt), _BX) // MOVQ $(itab(it, vt)), BX - - /* dereference the pointer if needed */ - if !deref { - self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX - } else { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _CX) // MOVQ 0(SP.p), CX - } - - /* call the encoder, and perform error checks */ - self.Emit("MOVQ", _ARG_fv, _DI) // MOVQ ARG.fv, DI - self.call_go(fn) // CALL $fn - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() -} - -/** Builtin: _more_space **/ - -var ( - _T_byte = jit.Type(byteType) - _F_growslice = jit.Func(growslice) -) - -// AX must saving n -func (self *_Assembler) more_space() { - self.Link(_LB_more_space) - self.Emit("MOVQ", _RP, _BX) // MOVQ DI, BX - self.Emit("MOVQ", _RL, _CX) // MOVQ SI, CX - self.Emit("MOVQ", _RC, _DI) // MOVQ DX, DI - self.Emit("MOVQ", _AX, _SI) // MOVQ AX, SI - self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, AX - self.call_more_space(_F_growslice) // CALL $pc - self.Emit("MOVQ", _AX, _RP) // MOVQ AX, DI - self.Emit("MOVQ", _BX, _RL) // MOVQ BX, SI - self.Emit("MOVQ", _CX, _RC) // MOVQ CX, DX - self.save_buffer() // SAVE {buf} - self.Rjmp("JMP" , _LR) // JMP LR -} - -/** Builtin Errors **/ - -var ( - _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep)))) - _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite)))) - _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType) -) - -func (self *_Assembler) error_too_deep() { - self.Link(_LB_error_too_deep) - self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) 
error_invalid_number() { - self.Link(_LB_error_invalid_number) - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ 0(SP), AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _BX) // MOVQ 8(SP), BX - self.call_go(_F_error_number) // CALL_GO error_number - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_nan_or_infinite() { - self.Link(_LB_error_nan_or_infinite) - self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** String Encoding Routine **/ - -var ( - _F_quote = jit.Imm(int64(native.S_quote)) - _F_panic = jit.Func(goPanic) -) - -func (self *_Assembler) go_panic() { - self.Link(_LB_panic) - self.Emit("MOVQ", _SP_p, _BX) - self.call_go(_F_panic) -} - -func (self *_Assembler) encode_string(doubleQuote bool) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n} - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) - self.Sjmp("JNE" , "_str_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX) - self.Sjmp("JMP", _LB_panic) - self.Link("_str_next_{n}") - - /* openning quote, check for double quote */ - if !doubleQuote { - self.check_size_r(_AX, 2) // SIZE $2 - self.add_char('"') // CHAR $'"' - } else { - self.check_size_r(_AX, 6) // SIZE $6 - self.add_long(_IM_open, 3) // TEXT $`"\"` - } - - /* quoting loop */ - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp - self.Link("_str_loop_{n}") // _str_loop_{n}: - self.save_c() // SAVE $REG_ffi - - /* load the output buffer first, and then input buffer, - * because the parameter registers collide with RP / RL / RC */ - self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX - self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX - self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX - self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX - self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI - self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI - self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI - - /* set the flags based on `doubleQuote` */ - if !doubleQuote { - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - } else { - self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - } - - /* call the native quoter */ - self.call_c(_F_quote) // CALL quote - self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL - - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n} - - /* close the string, check for double quote */ - if !doubleQuote { - self.check_size(1) // SIZE $1 - self.add_char('"') // CHAR $'"' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } else { - self.check_size(3) // SIZE $3 - self.add_text("\\\"\"") // TEXT $'\""' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } - - /* not enough space to contain the quoted string */ - self.Link("_str_space_{n}") // _str_space_{n}: - self.Emit("NOTQ", _AX) // NOTQ AX - self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp - self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX - self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n} - - /* empty string, check for double quote */ - 
if !doubleQuote { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(2) // SIZE $2 - self.add_text("\"\"") // TEXT $'""' - self.Link("_str_end_{n}") // _str_end_{n}: - } else { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(6) // SIZE $6 - self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""' - self.Link("_str_end_{n}") // _str_end_{n}: - } -} - -/** OpCode Assembler Functions **/ - -var ( - _T_json_Marshaler = rt.UnpackType(jsonMarshalerType) - _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType) -) - -var ( - _F_f64toa = jit.Imm(int64(native.S_f64toa)) - _F_f32toa = jit.Imm(int64(native.S_f32toa)) - _F_i64toa = jit.Imm(int64(native.S_i64toa)) - _F_u64toa = jit.Imm(int64(native.S_u64toa)) - _F_b64encode = jit.Imm(int64(_subr__b64encode)) -) - -var ( - _F_memmove = jit.Func(memmove) - _F_error_number = jit.Func(error_number) - _F_isValidNumber = jit.Func(isValidNumber) -) - -var ( - _F_iteratorStop = jit.Func(iteratorStop) - _F_iteratorNext = jit.Func(iteratorNext) - _F_iteratorStart = jit.Func(iteratorStart) -) - -var ( - _F_encodeTypedPointer obj.Addr - _F_encodeJsonMarshaler obj.Addr - _F_encodeTextMarshaler obj.Addr -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -func init() { - _F_encodeTypedPointer = jit.Func(encodeTypedPointer) - _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler) - _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler) -} - -func (self *_Assembler) _asm_OP_null(_ *_Instr) { - self.check_size(4) - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL -} - -func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_arr_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_arr_end_{n}") - self.Link("_empty_arr_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_arr_end_{n}") -} - -func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_obj_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_obj_end_{n}") - self.Link("_empty_obj_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_obj_end_{n}") -} - -func (self *_Assembler) _asm_OP_bool(_ *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Sjmp("JE" , "_false_{n}") // JE _false_{n} - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.check_size(5) // SIZE $5 - self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1) - self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1) - self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_i8(_ *_Instr) { - self.store_int(4, _F_i64toa, "MOVBQSX") -} - -func (self *_Assembler) _asm_OP_i16(_ *_Instr) { - self.store_int(6, _F_i64toa, "MOVWQSX") -} - -func (self *_Assembler) _asm_OP_i32(_ *_Instr) { - self.store_int(17, _F_i64toa, "MOVLQSX") -} - -func (self *_Assembler) _asm_OP_i64(_ *_Instr) { - 
self.store_int(21, _F_i64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_u8(_ *_Instr) { - self.store_int(3, _F_u64toa, "MOVBQZX") -} - -func (self *_Assembler) _asm_OP_u16(_ *_Instr) { - self.store_int(5, _F_u64toa, "MOVWQZX") -} - -func (self *_Assembler) _asm_OP_u32(_ *_Instr) { - self.store_int(16, _F_u64toa, "MOVLQZX") -} - -func (self *_Assembler) _asm_OP_u64(_ *_Instr) { - self.store_int(20, _F_u64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_f32(_ *_Instr) { - self.check_size(32) - self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX - self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX - self.Emit("XORL" , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0 - self.call_c(_F_f32toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_f64(_ *_Instr) { - self.check_size(32) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX - self.Emit("ANDQ" , _CX, _AX) // ANDQ CX, AX - self.Emit("XORQ" , _CX, _AX) // XORQ CX, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0 - self.call_c(_F_f64toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_str(_ *_Instr) { - self.encode_string(false) -} - -func (self *_Assembler) _asm_OP_bin(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX - self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX - self.Emit("MOVQ", _DX, _BX) // MOVQ DX, BX - self.From("MULQ", _CX) // MULQ CX - self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX - self.Emit("ORQ" , jit.Imm(2), _AX) // ORQ $2, AX - self.Emit("MOVQ", _BX, _DX) // MOVQ BX, DX - self.check_size_r(_AX, 0) // SIZE AX - self.add_char('"') // CHAR $'"' - self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI - self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ SI, 8(DI) - self.Emit("MOVQ", _SP_p, _SI) // MOVQ SP.p, SI - - /* check for AVX2 support */ - if !cpu.HasAVX2 { - self.Emit("XORL", _DX, _DX) // XORL DX, DX - } else { - self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX - } - - /* call the encoder */ - self.call_b64(_F_b64encode) // CALL b64encode - self.load_buffer_AX() // LOAD {buf} - self.add_char('"') // CHAR $'"' -} - -func (self *_Assembler) _asm_OP_quote(_ *_Instr) { - self.encode_string(true) -} - -func (self *_Assembler) _asm_OP_number(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX - self.Emit("TESTQ", _BX, _BX) // TESTQ BX, BX - self.Sjmp("JZ" , "_empty_{n}") - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_number_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), _AX) - self.Sjmp("JMP", _LB_panic) - self.Link("_number_next_{n}") - self.call_go(_F_isValidNumber) // CALL_GO isValidNumber - self.Emit("CMPB" , _AX, jit.Imm(0)) // CMPB AX, $0 - self.Sjmp("JE" , _LB_error_invalid_number) // JE _error_invalid_number - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _BX) // MOVQ (SP.p), BX - 
self.check_size_r(_BX, 0) // SIZE BX - self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX - self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _BX) // MOVOU (SP.p), BX - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _CX) // MOVOU X0, 8(SP) - self.call_go(_F_memmove) // CALL_GO memmove - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_empty_{n}") // _empty_{n} - self.check_size(1) // SIZE $1 - self.add_char('0') // CHAR $'0' - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) _asm_OP_eface(_ *_Instr) { - self.prep_buffer_AX() // MOVE {buf}, AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX - self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ fv, AX - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *_Assembler) _asm_OP_iface(_ *_Instr) { - self.prep_buffer_AX() // MOVE {buf}, AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX - self.Emit("MOVQ" , jit.Ptr(_CX, 8), _BX) // MOVQ 8(CX), BX - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _CX) // LEAQ 8(SP.p), CX - self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ fv, AX - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *_Assembler) _asm_OP_byte(p *_Instr) { - self.check_size(1) - self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) _asm_OP_text(p *_Instr) { - self.check_size(len(p.vs())) // SIZE ${len(p.vs())} - self.add_text(p.vs()) // TEXT ${p.vs()} -} - -func (self *_Assembler) _asm_OP_deref(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p -} - -func (self *_Assembler) _asm_OP_index(p *_Instr) { - self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX - self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p -} - -func (self *_Assembler) _asm_OP_load(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q -} - -func (self *_Assembler) _asm_OP_save(_ *_Instr) { - self.save_state() -} - -func (self *_Assembler) _asm_OP_drop(_ *_Instr) { - self.drop_state(_StateSize) -} - -func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) { - self.drop_state(_StateSize * 2) // DROP $(_StateSize * 2) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX) -} - -func (self *_Assembler) _asm_OP_recurse(p *_Instr) { - self.prep_buffer_AX() // MOVE {buf}, (SP) - vt, pv := p.vp() - self.Emit("MOVQ", jit.Type(vt), _BX) // MOVQ $(type(p.vt())), BX - - /* check for indirection */ - if !rt.UnpackType(vt).Indirect() { - self.Emit("MOVQ", _SP_p, _CX) // MOVQ SP.p, CX - } else { - self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, VAR.vp - self.Emit("LEAQ", _VAR_vp, _CX) // LEAQ VAR.vp, CX - } - - /* call 
the encoder */ - self.Emit("MOVQ" , _ST, _DI) // MOVQ ST, DI - self.Emit("MOVQ" , _ARG_fv, _SI) // MOVQ $fv, SI - if pv { - self.Emit("BTCQ", jit.Imm(bitPointerValue), _SI) // BTCQ $1, SI - } - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.load_buffer_AX() -} - -func (self *_Assembler) _asm_OP_is_nil(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) { - self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) { - self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_goto(p *_Instr) { - self.Xjmp("JMP", p.vi()) -} - -func (self *_Assembler) _asm_OP_map_iter(p *_Instr) { - self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ $p.vt(), AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _BX) // MOVQ (SP.p), BX - self.Emit("MOVQ" , _ARG_fv, _CX) // MOVQ fv, CX - self.call_go(_F_iteratorStart) // CALL_GO iteratorStart - self.Emit("MOVQ" , _AX, _SP_q) // MOVQ AX, SP.q - self.Emit("MOVQ" , _BX, _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , _CX, _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) { - self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX - self.call_go(_F_iteratorStop) // CALL_GO iteratorStop - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p - self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p - self.Xjmp("JZ" , p.vi()) // JNZ p.vi() -} - -func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) { - self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv - self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n} - self.encode_string(false) // STR $false - self.Xjmp("JMP", p.vi()) // JMP ${p.vi()} - self.Link("_unordered_key_{n}") // _unordered_key_{n}: -} - -func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p - self.Emit("MOVQ", _SP_q, _AX) // MOVQ SP.q, AX - self.call_go(_F_iteratorNext) // CALL_GO iteratorNext -} - -func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), 
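The `_OP_map_*` opcodes drive a start/next/stop protocol around the runtime's map iterator. Sketched with `reflect.MapRange` standing in for `iteratorStart`/`iteratorNext`/`iteratorStop` (the key sorting done under `bitSortMapKeys` is omitted here):

```go
package main

import (
	"fmt"
	"reflect"
)

// encodeMap mirrors the opcode sequence: map_iter starts the iterator,
// map_check_key tests for exhaustion, map_write_key emits the key,
// map_value_next advances, and map_stop releases the iterator.
func encodeMap(m interface{}) string {
	out := []byte{'{'}
	it := reflect.ValueOf(m).MapRange() // _OP_map_iter
	first := true
	for it.Next() { // _OP_map_check_key / _OP_map_value_next
		if !first {
			out = append(out, ',')
		}
		first = false
		out = append(out, fmt.Sprintf("%q:%v", it.Key(), it.Value())...)
	}
	return string(append(out, '}')) // _OP_map_stop
}

func main() {
	fmt.Println(encodeMap(map[string]int{"a": 1})) // {"a":1}
}
```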
_SP_p) // MOVQ (SP.p), SP.p - self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f -} - -func (self *_Assembler) _asm_OP_slice_next(p *_Instr) { - self.Emit("TESTQ" , _SP_x, _SP_x) // TESTQ SP.x, SP.x - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("SUBQ" , jit.Imm(1), _SP_x) // SUBQ $1, SP.x - self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f - self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX - self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p -} - -func (self *_Assembler) _asm_OP_marshal(p *_Instr) { - self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) { - if p.vk() != reflect.Ptr { - panic("marshal_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) { - self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) { - if p.vk() != reflect.Ptr { - panic("marshal_text_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) { - self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f -} - -func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) { - self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f - self.Xjmp("JC" , p.vi()) -} - -func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) { - self.Emit("MOVQ", jit.Imm(int64(p2.op())), _CX) // MOVQ $(p2.op()), AX - self.Emit("MOVQ", jit.Imm(int64(p1.op())), _BX) // MOVQ $(p1.op()), BX - self.Emit("MOVQ", jit.Imm(int64(i)), _AX) // MOVQ $(i), CX - self.call_go(_F_println) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go b/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go deleted file mode 100644 index 89dafc84..00000000 --- a/vendor/github.com/bytedance/sonic/internal/encoder/assembler_stkabi_amd64.go +++ /dev/null @@ -1,1175 +0,0 @@ -// +build go1.16,!go1.17 - -/* - * Copyright 2021 ByteDance Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package encoder - -import ( - `fmt` - `reflect` - `strconv` - `unsafe` - - `github.com/bytedance/sonic/internal/cpu` - `github.com/bytedance/sonic/internal/jit` - `github.com/bytedance/sonic/internal/native/types` - `github.com/twitchyliquid64/golang-asm/obj` - `github.com/twitchyliquid64/golang-asm/obj/x86` - - `github.com/bytedance/sonic/internal/native` - `github.com/bytedance/sonic/internal/rt` -) - -/** Register Allocations - * - * State Registers: - * - * %rbx : stack base - * %rdi : result pointer - * %rsi : result length - * %rdx : result capacity - * %r12 : sp->p - * %r13 : sp->q - * %r14 : sp->x - * %r15 : sp->f - * - * Error Registers: - * - * %r10 : error type register - * %r11 : error pointer register - */ - -/** Function Prototype & Stack Map - * - * func (buf *[]byte, p unsafe.Pointer, sb *_Stack, fv uint64) (err error) - * - * buf : (FP) - * p : 8(FP) - * sb : 16(FP) - * fv : 24(FP) - * err.vt : 32(FP) - * err.vp : 40(FP) - */ - -const ( - _S_cond = iota - _S_init -) - -const ( - _FP_args = 48 // 48 bytes for passing arguments to this function - _FP_fargs = 64 // 64 bytes for passing arguments to other Go functions - _FP_saves = 64 // 64 bytes for saving the registers before CALL instructions - _FP_locals = 24 // 24 bytes for local variables -) - -const ( - _FP_offs = _FP_fargs + _FP_saves + _FP_locals - _FP_size = _FP_offs + 8 // 8 bytes for the parent frame pointer - _FP_base = _FP_size + 8 // 8 bytes for the return address -) - -const ( - _FM_exp32 = 0x7f800000 - _FM_exp64 = 0x7ff0000000000000 -) - -const ( - _IM_null = 0x6c6c756e // 'null' - _IM_true = 0x65757274 // 'true' - _IM_fals = 0x736c6166 // 'fals' ('false' without the 'e') - _IM_open = 0x00225c22 // '"\"∅' - _IM_array = 0x5d5b // '[]' - _IM_object = 0x7d7b // '{}' - _IM_mulv = -0x5555555555555555 -) - -const ( - _LB_more_space = "_more_space" - _LB_more_space_return = "_more_space_return_" -) - -const ( - _LB_error = "_error" - _LB_error_too_deep = "_error_too_deep" - _LB_error_invalid_number = "_error_invalid_number" - _LB_error_nan_or_infinite = "_error_nan_or_infinite" - _LB_panic = "_panic" -) - -var ( - _AX = jit.Reg("AX") - _CX = jit.Reg("CX") - _DX = jit.Reg("DX") - _DI = jit.Reg("DI") - _SI = jit.Reg("SI") - _BP = jit.Reg("BP") - _SP = jit.Reg("SP") - _R8 = jit.Reg("R8") -) - -var ( - _X0 = jit.Reg("X0") - _Y0 = jit.Reg("Y0") -) - -var ( - _ST = jit.Reg("BX") - _RP = jit.Reg("DI") - _RL = jit.Reg("SI") - _RC = jit.Reg("DX") -) - -var ( - _LR = jit.Reg("R9") - _R10 = jit.Reg("R10") // used for gcWriterBarrier - _ET = jit.Reg("R10") - _EP = jit.Reg("R11") -) - -var ( - _SP_p = jit.Reg("R12") - _SP_q = jit.Reg("R13") - _SP_x = jit.Reg("R14") - _SP_f = jit.Reg("R15") -) - -var ( - _ARG_rb = jit.Ptr(_SP, _FP_base) - _ARG_vp = jit.Ptr(_SP, _FP_base + 8) - _ARG_sb = jit.Ptr(_SP, _FP_base + 16) - _ARG_fv = jit.Ptr(_SP, _FP_base + 24) -) - -var ( - _RET_et = jit.Ptr(_SP, _FP_base + 32) - _RET_ep = jit.Ptr(_SP, _FP_base + 40) -) - -var ( - _VAR_sp = jit.Ptr(_SP, _FP_fargs + _FP_saves) - _VAR_dn = jit.Ptr(_SP, _FP_fargs + _FP_saves + 8) - _VAR_vp = jit.Ptr(_SP, _FP_fargs + _FP_saves + 16) -) - -var ( - _REG_ffi = []obj.Addr{_RP, _RL, _RC} - _REG_enc = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RL} - _REG_jsr = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _LR} - _REG_all = []obj.Addr{_ST, _SP_x, _SP_f, _SP_p, _SP_q, _RP, _RL, _RC} -) - -type _Assembler struct { - jit.BaseAssembler - p _Program - x int - name string -} - -func newAssembler(p _Program) *_Assembler { - return new(_Assembler).Init(p) -} - -/** 
Assembler Interface **/ -func (self *_Assembler) Load() _Encoder { - return ptoenc(self.BaseAssembler.Load("encode_"+self.name, _FP_size, _FP_args, argPtrs, localPtrs)) -} - -func (self *_Assembler) Init(p _Program) *_Assembler { - self.p = p - self.BaseAssembler.Init(self.compile) - return self -} - -func (self *_Assembler) compile() { - self.prologue() - self.instrs() - self.epilogue() - self.builtins() -} - -/** Assembler Stages **/ - -var _OpFuncTab = [256]func(*_Assembler, *_Instr) { - _OP_null : (*_Assembler)._asm_OP_null, - _OP_empty_arr : (*_Assembler)._asm_OP_empty_arr, - _OP_empty_obj : (*_Assembler)._asm_OP_empty_obj, - _OP_bool : (*_Assembler)._asm_OP_bool, - _OP_i8 : (*_Assembler)._asm_OP_i8, - _OP_i16 : (*_Assembler)._asm_OP_i16, - _OP_i32 : (*_Assembler)._asm_OP_i32, - _OP_i64 : (*_Assembler)._asm_OP_i64, - _OP_u8 : (*_Assembler)._asm_OP_u8, - _OP_u16 : (*_Assembler)._asm_OP_u16, - _OP_u32 : (*_Assembler)._asm_OP_u32, - _OP_u64 : (*_Assembler)._asm_OP_u64, - _OP_f32 : (*_Assembler)._asm_OP_f32, - _OP_f64 : (*_Assembler)._asm_OP_f64, - _OP_str : (*_Assembler)._asm_OP_str, - _OP_bin : (*_Assembler)._asm_OP_bin, - _OP_quote : (*_Assembler)._asm_OP_quote, - _OP_number : (*_Assembler)._asm_OP_number, - _OP_eface : (*_Assembler)._asm_OP_eface, - _OP_iface : (*_Assembler)._asm_OP_iface, - _OP_byte : (*_Assembler)._asm_OP_byte, - _OP_text : (*_Assembler)._asm_OP_text, - _OP_deref : (*_Assembler)._asm_OP_deref, - _OP_index : (*_Assembler)._asm_OP_index, - _OP_load : (*_Assembler)._asm_OP_load, - _OP_save : (*_Assembler)._asm_OP_save, - _OP_drop : (*_Assembler)._asm_OP_drop, - _OP_drop_2 : (*_Assembler)._asm_OP_drop_2, - _OP_recurse : (*_Assembler)._asm_OP_recurse, - _OP_is_nil : (*_Assembler)._asm_OP_is_nil, - _OP_is_nil_p1 : (*_Assembler)._asm_OP_is_nil_p1, - _OP_is_zero_1 : (*_Assembler)._asm_OP_is_zero_1, - _OP_is_zero_2 : (*_Assembler)._asm_OP_is_zero_2, - _OP_is_zero_4 : (*_Assembler)._asm_OP_is_zero_4, - _OP_is_zero_8 : (*_Assembler)._asm_OP_is_zero_8, - _OP_is_zero_map : (*_Assembler)._asm_OP_is_zero_map, - _OP_goto : (*_Assembler)._asm_OP_goto, - _OP_map_iter : (*_Assembler)._asm_OP_map_iter, - _OP_map_stop : (*_Assembler)._asm_OP_map_stop, - _OP_map_check_key : (*_Assembler)._asm_OP_map_check_key, - _OP_map_write_key : (*_Assembler)._asm_OP_map_write_key, - _OP_map_value_next : (*_Assembler)._asm_OP_map_value_next, - _OP_slice_len : (*_Assembler)._asm_OP_slice_len, - _OP_slice_next : (*_Assembler)._asm_OP_slice_next, - _OP_marshal : (*_Assembler)._asm_OP_marshal, - _OP_marshal_p : (*_Assembler)._asm_OP_marshal_p, - _OP_marshal_text : (*_Assembler)._asm_OP_marshal_text, - _OP_marshal_text_p : (*_Assembler)._asm_OP_marshal_text_p, - _OP_cond_set : (*_Assembler)._asm_OP_cond_set, - _OP_cond_testc : (*_Assembler)._asm_OP_cond_testc, -} - -func (self *_Assembler) instr(v *_Instr) { - if fn := _OpFuncTab[v.op()]; fn != nil { - fn(self, v) - } else { - panic(fmt.Sprintf("invalid opcode: %d", v.op())) - } -} - -func (self *_Assembler) instrs() { - for i, v := range self.p { - self.Mark(i) - self.instr(&v) - self.debug_instr(i, &v) - } -} - -func (self *_Assembler) builtins() { - self.more_space() - self.error_too_deep() - self.error_invalid_number() - self.error_nan_or_infinite() - self.go_panic() -} - -func (self *_Assembler) epilogue() { - self.Mark(len(self.p)) - self.Emit("XORL", _ET, _ET) - self.Emit("XORL", _EP, _EP) - self.Link(_LB_error) - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Emit("MOVQ", 
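`_OpFuncTab` below is a dense 256-entry dispatch table indexed by opcode, with nil slots catching invalid instructions. The shape, reduced to a runnable sketch (opcode names here are illustrative):

```go
package main

import "fmt"

type op uint8

const (
	opNull op = iota + 1
	opBool
)

// handlers mirrors _OpFuncTab: a 256-entry table indexed by opcode,
// nil entries flagging invalid instructions at dispatch time.
var handlers = [256]func(*[]byte){
	opNull: func(b *[]byte) { *b = append(*b, "null"...) },
	opBool: func(b *[]byte) { *b = append(*b, "true"...) },
}

func exec(b *[]byte, o op) {
	if fn := handlers[o]; fn != nil {
		fn(b)
	} else {
		panic(fmt.Sprintf("invalid opcode: %d", o))
	}
}

func main() {
	var buf []byte
	exec(&buf, opNull)
	fmt.Println(string(buf)) // null
}
```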
_ET, _RET_et) // MOVQ ET, et<>+24(FP) - self.Emit("MOVQ", _EP, _RET_ep) // MOVQ EP, ep<>+32(FP) - self.Emit("MOVQ", jit.Ptr(_SP, _FP_offs), _BP) // MOVQ _FP_offs(SP), BP - self.Emit("ADDQ", jit.Imm(_FP_size), _SP) // ADDQ $_FP_size, SP - self.Emit("RET") // RET -} - -func (self *_Assembler) prologue() { - self.Emit("SUBQ", jit.Imm(_FP_size), _SP) // SUBQ $_FP_size, SP - self.Emit("MOVQ", _BP, jit.Ptr(_SP, _FP_offs)) // MOVQ BP, _FP_offs(SP) - self.Emit("LEAQ", jit.Ptr(_SP, _FP_offs), _BP) // LEAQ _FP_offs(SP), BP - self.load_buffer() // LOAD {buf} - self.Emit("MOVQ", _ARG_vp, _SP_p) // MOVQ vp<>+8(FP), SP.p - self.Emit("MOVQ", _ARG_sb, _ST) // MOVQ sb<>+16(FP), ST - self.Emit("XORL", _SP_x, _SP_x) // XORL SP.x, SP.x - self.Emit("XORL", _SP_f, _SP_f) // XORL SP.f, SP.f - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -/** Assembler Inline Functions **/ - -func (self *_Assembler) xsave(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to save") - } else { - self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + int64(i) * 8)) - } - } -} - -func (self *_Assembler) xload(reg ...obj.Addr) { - for i, v := range reg { - if i > _FP_saves / 8 - 1 { - panic("too many registers to load") - } else { - self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + int64(i) * 8), v) - } - } -} - -func (self *_Assembler) rbuf_di() { - if _RP.Reg != x86.REG_DI { - panic("register allocation messed up: RP != DI") - } else { - self.Emit("ADDQ", _RL, _RP) - } -} - -func (self *_Assembler) store_int(nd int, fn obj.Addr, ins string) { - self.check_size(nd) - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit(ins, jit.Ptr(_SP_p, 0), _SI) // $ins (SP.p), SI - self.call_c(fn) // CALL_C $fn - self.Emit("ADDQ", _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) store_str(s string) { - i := 0 - m := rt.Str2Mem(s) - - /* 8-byte stores */ - for i <= len(m) - 8 { - self.Emit("MOVQ", jit.Imm(rt.Get64(m[i:])), _AX) // MOVQ $s[i:], AX - self.Emit("MOVQ", _AX, jit.Sib(_RP, _RL, 1, int64(i))) // MOVQ AX, i(RP)(RL) - i += 8 - } - - /* 4-byte stores */ - if i <= len(m) - 4 { - self.Emit("MOVL", jit.Imm(int64(rt.Get32(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVL $s[i:], i(RP)(RL) - i += 4 - } - - /* 2-byte stores */ - if i <= len(m) - 2 { - self.Emit("MOVW", jit.Imm(int64(rt.Get16(m[i:]))), jit.Sib(_RP, _RL, 1, int64(i))) // MOVW $s[i:], i(RP)(RL) - i += 2 - } - - /* last byte */ - if i < len(m) { - self.Emit("MOVB", jit.Imm(int64(m[i])), jit.Sib(_RP, _RL, 1, int64(i))) // MOVB $s[i:], i(RP)(RL) - } -} - -func (self *_Assembler) check_size(n int) { - self.check_size_rl(jit.Ptr(_RL, int64(n))) -} - -func (self *_Assembler) check_size_r(r obj.Addr, d int) { - self.check_size_rl(jit.Sib(_RL, r, 1, int64(d))) -} - -func (self *_Assembler) check_size_rl(v obj.Addr) { - idx := self.x - key := _LB_more_space_return + strconv.Itoa(idx) - - /* the following code relies on LR == R9 to work */ - if _LR.Reg != x86.REG_R9 { - panic("register allocation messed up: LR != R9") - } - - /* check for buffer capacity */ - self.x++ - self.Emit("LEAQ", v, _AX) // LEAQ $v, AX - self.Emit("CMPQ", _AX, _RC) // CMPQ AX, RC - self.Sjmp("JBE" , key) // JBE _more_space_return_{n} - self.slice_grow_ax(key) // GROW $key - self.Link(key) // _more_space_return_{n}: -} - -func (self *_Assembler) slice_grow_ax(ret string) { - self.Byte(0x4c, 0x8d, 0x0d) // LEAQ ?(PC), R9 - self.Sref(ret, 4) // .... 
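`store_str` copies a constant string with the widest immediate stores first (MOVQ, then MOVL/MOVW/MOVB for the tail). The same chunking in Go, as a sketch where a plain `copy` stands in for the emitted immediates:

```go
package main

import "fmt"

// storeStr writes s into dst using 8-, 4-, 2- and finally 1-byte
// chunks, mirroring the store widths store_str emits. After the
// 8-byte loop fewer than 8 bytes remain, so each smaller width is
// needed at most once.
func storeStr(dst []byte, s string) {
	i := 0
	for i <= len(s)-8 { // MOVQ $imm64
		copy(dst[i:i+8], s[i:i+8])
		i += 8
	}
	if i <= len(s)-4 { // MOVL $imm32
		copy(dst[i:i+4], s[i:i+4])
		i += 4
	}
	if i <= len(s)-2 { // MOVW $imm16
		copy(dst[i:i+2], s[i:i+2])
		i += 2
	}
	if i < len(s) { // MOVB $imm8
		dst[i] = s[i]
	}
}

func main() {
	dst := make([]byte, 16)
	storeStr(dst, `"hello, world!"`)
	fmt.Println(string(dst[:15])) // "hello, world!"
}
```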
&ret - self.Sjmp("JMP" , _LB_more_space) // JMP _more_space -} - -/** State Stack Helpers **/ - -const ( - _StateSize = int64(unsafe.Sizeof(_State{})) - _StackLimit = _MaxStack * _StateSize -) - -func (self *_Assembler) save_state() { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _CX) // MOVQ (ST), CX - self.Emit("LEAQ", jit.Ptr(_CX, _StateSize), _R8) // LEAQ _StateSize(CX), R8 - self.Emit("CMPQ", _R8, jit.Imm(_StackLimit)) // CMPQ R8, $_StackLimit - self.Sjmp("JAE" , _LB_error_too_deep) // JA _error_too_deep - self.Emit("MOVQ", _SP_x, jit.Sib(_ST, _CX, 1, 8)) // MOVQ SP.x, 8(ST)(CX) - self.Emit("MOVQ", _SP_f, jit.Sib(_ST, _CX, 1, 16)) // MOVQ SP.f, 16(ST)(CX) - self.WritePtr(0, _SP_p, jit.Sib(_ST, _CX, 1, 24)) // MOVQ SP.p, 24(ST)(CX) - self.WritePtr(1, _SP_q, jit.Sib(_ST, _CX, 1, 32)) // MOVQ SP.q, 32(ST)(CX) - self.Emit("MOVQ", _R8, jit.Ptr(_ST, 0)) // MOVQ R8, (ST) -} - -func (self *_Assembler) drop_state(decr int64) { - self.Emit("MOVQ" , jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("SUBQ" , jit.Imm(decr), _AX) // SUBQ $decr, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_ST, 0)) // MOVQ AX, (ST) - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 8), _SP_x) // MOVQ 8(ST)(AX), SP.x - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 16), _SP_f) // MOVQ 16(ST)(AX), SP.f - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 24), _SP_p) // MOVQ 24(ST)(AX), SP.p - self.Emit("MOVQ" , jit.Sib(_ST, _AX, 1, 32), _SP_q) // MOVQ 32(ST)(AX), SP.q - self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0 - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 8)) // MOVOU X0, 8(ST)(AX) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 24)) // MOVOU X0, 24(ST)(AX) -} - -/** Buffer Helpers **/ - -func (self *_Assembler) add_char(ch byte) { - self.Emit("MOVB", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVB $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) add_long(ch uint32, n int64) { - self.Emit("MOVL", jit.Imm(int64(ch)), jit.Sib(_RP, _RL, 1, 0)) // MOVL $ch, (RP)(RL) - self.Emit("ADDQ", jit.Imm(n), _RL) // ADDQ $n, RL -} - -func (self *_Assembler) add_text(ss string) { - self.store_str(ss) // TEXT $ss - self.Emit("ADDQ", jit.Imm(int64(len(ss))), _RL) // ADDQ ${len(ss)}, RL -} - -func (self *_Assembler) prep_buffer() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", _RL, jit.Ptr(_AX, 8)) // MOVQ RL, 8(AX) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) -} - -func (self *_Assembler) prep_buffer_c() { - self.Emit("MOVQ", _ARG_rb, _DI) // MOVQ rb<>+0(FP), DI - self.Emit("MOVQ", _RL, jit.Ptr(_DI, 8)) // MOVQ RL, 8(DI) -} - -func (self *_Assembler) save_buffer() { - self.Emit("MOVQ", _ARG_rb, _CX) // MOVQ rb<>+0(FP), CX - self.Emit("MOVQ", _RP, jit.Ptr(_CX, 0)) // MOVQ RP, (CX) - self.Emit("MOVQ", _RL, jit.Ptr(_CX, 8)) // MOVQ RL, 8(CX) - self.Emit("MOVQ", _RC, jit.Ptr(_CX, 16)) // MOVQ RC, 16(CX) -} - -func (self *_Assembler) load_buffer() { - self.Emit("MOVQ", _ARG_rb, _AX) // MOVQ rb<>+0(FP), AX - self.Emit("MOVQ", jit.Ptr(_AX, 0), _RP) // MOVQ (AX), RP - self.Emit("MOVQ", jit.Ptr(_AX, 8), _RL) // MOVQ 8(AX), RL - self.Emit("MOVQ", jit.Ptr(_AX, 16), _RC) // MOVQ 16(AX), RC -} - -/** Function Interface Helpers **/ - -func (self *_Assembler) call(pc obj.Addr) { - self.Emit("MOVQ", pc, _AX) // MOVQ $pc, AX - self.Rjmp("CALL", _AX) // CALL AX -} - -func (self *_Assembler) save_c() { - self.xsave(_REG_ffi...) // SAVE $REG_ffi -} - -func (self *_Assembler) call_c(pc obj.Addr) { - self.call(pc) // CALL $pc - self.xload(_REG_ffi...) 
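`save_state`/`drop_state` spill the four working registers to an explicit, fixed-size stack and fail with `_LB_error_too_deep` rather than recursing. A sketch of the same idea (field and constant names are illustrative, not sonic's):

```go
package main

import (
	"errors"
	"fmt"
	"unsafe"
)

// state mirrors the per-level fields save_state spills
// (SP.x, SP.f, SP.p, SP.q in the real code).
type state struct {
	x, f uint64
	p, q unsafe.Pointer
}

const maxStack = 4096 // stands in for _MaxStack

var errTooDeep = errors.New("encoder: max depth exceeded")

// stack is a fixed-capacity explicit stack: push fails with a
// "too deep" error instead of letting nesting overflow.
type stack struct {
	sp int
	st [maxStack]state
}

func (s *stack) push(v state) error {
	if s.sp >= maxStack {
		return errTooDeep // _LB_error_too_deep
	}
	s.st[s.sp] = v
	s.sp++
	return nil
}

func (s *stack) pop() state {
	s.sp--
	v := s.st[s.sp]
	s.st[s.sp] = state{} // zero the slot, as drop_state does with PXOR/MOVOU
	return v
}

func main() {
	var s stack
	_ = s.push(state{x: 1})
	fmt.Println(s.pop().x) // 1
}
```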
// LOAD $REG_ffi -} - -func (self *_Assembler) call_go(pc obj.Addr) { - self.xsave(_REG_all...) // SAVE $REG_all - self.call(pc) // CALL $pc - self.xload(_REG_all...) // LOAD $REG_all -} - -func (self *_Assembler) call_encoder(pc obj.Addr) { - self.xsave(_REG_enc...) // SAVE $REG_enc - self.call(pc) // CALL $pc - self.xload(_REG_enc...) // LOAD $REG_enc - self.load_buffer() // LOAD {buf} -} - -func (self *_Assembler) call_marshaler(fn obj.Addr, it *rt.GoType, vt reflect.Type) { - switch vt.Kind() { - case reflect.Interface : self.call_marshaler_i(fn, it) - case reflect.Ptr, reflect.Map: self.call_marshaler_v(fn, it, vt, true) - // struct/array of 1 direct iface type can be direct - default : self.call_marshaler_v(fn, it, vt, !rt.UnpackType(vt).Indirect()) - } -} - -func (self *_Assembler) call_marshaler_i(fn obj.Addr, it *rt.GoType) { - self.Emit("MOVQ" , jit.Gtype(it), _AX) // MOVQ $it, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ 8(SP.p), CX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_null_{n}") // JZ _null_{n} - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 16)) // MOVQ CX, 16(SP) - self.call_go(_F_assertI2I) // CALL_GO assertI2I - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVOU", jit.Ptr(_SP, 24), _X0) // MOVOU 24(SP), X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_encoder(fn) // CALL $fn - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_null_{n}") // _null_{n}: - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) call_marshaler_v(fn obj.Addr, it *rt.GoType, vt reflect.Type, deref bool) { - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVQ", jit.Itab(it, vt), _AX) // MOVQ $(itab(it, vt)), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - - /* dereference the pointer if needed */ - if !deref { - self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 16)) // MOVQ SP.p, 16(SP) - } else { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - } - - /* call the encoder, and perform error checks */ - self.Emit("MOVQ", _ARG_fv, _CX) // MOVQ ARG.fv, CX - self.Emit("MOVQ", _CX, jit.Ptr(_SP, 24)) // MOVQ CX, 24(SP) - self.call_encoder(fn) // CALL $fn - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -/** Builtin: _more_space **/ - -var ( - _T_byte = jit.Type(byteType) - _F_growslice = jit.Func(growslice) -) - -func (self *_Assembler) more_space() { - self.Link(_LB_more_space) - self.Emit("MOVQ", _T_byte, _AX) // MOVQ $_T_byte, _AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ _AX, (SP) - self.Emit("MOVQ", _RP, jit.Ptr(_SP, 8)) // MOVQ RP, 8(SP) - self.Emit("MOVQ", _RL, 
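`call_marshaler` routes any type implementing `json.Marshaler` through the interface and only falls back to generated code otherwise; the interface path also special-cases a nil interface by writing `null`. A reflect-free sketch of that dispatch, with the standard library standing in for the generated fast path:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// appendValue sketches call_marshaler's dispatch; std json stands in
// for the JIT-generated encoder on the non-marshaler path.
func appendValue(dst []byte, v interface{}) ([]byte, error) {
	if m, ok := v.(json.Marshaler); ok {
		b, err := m.MarshalJSON()
		if err != nil {
			return dst, err
		}
		return append(dst, b...), nil
	}
	b, err := json.Marshal(v)
	if err != nil {
		return dst, err
	}
	return append(dst, b...), nil
}

type version struct{ Major, Minor int }

func (v version) MarshalJSON() ([]byte, error) {
	return []byte(fmt.Sprintf("%q", fmt.Sprintf("%d.%d", v.Major, v.Minor))), nil
}

func main() {
	out, _ := appendValue(nil, version{1, 2})
	fmt.Println(string(out)) // "1.2"
}
```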
jit.Ptr(_SP, 16)) // MOVQ RL, 16(SP) - self.Emit("MOVQ", _RC, jit.Ptr(_SP, 24)) // MOVQ RC, 24(SP) - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.xsave(_REG_jsr...) // SAVE $REG_jsr - self.call(_F_growslice) // CALL $pc - self.xload(_REG_jsr...) // LOAD $REG_jsr - self.Emit("MOVQ", jit.Ptr(_SP, 40), _RP) // MOVQ 40(SP), RP - self.Emit("MOVQ", jit.Ptr(_SP, 48), _RL) // MOVQ 48(SP), RL - self.Emit("MOVQ", jit.Ptr(_SP, 56), _RC) // MOVQ 56(SP), RC - self.save_buffer() // SAVE {buf} - self.Rjmp("JMP" , _LR) // JMP LR -} - -/** Builtin Errors **/ - -var ( - _V_ERR_too_deep = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_too_deep)))) - _V_ERR_nan_or_infinite = jit.Imm(int64(uintptr(unsafe.Pointer(_ERR_nan_or_infinite)))) - _I_json_UnsupportedValueError = jit.Itab(rt.UnpackType(errorType), jsonUnsupportedValueType) -) - -func (self *_Assembler) error_too_deep() { - self.Link(_LB_error_too_deep) - self.Emit("MOVQ", _V_ERR_too_deep, _EP) // MOVQ $_V_ERR_too_deep, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_invalid_number() { - self.Link(_LB_error_invalid_number) - self.call_go(_F_error_number) // CALL_GO error_number - self.Emit("MOVQ", jit.Ptr(_SP, 16), _ET) // MOVQ 16(SP), ET - self.Emit("MOVQ", jit.Ptr(_SP, 24), _EP) // MOVQ 24(SP), EP - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -func (self *_Assembler) error_nan_or_infinite() { - self.Link(_LB_error_nan_or_infinite) - self.Emit("MOVQ", _V_ERR_nan_or_infinite, _EP) // MOVQ $_V_ERR_nan_or_infinite, EP - self.Emit("MOVQ", _I_json_UnsupportedValueError, _ET) // MOVQ $_I_json_UnsupportedValuError, ET - self.Sjmp("JMP" , _LB_error) // JMP _error -} - -/** String Encoding Routine **/ - -var ( - _F_quote = jit.Imm(int64(native.S_quote)) - _F_panic = jit.Func(goPanic) -) - -func (self *_Assembler) go_panic() { - self.Link(_LB_panic) - self.Emit("MOVQ", _SP_p, jit.Ptr(_SP, 8)) - self.call_go(_F_panic) -} - -func (self *_Assembler) encode_string(doubleQuote bool) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JZ" , "_str_empty_{n}") // JZ _str_empty_{n} - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) - self.Sjmp("JNE" , "_str_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0)) - self.Sjmp("JMP", _LB_panic) - self.Link("_str_next_{n}") - - /* openning quote, check for double quote */ - if !doubleQuote { - self.check_size_r(_AX, 2) // SIZE $2 - self.add_char('"') // CHAR $'"' - } else { - self.check_size_r(_AX, 6) // SIZE $6 - self.add_long(_IM_open, 3) // TEXT $`"\"` - } - - /* quoting loop */ - self.Emit("XORL", _AX, _AX) // XORL AX, AX - self.Emit("MOVQ", _AX, _VAR_sp) // MOVQ AX, sp - self.Link("_str_loop_{n}") // _str_loop_{n}: - self.save_c() // SAVE $REG_ffi - - /* load the output buffer first, and then input buffer, - * because the parameter registers collide with RP / RL / RC */ - self.Emit("MOVQ", _RC, _CX) // MOVQ RC, CX - self.Emit("SUBQ", _RL, _CX) // SUBQ RL, CX - self.Emit("MOVQ", _CX, _VAR_dn) // MOVQ CX, dn - self.Emit("LEAQ", jit.Sib(_RP, _RL, 1, 0), _DX) // LEAQ (RP)(RL), DX - self.Emit("LEAQ", _VAR_dn, _CX) // LEAQ dn, CX - self.Emit("MOVQ", _VAR_sp, _AX) // MOVQ sp, AX - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _DI) // MOVQ (SP.p), DI - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _SI) // MOVQ 8(SP.p), SI - self.Emit("ADDQ", _AX, _DI) // ADDQ AX, DI - 
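`_more_space` grows the output through `growslice` and resumes at the interrupted write; the `_str_space_` retry in `encode_string` grows to twice the current capacity before looping again. The growth contract, sketched in plain Go:

```go
package main

import "fmt"

// growTo mirrors the _more_space path: when a pending write would
// exceed capacity, reallocate (doubling, like the LEAQ (RC)(RC)
// retry) and let the caller continue where it left off.
func growTo(buf []byte, need int) []byte {
	if cap(buf)-len(buf) >= need {
		return buf
	}
	bigger := make([]byte, len(buf), 2*cap(buf)+need)
	copy(bigger, buf)
	return bigger
}

func main() {
	b := make([]byte, 0, 4)
	b = growTo(b, 16)
	fmt.Println(cap(b) >= 16) // true
}
```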
self.Emit("SUBQ", _AX, _SI) // SUBQ AX, SI - - /* set the flags based on `doubleQuote` */ - if !doubleQuote { - self.Emit("XORL", _R8, _R8) // XORL R8, R8 - } else { - self.Emit("MOVL", jit.Imm(types.F_DOUBLE_UNQUOTE), _R8) // MOVL ${types.F_DOUBLE_UNQUOTE}, R8 - } - - /* call the native quoter */ - self.call_c(_F_quote) // CALL quote - self.Emit("ADDQ" , _VAR_dn, _RL) // ADDQ dn, RL - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JS" , "_str_space_{n}") // JS _str_space_{n} - - /* close the string, check for double quote */ - if !doubleQuote { - self.check_size(1) // SIZE $1 - self.add_char('"') // CHAR $'"' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } else { - self.check_size(3) // SIZE $3 - self.add_text("\\\"\"") // TEXT $'\""' - self.Sjmp("JMP", "_str_end_{n}") // JMP _str_end_{n} - } - - /* not enough space to contain the quoted string */ - self.Link("_str_space_{n}") // _str_space_{n}: - self.Emit("NOTQ", _AX) // NOTQ AX - self.Emit("ADDQ", _AX, _VAR_sp) // ADDQ AX, sp - self.Emit("LEAQ", jit.Sib(_RC, _RC, 1, 0), _AX) // LEAQ (RC)(RC), AX - self.slice_grow_ax("_str_loop_{n}") // GROW _str_loop_{n} - - /* empty string, check for double quote */ - if !doubleQuote { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(2) // SIZE $2 - self.add_text("\"\"") // TEXT $'""' - self.Link("_str_end_{n}") // _str_end_{n}: - } else { - self.Link("_str_empty_{n}") // _str_empty_{n}: - self.check_size(6) // SIZE $6 - self.add_text("\"\\\"\\\"\"") // TEXT $'"\"\""' - self.Link("_str_end_{n}") // _str_end_{n}: - } -} - -/** OpCode Assembler Functions **/ - -var ( - _T_json_Marshaler = rt.UnpackType(jsonMarshalerType) - _T_encoding_TextMarshaler = rt.UnpackType(encodingTextMarshalerType) -) - -var ( - _F_f64toa = jit.Imm(int64(native.S_f64toa)) - _F_f32toa = jit.Imm(int64(native.S_f32toa)) - _F_i64toa = jit.Imm(int64(native.S_i64toa)) - _F_u64toa = jit.Imm(int64(native.S_u64toa)) - _F_b64encode = jit.Imm(int64(_subr__b64encode)) -) - -var ( - _F_memmove = jit.Func(memmove) - _F_error_number = jit.Func(error_number) - _F_isValidNumber = jit.Func(isValidNumber) -) - -var ( - _F_iteratorStop = jit.Func(iteratorStop) - _F_iteratorNext = jit.Func(iteratorNext) - _F_iteratorStart = jit.Func(iteratorStart) -) - -var ( - _F_encodeTypedPointer obj.Addr - _F_encodeJsonMarshaler obj.Addr - _F_encodeTextMarshaler obj.Addr -) - -const ( - _MODE_AVX2 = 1 << 2 -) - -func init() { - _F_encodeTypedPointer = jit.Func(encodeTypedPointer) - _F_encodeJsonMarshaler = jit.Func(encodeJsonMarshaler) - _F_encodeTextMarshaler = jit.Func(encodeTextMarshaler) -} - -func (self *_Assembler) _asm_OP_null(_ *_Instr) { - self.check_size(4) - self.Emit("MOVL", jit.Imm(_IM_null), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'null', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL -} - -func (self *_Assembler) _asm_OP_empty_arr(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_arr_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_arr_end_{n}") - self.Link("_empty_arr_{n}") - self.check_size(2) - self.Emit("MOVW", jit.Imm(_IM_array), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_arr_end_{n}") -} - -func (self *_Assembler) _asm_OP_empty_obj(_ *_Instr) { - self.Emit("BTQ", jit.Imm(int64(bitNoNullSliceOrMap)), _ARG_fv) - self.Sjmp("JC", "_empty_obj_{n}") - self._asm_OP_null(nil) - self.Sjmp("JMP", "_empty_obj_end_{n}") - self.Link("_empty_obj_{n}") - self.check_size(2) - self.Emit("MOVW", 
jit.Imm(_IM_object), jit.Sib(_RP, _RL, 1, 0)) - self.Emit("ADDQ", jit.Imm(2), _RL) - self.Link("_empty_obj_end_{n}") -} - -func (self *_Assembler) _asm_OP_bool(_ *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Sjmp("JE" , "_false_{n}") // JE _false_{n} - self.check_size(4) // SIZE $4 - self.Emit("MOVL", jit.Imm(_IM_true), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'true', (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(4), _RL) // ADDQ $4, RL - self.Sjmp("JMP" , "_end_{n}") // JMP _end_{n} - self.Link("_false_{n}") // _false_{n}: - self.check_size(5) // SIZE $5 - self.Emit("MOVL", jit.Imm(_IM_fals), jit.Sib(_RP, _RL, 1, 0)) // MOVL $'fals', (RP)(RL*1) - self.Emit("MOVB", jit.Imm('e'), jit.Sib(_RP, _RL, 1, 4)) // MOVB $'e', 4(RP)(RL*1) - self.Emit("ADDQ", jit.Imm(5), _RL) // ADDQ $5, RL - self.Link("_end_{n}") // _end_{n}: -} - -func (self *_Assembler) _asm_OP_i8(_ *_Instr) { - self.store_int(4, _F_i64toa, "MOVBQSX") -} - -func (self *_Assembler) _asm_OP_i16(_ *_Instr) { - self.store_int(6, _F_i64toa, "MOVWQSX") -} - -func (self *_Assembler) _asm_OP_i32(_ *_Instr) { - self.store_int(17, _F_i64toa, "MOVLQSX") -} - -func (self *_Assembler) _asm_OP_i64(_ *_Instr) { - self.store_int(21, _F_i64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_u8(_ *_Instr) { - self.store_int(3, _F_u64toa, "MOVBQZX") -} - -func (self *_Assembler) _asm_OP_u16(_ *_Instr) { - self.store_int(5, _F_u64toa, "MOVWQZX") -} - -func (self *_Assembler) _asm_OP_u32(_ *_Instr) { - self.store_int(16, _F_u64toa, "MOVLQZX") -} - -func (self *_Assembler) _asm_OP_u64(_ *_Instr) { - self.store_int(20, _F_u64toa, "MOVQ") -} - -func (self *_Assembler) _asm_OP_f32(_ *_Instr) { - self.check_size(32) - self.Emit("MOVL" , jit.Ptr(_SP_p, 0), _AX) // MOVL (SP.p), AX - self.Emit("ANDL" , jit.Imm(_FM_exp32), _AX) // ANDL $_FM_exp32, AX - self.Emit("XORL" , jit.Imm(_FM_exp32), _AX) // XORL $_FM_exp32, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSS" , jit.Ptr(_SP_p, 0), _X0) // MOVSS (SP.p), X0 - self.call_c(_F_f32toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_f64(_ *_Instr) { - self.check_size(32) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Imm(_FM_exp64), _CX) // MOVQ $_FM_exp64, CX - self.Emit("ANDQ" , _CX, _AX) // ANDQ CX, AX - self.Emit("XORQ" , _CX, _AX) // XORQ CX, AX - self.Sjmp("JZ" , _LB_error_nan_or_infinite) // JZ _error_nan_or_infinite - self.save_c() // SAVE $C_regs - self.rbuf_di() // MOVQ RP, DI - self.Emit("MOVSD" , jit.Ptr(_SP_p, 0), _X0) // MOVSD (SP.p), X0 - self.call_c(_F_f64toa) // CALL_C f64toa - self.Emit("ADDQ" , _AX, _RL) // ADDQ AX, RL -} - -func (self *_Assembler) _asm_OP_str(_ *_Instr) { - self.encode_string(false) -} - -func (self *_Assembler) _asm_OP_bin(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.Emit("ADDQ", jit.Imm(2), _AX) // ADDQ $2, AX - self.Emit("MOVQ", jit.Imm(_IM_mulv), _CX) // MOVQ $_MF_mulv, CX - self.Emit("MOVQ", _DX, _R8) // MOVQ DX, R8 - self.From("MULQ", _CX) // MULQ CX - self.Emit("LEAQ", jit.Sib(_DX, _DX, 1, 1), _AX) // LEAQ 1(DX)(DX), AX - self.Emit("ORQ" , jit.Imm(2), _AX) // ORQ $2, AX - self.Emit("MOVQ", _R8, _DX) // MOVQ R8, DX - self.check_size_r(_AX, 0) // SIZE AX - self.add_char('"') // CHAR $'"' - self.save_c() // SAVE $REG_ffi - self.prep_buffer_c() // MOVE {buf}, DI - self.Emit("MOVQ", _SP_p, _SI) // MOVQ 
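The sizes passed to `store_int` reserve the worst-case decimal width up front (for example 21 bytes for an `int64`, whose longest rendering is the 20-character `-9223372036854775808`), so the native `i64toa`/`u64toa` never need a bounds check. The same reservation in Go:

```go
package main

import (
	"fmt"
	"math"
	"strconv"
)

func main() {
	// _asm_OP_i64 calls check_size(21) first: len("-9223372036854775808")
	// is 20, so 21 bytes always suffice for any int64, with slack.
	buf := make([]byte, 0, 21)
	buf = strconv.AppendInt(buf, math.MinInt64, 10)
	fmt.Println(len(buf), string(buf)) // 20 -9223372036854775808
}
```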
SP.p, SI - - /* check for AVX2 support */ - if !cpu.HasAVX2 { - self.Emit("XORL", _DX, _DX) // XORL DX, DX - } else { - self.Emit("MOVL", jit.Imm(_MODE_AVX2), _DX) // MOVL $_MODE_AVX2, DX - } - - /* call the encoder */ - self.call_c(_F_b64encode) // CALL b64encode - self.load_buffer() // LOAD {buf} - self.add_char('"') // CHAR $'"' -} - -func (self *_Assembler) _asm_OP_quote(_ *_Instr) { - self.encode_string(true) -} - -func (self *_Assembler) _asm_OP_number(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _CX) // MOVQ (SP.p), CX - self.Emit("TESTQ", _CX, _CX) // TESTQ CX, CX - self.Sjmp("JZ" , "_empty_{n}") // JZ _empty_{n} - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Sjmp("JNZ" , "_number_next_{n}") - self.Emit("MOVQ", jit.Imm(int64(panicNilPointerOfNonEmptyString)), jit.Ptr(_SP, 0)) - self.Sjmp("JMP", _LB_panic) - self.Link("_number_next_{n}") - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.call_go(_F_isValidNumber) // CALL_GO isValidNumber - self.Emit("CMPB" , jit.Ptr(_SP, 16), jit.Imm(0)) // CMPB 16(SP), $0 - self.Sjmp("JE" , _LB_error_invalid_number) // JE _error_invalid_number - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _AX) // MOVQ 8(SP.p), AX - self.check_size_r(_AX, 0) // SIZE AX - self.Emit("LEAQ" , jit.Sib(_RP, _RL, 1, 0), _AX) // LEAQ (RP)(RL), AX - self.Emit("ADDQ" , jit.Ptr(_SP_p, 8), _RL) // ADDQ 8(SP.p), RL - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVOU", jit.Ptr(_SP_p, 0), _X0) // MOVOU (SP.p), X0 - self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP) - self.call_go(_F_memmove) // CALL_GO memmove - self.Sjmp("JMP" , "_done_{n}") // JMP _done_{n} - self.Link("_empty_{n}") // _empty_{n}: - self.check_size(1) // SIZE $1 - self.add_char('0') // CHAR $'0' - self.Link("_done_{n}") // _done_{n}: -} - -func (self *_Assembler) _asm_OP_eface(_ *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP)s - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_iface(_ *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP) - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("MOVQ" , jit.Ptr(_AX, 8), _AX) // MOVQ 8(AX), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - self.Emit("LEAQ" , jit.Ptr(_SP_p, 8), _AX) // LEAQ 8(SP.p), AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, 
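`_asm_OP_bin` sizes its output before encoding: the MULQ by `_IM_mulv` is a strength-reduced divide-by-3, reserving roughly 4·n/3 bytes plus the two surrounding quotes (with a little slack). The equivalent bound via the standard library:

```go
package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// n input bytes base64-encode to 4*ceil(n/3) characters;
	// _asm_OP_bin reserves at least that many plus 2 quote bytes.
	n := 10
	need := base64.StdEncoding.EncodedLen(n) + 2
	fmt.Println(need) // 18
}
```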
_ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_byte(p *_Instr) { - self.check_size(1) - self.Emit("MOVB", jit.Imm(p.i64()), jit.Sib(_RP, _RL, 1, 0)) // MOVL p.vi(), (RP)(RL*1) - self.Emit("ADDQ", jit.Imm(1), _RL) // ADDQ $1, RL -} - -func (self *_Assembler) _asm_OP_text(p *_Instr) { - self.check_size(len(p.vs())) // SIZE ${len(p.vs())} - self.add_text(p.vs()) // TEXT ${p.vs()} -} - -func (self *_Assembler) _asm_OP_deref(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p -} - -func (self *_Assembler) _asm_OP_index(p *_Instr) { - self.Emit("MOVQ", jit.Imm(p.i64()), _AX) // MOVQ $p.vi(), AX - self.Emit("ADDQ", _AX, _SP_p) // ADDQ AX, SP.p -} - -func (self *_Assembler) _asm_OP_load(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_ST, 0), _AX) // MOVQ (ST), AX - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -24), _SP_x) // MOVQ -24(ST)(AX), SP.x - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, -8), _SP_p) // MOVQ -8(ST)(AX), SP.p - self.Emit("MOVQ", jit.Sib(_ST, _AX, 1, 0), _SP_q) // MOVQ (ST)(AX), SP.q -} - -func (self *_Assembler) _asm_OP_save(_ *_Instr) { - self.save_state() -} - -func (self *_Assembler) _asm_OP_drop(_ *_Instr) { - self.drop_state(_StateSize) -} - -func (self *_Assembler) _asm_OP_drop_2(_ *_Instr) { - self.drop_state(_StateSize * 2) // DROP $(_StateSize * 2) - self.Emit("MOVOU", _X0, jit.Sib(_ST, _AX, 1, 56)) // MOVOU X0, 56(ST)(AX) -} - -func (self *_Assembler) _asm_OP_recurse(p *_Instr) { - self.prep_buffer() // MOVE {buf}, (SP) - vt, pv := p.vp() - self.Emit("MOVQ", jit.Type(vt), _AX) // MOVQ $(type(p.vt())), AX - self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP) - - /* check for indirection */ - if !rt.UnpackType(vt).Indirect() { - self.Emit("MOVQ", _SP_p, _AX) // MOVQ SP.p, AX - } else { - self.Emit("MOVQ", _SP_p, _VAR_vp) // MOVQ SP.p, 48(SP) - self.Emit("LEAQ", _VAR_vp, _AX) // LEAQ 48(SP), AX - } - - /* call the encoder */ - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.Emit("MOVQ" , _ST, jit.Ptr(_SP, 24)) // MOVQ ST, 24(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - if pv { - self.Emit("BTCQ", jit.Imm(bitPointerValue), _AX) // BTCQ $1, AX - } - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 32)) // MOVQ AX, 32(SP) - self.call_encoder(_F_encodeTypedPointer) // CALL encodeTypedPointer - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _ET) // MOVQ 40(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 48), _EP) // MOVQ 48(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_is_nil(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_nil_p1(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 8), jit.Imm(0)) // CMPQ 8(SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_1(p *_Instr) { - self.Emit("CMPB", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPB (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_2(p *_Instr) { - self.Emit("CMPW", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPW (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_4(p *_Instr) { - self.Emit("CMPL", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPL (SP.p), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_8(p *_Instr) { - self.Emit("CMPQ", jit.Ptr(_SP_p, 0), jit.Imm(0)) // CMPQ (SP.p), $0 - 
self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_is_zero_map(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _AX) // MOVQ (SP.p), AX - self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("CMPQ" , jit.Ptr(_AX, 0), jit.Imm(0)) // CMPQ (AX), $0 - self.Xjmp("JE" , p.vi()) // JE p.vi() -} - -func (self *_Assembler) _asm_OP_goto(p *_Instr) { - self.Xjmp("JMP", p.vi()) -} - -func (self *_Assembler) _asm_OP_map_iter(p *_Instr) { - self.Emit("MOVQ" , jit.Type(p.vt()), _AX) // MOVQ $p.vt(), AX - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _CX) // MOVQ (SP.p), CX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP) - self.Emit("MOVQ" , _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP) - self.Emit("MOVQ" , _ARG_fv, _AX) // MOVQ fv, AX - self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 16)) // MOVQ AX, 16(SP) - self.call_go(_F_iteratorStart) // CALL_GO iteratorStart - self.Emit("MOVQ" , jit.Ptr(_SP, 24), _SP_q) // MOVQ 24(SP), SP.q - self.Emit("MOVQ" , jit.Ptr(_SP, 32), _ET) // MOVQ 32(SP), ET - self.Emit("MOVQ" , jit.Ptr(_SP, 40), _EP) // MOVQ 40(SP), EP - self.Emit("TESTQ", _ET, _ET) // TESTQ ET, ET - self.Sjmp("JNZ" , _LB_error) // JNZ _error -} - -func (self *_Assembler) _asm_OP_map_stop(_ *_Instr) { - self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, 0(SP) - self.call_go(_F_iteratorStop) // CALL_GO iteratorStop - self.Emit("XORL", _SP_q, _SP_q) // XORL SP.q, SP.q -} - -func (self *_Assembler) _asm_OP_map_check_key(p *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_q, 0), _SP_p) // MOVQ (SP.q), SP.p - self.Emit("TESTQ", _SP_p, _SP_p) // TESTQ SP.p, SP.p - self.Xjmp("JZ" , p.vi()) // JNZ p.vi() -} - -func (self *_Assembler) _asm_OP_map_write_key(p *_Instr) { - self.Emit("BTQ", jit.Imm(bitSortMapKeys), _ARG_fv) // BTQ ${SortMapKeys}, fv - self.Sjmp("JNC", "_unordered_key_{n}") // JNC _unordered_key_{n} - self.encode_string(false) // STR $false - self.Xjmp("JMP", p.vi()) // JMP ${p.vi()} - self.Link("_unordered_key_{n}") // _unordered_key_{n}: -} - -func (self *_Assembler) _asm_OP_map_value_next(_ *_Instr) { - self.Emit("MOVQ", jit.Ptr(_SP_q, 8), _SP_p) // MOVQ 8(SP.q), SP.p - self.Emit("MOVQ", _SP_q, jit.Ptr(_SP, 0)) // MOVQ SP.q, (SP) - self.call_go(_F_iteratorNext) // CALL_GO iteratorNext -} - -func (self *_Assembler) _asm_OP_slice_len(_ *_Instr) { - self.Emit("MOVQ" , jit.Ptr(_SP_p, 8), _SP_x) // MOVQ 8(SP.p), SP.x - self.Emit("MOVQ" , jit.Ptr(_SP_p, 0), _SP_p) // MOVQ (SP.p), SP.p - self.Emit("ORQ" , jit.Imm(1 << _S_init), _SP_f) // ORQ $(1<<_S_init), SP.f -} - -func (self *_Assembler) _asm_OP_slice_next(p *_Instr) { - self.Emit("TESTQ" , _SP_x, _SP_x) // TESTQ SP.x, SP.x - self.Xjmp("JZ" , p.vi()) // JZ p.vi() - self.Emit("SUBQ" , jit.Imm(1), _SP_x) // SUBQ $1, SP.x - self.Emit("BTRQ" , jit.Imm(_S_init), _SP_f) // BTRQ $_S_init, SP.f - self.Emit("LEAQ" , jit.Ptr(_SP_p, int64(p.vlen())), _AX) // LEAQ $(p.vlen())(SP.p), AX - self.Emit("CMOVQCC", _AX, _SP_p) // CMOVQNC AX, SP.p -} - -func (self *_Assembler) _asm_OP_marshal(p *_Instr) { - self.call_marshaler(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_p(p *_Instr) { - if p.vk() != reflect.Ptr { - panic("marshal_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeJsonMarshaler, _T_json_Marshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_marshal_text(p *_Instr) { - self.call_marshaler(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt()) -} - -func (self *_Assembler) _asm_OP_marshal_text_p(p *_Instr) { - if 
p.vk() != reflect.Ptr { - panic("marshal_text_p: invalid type") - } else { - self.call_marshaler_v(_F_encodeTextMarshaler, _T_encoding_TextMarshaler, p.vt(), false) - } -} - -func (self *_Assembler) _asm_OP_cond_set(_ *_Instr) { - self.Emit("ORQ", jit.Imm(1 << _S_cond), _SP_f) // ORQ $(1<<_S_cond), SP.f -} - -func (self *_Assembler) _asm_OP_cond_testc(p *_Instr) { - self.Emit("BTRQ", jit.Imm(_S_cond), _SP_f) // BTRQ $_S_cond, SP.f - self.Xjmp("JC" , p.vi()) -} - -func (self *_Assembler) print_gc(i int, p1 *_Instr, p2 *_Instr) { - self.Emit("MOVQ", jit.Imm(int64(p2.op())), jit.Ptr(_SP, 16))// MOVQ $(p2.op()), 16(SP) - self.Emit("MOVQ", jit.Imm(int64(p1.op())), jit.Ptr(_SP, 8)) // MOVQ $(p1.op()), 8(SP) - self.Emit("MOVQ", jit.Imm(int64(i)), jit.Ptr(_SP, 0)) // MOVQ $(i), (SP) - self.call_go(_F_println) -} diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go b/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go index ca0be8f4..902fbc98 100644 --- a/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go +++ b/vendor/github.com/bytedance/sonic/internal/encoder/compiler.go @@ -17,869 +17,660 @@ package encoder import ( - `fmt` - `reflect` - `strconv` - `strings` - `unsafe` - - `github.com/bytedance/sonic/internal/resolver` - `github.com/bytedance/sonic/internal/rt` - `github.com/bytedance/sonic/option` + "reflect" + "unsafe" + + "github.com/bytedance/sonic/internal/encoder/ir" + "github.com/bytedance/sonic/internal/encoder/vars" + "github.com/bytedance/sonic/internal/encoder/vm" + "github.com/bytedance/sonic/internal/resolver" + "github.com/bytedance/sonic/internal/rt" + "github.com/bytedance/sonic/option" ) -type _Op uint8 - -const ( - _OP_null _Op = iota + 1 - _OP_empty_arr - _OP_empty_obj - _OP_bool - _OP_i8 - _OP_i16 - _OP_i32 - _OP_i64 - _OP_u8 - _OP_u16 - _OP_u32 - _OP_u64 - _OP_f32 - _OP_f64 - _OP_str - _OP_bin - _OP_quote - _OP_number - _OP_eface - _OP_iface - _OP_byte - _OP_text - _OP_deref - _OP_index - _OP_load - _OP_save - _OP_drop - _OP_drop_2 - _OP_recurse - _OP_is_nil - _OP_is_nil_p1 - _OP_is_zero_1 - _OP_is_zero_2 - _OP_is_zero_4 - _OP_is_zero_8 - _OP_is_zero_map - _OP_goto - _OP_map_iter - _OP_map_stop - _OP_map_check_key - _OP_map_write_key - _OP_map_value_next - _OP_slice_len - _OP_slice_next - _OP_marshal - _OP_marshal_p - _OP_marshal_text - _OP_marshal_text_p - _OP_cond_set - _OP_cond_testc -) - -const ( - _INT_SIZE = 32 << (^uint(0) >> 63) - _PTR_SIZE = 32 << (^uintptr(0) >> 63) - _PTR_BYTE = unsafe.Sizeof(uintptr(0)) -) - -const ( - _MAX_ILBUF = 100000 // cutoff at 100k of IL instructions - _MAX_FIELDS = 50 // cutoff at 50 fields struct -) - -var _OpNames = [256]string { - _OP_null : "null", - _OP_empty_arr : "empty_arr", - _OP_empty_obj : "empty_obj", - _OP_bool : "bool", - _OP_i8 : "i8", - _OP_i16 : "i16", - _OP_i32 : "i32", - _OP_i64 : "i64", - _OP_u8 : "u8", - _OP_u16 : "u16", - _OP_u32 : "u32", - _OP_u64 : "u64", - _OP_f32 : "f32", - _OP_f64 : "f64", - _OP_str : "str", - _OP_bin : "bin", - _OP_quote : "quote", - _OP_number : "number", - _OP_eface : "eface", - _OP_iface : "iface", - _OP_byte : "byte", - _OP_text : "text", - _OP_deref : "deref", - _OP_index : "index", - _OP_load : "load", - _OP_save : "save", - _OP_drop : "drop", - _OP_drop_2 : "drop_2", - _OP_recurse : "recurse", - _OP_is_nil : "is_nil", - _OP_is_nil_p1 : "is_nil_p1", - _OP_is_zero_1 : "is_zero_1", - _OP_is_zero_2 : "is_zero_2", - _OP_is_zero_4 : "is_zero_4", - _OP_is_zero_8 : "is_zero_8", - _OP_is_zero_map : "is_zero_map", - _OP_goto : "goto", - _OP_map_iter 
: "map_iter", - _OP_map_stop : "map_stop", - _OP_map_check_key : "map_check_key", - _OP_map_write_key : "map_write_key", - _OP_map_value_next : "map_value_next", - _OP_slice_len : "slice_len", - _OP_slice_next : "slice_next", - _OP_marshal : "marshal", - _OP_marshal_p : "marshal_p", - _OP_marshal_text : "marshal_text", - _OP_marshal_text_p : "marshal_text_p", - _OP_cond_set : "cond_set", - _OP_cond_testc : "cond_testc", -} - -func (self _Op) String() string { - if ret := _OpNames[self]; ret != "" { - return ret - } else { - return "" - } -} - -func _OP_int() _Op { - switch _INT_SIZE { - case 32: return _OP_i32 - case 64: return _OP_i64 - default: panic("unsupported int size") - } -} - -func _OP_uint() _Op { - switch _INT_SIZE { - case 32: return _OP_u32 - case 64: return _OP_u64 - default: panic("unsupported uint size") - } -} - -func _OP_uintptr() _Op { - switch _PTR_SIZE { - case 32: return _OP_u32 - case 64: return _OP_u64 - default: panic("unsupported pointer size") - } -} - -func _OP_is_zero_ints() _Op { - switch _INT_SIZE { - case 32: return _OP_is_zero_4 - case 64: return _OP_is_zero_8 - default: panic("unsupported integer size") - } -} - -type _Instr struct { - u uint64 // union {op: 8, _: 8, vi: 48}, vi maybe int or len(str) - p unsafe.Pointer // maybe GoString.Ptr, or *GoType -} - -func packOp(op _Op) uint64 { - return uint64(op) << 56 -} - -func newInsOp(op _Op) _Instr { - return _Instr{u: packOp(op)} -} - -func newInsVi(op _Op, vi int) _Instr { - return _Instr{u: packOp(op) | rt.PackInt(vi)} -} - -func newInsVs(op _Op, vs string) _Instr { - return _Instr { - u: packOp(op) | rt.PackInt(len(vs)), - p: (*rt.GoString)(unsafe.Pointer(&vs)).Ptr, - } -} - -func newInsVt(op _Op, vt reflect.Type) _Instr { - return _Instr { - u: packOp(op), - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -func newInsVp(op _Op, vt reflect.Type, pv bool) _Instr { - i := 0 - if pv { - i = 1 - } - return _Instr { - u: packOp(op) | rt.PackInt(i), - p: unsafe.Pointer(rt.UnpackType(vt)), - } -} - -func (self _Instr) op() _Op { - return _Op(self.u >> 56) -} - -func (self _Instr) vi() int { - return rt.UnpackInt(self.u) -} - -func (self _Instr) vf() uint8 { - return (*rt.GoType)(self.p).KindFlags -} - -func (self _Instr) vs() (v string) { - (*rt.GoString)(unsafe.Pointer(&v)).Ptr = self.p - (*rt.GoString)(unsafe.Pointer(&v)).Len = self.vi() - return -} - -func (self _Instr) vk() reflect.Kind { - return (*rt.GoType)(self.p).Kind() -} - -func (self _Instr) vt() reflect.Type { - return (*rt.GoType)(self.p).Pack() -} - -func (self _Instr) vp() (vt reflect.Type, pv bool) { - return (*rt.GoType)(self.p).Pack(), rt.UnpackInt(self.u) == 1 -} - -func (self _Instr) i64() int64 { - return int64(self.vi()) -} - -func (self _Instr) vlen() int { - return int((*rt.GoType)(self.p).Size) -} - -func (self _Instr) isBranch() bool { - switch self.op() { - case _OP_goto : fallthrough - case _OP_is_nil : fallthrough - case _OP_is_nil_p1 : fallthrough - case _OP_is_zero_1 : fallthrough - case _OP_is_zero_2 : fallthrough - case _OP_is_zero_4 : fallthrough - case _OP_is_zero_8 : fallthrough - case _OP_map_check_key : fallthrough - case _OP_map_write_key : fallthrough - case _OP_slice_next : fallthrough - case _OP_cond_testc : return true - default : return false - } -} - -func (self _Instr) disassemble() string { - switch self.op() { - case _OP_byte : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.QuoteRune(rune(self.vi()))) - case _OP_text : return fmt.Sprintf("%-18s%s", self.op().String(), strconv.Quote(self.vs())) - case 
_OP_index : return fmt.Sprintf("%-18s%d", self.op().String(), self.vi()) - case _OP_recurse : fallthrough - case _OP_map_iter : fallthrough - case _OP_marshal : fallthrough - case _OP_marshal_p : fallthrough - case _OP_marshal_text : fallthrough - case _OP_marshal_text_p : return fmt.Sprintf("%-18s%s", self.op().String(), self.vt()) - case _OP_goto : fallthrough - case _OP_is_nil : fallthrough - case _OP_is_nil_p1 : fallthrough - case _OP_is_zero_1 : fallthrough - case _OP_is_zero_2 : fallthrough - case _OP_is_zero_4 : fallthrough - case _OP_is_zero_8 : fallthrough - case _OP_is_zero_map : fallthrough - case _OP_cond_testc : fallthrough - case _OP_map_check_key : fallthrough - case _OP_map_write_key : return fmt.Sprintf("%-18sL_%d", self.op().String(), self.vi()) - case _OP_slice_next : return fmt.Sprintf("%-18sL_%d, %s", self.op().String(), self.vi(), self.vt()) - default : return self.op().String() - } -} - -type ( - _Program []_Instr -) - -func (self _Program) pc() int { - return len(self) -} - -func (self _Program) tag(n int) { - if n >= _MaxStack { - panic("type nesting too deep") - } -} - -func (self _Program) pin(i int) { - v := &self[i] - v.u &= 0xffff000000000000 - v.u |= rt.PackInt(self.pc()) -} - -func (self _Program) rel(v []int) { - for _, i := range v { - self.pin(i) - } -} - -func (self *_Program) add(op _Op) { - *self = append(*self, newInsOp(op)) -} - -func (self *_Program) key(op _Op) { - *self = append(*self, - newInsVi(_OP_byte, '"'), - newInsOp(op), - newInsVi(_OP_byte, '"'), - ) -} - -func (self *_Program) int(op _Op, vi int) { - *self = append(*self, newInsVi(op, vi)) -} - -func (self *_Program) str(op _Op, vs string) { - *self = append(*self, newInsVs(op, vs)) -} - -func (self *_Program) rtt(op _Op, vt reflect.Type) { - *self = append(*self, newInsVt(op, vt)) -} - -func (self *_Program) vp(op _Op, vt reflect.Type, pv bool) { - *self = append(*self, newInsVp(op, vt, pv)) -} - -func (self _Program) disassemble() string { - nb := len(self) - tab := make([]bool, nb + 1) - ret := make([]string, 0, nb + 1) - - /* prescan to get all the labels */ - for _, ins := range self { - if ins.isBranch() { - tab[ins.vi()] = true - } - } - - /* disassemble each instruction */ - for i, ins := range self { - if !tab[i] { - ret = append(ret, "\t" + ins.disassemble()) - } else { - ret = append(ret, fmt.Sprintf("L_%d:\n\t%s", i, ins.disassemble())) - } - } - - /* add the last label, if needed */ - if tab[nb] { - ret = append(ret, fmt.Sprintf("L_%d:", nb)) - } - - /* add an "end" indicator, and join all the strings */ - return strings.Join(append(ret, "\tend"), "\n") -} - -type _Compiler struct { - opts option.CompileOptions - pv bool - tab map[reflect.Type]bool - rec map[reflect.Type]uint8 -} - -func newCompiler() *_Compiler { - return &_Compiler { - opts: option.DefaultCompileOptions(), - tab: map[reflect.Type]bool{}, - rec: map[reflect.Type]uint8{}, - } -} - -func (self *_Compiler) apply(opts option.CompileOptions) *_Compiler { - self.opts = opts - if self.opts.RecursiveDepth > 0 { - self.rec = map[reflect.Type]uint8{} - } - return self -} - -func (self *_Compiler) rescue(ep *error) { - if val := recover(); val != nil { - if err, ok := val.(error); ok { - *ep = err - } else { - panic(val) - } - } -} - -func (self *_Compiler) compile(vt reflect.Type, pv bool) (ret _Program, err error) { - defer self.rescue(&err) - self.compileOne(&ret, 0, vt, pv) - return -} - -func (self *_Compiler) compileOne(p *_Program, sp int, vt reflect.Type, pv bool) { - if self.tab[vt] { - p.vp(_OP_recurse, vt, 
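`_Program.pin` backpatches forward branches: a jump is emitted with a placeholder operand, its index remembered, and the operand overwritten with the current `pc` once the target is reached. Reduced to a sketch:

```go
package main

import "fmt"

// A tiny backpatching scheme like _Program.pin: emit a branch with a
// placeholder target, remember its index, and patch the operand once
// the target pc is known.
type prog []int // each entry: a jump target (-1 while unresolved)

func (p *prog) emitJmp() int {
	*p = append(*p, -1) // target unknown yet
	return len(*p) - 1
}

func (p prog) pin(i int) {
	p[i] = len(p) // patch the branch at i to jump to the current pc
}

func main() {
	var p prog
	j := p.emitJmp() // forward jump, target unknown
	p = append(p, 0) // some instruction in between
	p.pin(j)         // target now known: pc == 2
	fmt.Println(p)   // [2 0]
}
```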
pv) - } else { - self.compileRec(p, sp, vt, pv) - } -} - -func (self *_Compiler) compileRec(p *_Program, sp int, vt reflect.Type, pv bool) { - pr := self.pv - pt := reflect.PtrTo(vt) - - /* check for addressable `json.Marshaler` with pointer receiver */ - if pv && pt.Implements(jsonMarshalerType) { - p.rtt(_OP_marshal_p, pt) - return - } - - /* check for `json.Marshaler` */ - if vt.Implements(jsonMarshalerType) { - self.compileMarshaler(p, _OP_marshal, vt, jsonMarshalerType) - return - } - - /* check for addressable `encoding.TextMarshaler` with pointer receiver */ - if pv && pt.Implements(encodingTextMarshalerType) { - p.rtt(_OP_marshal_text_p, pt) - return - } - - /* check for `encoding.TextMarshaler` */ - if vt.Implements(encodingTextMarshalerType) { - self.compileMarshaler(p, _OP_marshal_text, vt, encodingTextMarshalerType) - return - } - - /* enter the recursion, and compile the type */ - self.pv = pv - self.tab[vt] = true - self.compileOps(p, sp, vt) - - /* exit the recursion */ - self.pv = pr - delete(self.tab, vt) -} - -func (self *_Compiler) compileOps(p *_Program, sp int, vt reflect.Type) { - switch vt.Kind() { - case reflect.Bool : p.add(_OP_bool) - case reflect.Int : p.add(_OP_int()) - case reflect.Int8 : p.add(_OP_i8) - case reflect.Int16 : p.add(_OP_i16) - case reflect.Int32 : p.add(_OP_i32) - case reflect.Int64 : p.add(_OP_i64) - case reflect.Uint : p.add(_OP_uint()) - case reflect.Uint8 : p.add(_OP_u8) - case reflect.Uint16 : p.add(_OP_u16) - case reflect.Uint32 : p.add(_OP_u32) - case reflect.Uint64 : p.add(_OP_u64) - case reflect.Uintptr : p.add(_OP_uintptr()) - case reflect.Float32 : p.add(_OP_f32) - case reflect.Float64 : p.add(_OP_f64) - case reflect.String : self.compileString (p, vt) - case reflect.Array : self.compileArray (p, sp, vt.Elem(), vt.Len()) - case reflect.Interface : self.compileInterface (p, vt) - case reflect.Map : self.compileMap (p, sp, vt) - case reflect.Ptr : self.compilePtr (p, sp, vt.Elem()) - case reflect.Slice : self.compileSlice (p, sp, vt.Elem()) - case reflect.Struct : self.compileStruct (p, sp, vt) - default : panic (error_type(vt)) - } -} - -func (self *_Compiler) compileNil(p *_Program, sp int, vt reflect.Type, nil_op _Op, fn func(*_Program, int, reflect.Type)) { - x := p.pc() - p.add(_OP_is_nil) - fn(p, sp, vt) - e := p.pc() - p.add(_OP_goto) - p.pin(x) - p.add(nil_op) - p.pin(e) -} - -func (self *_Compiler) compilePtr(p *_Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, _OP_null, self.compilePtrBody) -} - -func (self *_Compiler) compilePtrBody(p *_Program, sp int, vt reflect.Type) { - p.tag(sp) - p.add(_OP_save) - p.add(_OP_deref) - self.compileOne(p, sp + 1, vt, true) - p.add(_OP_drop) -} - -func (self *_Compiler) compileMap(p *_Program, sp int, vt reflect.Type) { - self.compileNil(p, sp, vt, _OP_empty_obj, self.compileMapBody) -} - -func (self *_Compiler) compileMapBody(p *_Program, sp int, vt reflect.Type) { - p.tag(sp + 1) - p.int(_OP_byte, '{') - p.add(_OP_save) - p.rtt(_OP_map_iter, vt) - p.add(_OP_save) - i := p.pc() - p.add(_OP_map_check_key) - u := p.pc() - p.add(_OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.pin(u) - p.int(_OP_byte, ':') - p.add(_OP_map_value_next) - self.compileOne(p, sp + 2, vt.Elem(), false) - j := p.pc() - p.add(_OP_map_check_key) - p.int(_OP_byte, ',') - v := p.pc() - p.add(_OP_map_write_key) - self.compileMapBodyKey(p, vt.Key()) - p.pin(v) - p.int(_OP_byte, ':') - p.add(_OP_map_value_next) - self.compileOne(p, sp + 2, vt.Elem(), false) - p.int(_OP_goto, j) - p.pin(i) - 
-    p.pin(j)
-    p.add(_OP_map_stop)
-    p.add(_OP_drop_2)
-    p.int(_OP_byte, '}')
-}
-
-func (self *_Compiler) compileMapBodyKey(p *_Program, vk reflect.Type) {
-    if !vk.Implements(encodingTextMarshalerType) {
-        self.compileMapBodyTextKey(p, vk)
-    } else {
-        self.compileMapBodyUtextKey(p, vk)
-    }
-}
-
-func (self *_Compiler) compileMapBodyTextKey(p *_Program, vk reflect.Type) {
-    switch vk.Kind() {
-        case reflect.Invalid : panic("map key is nil")
-        case reflect.Bool    : p.key(_OP_bool)
-        case reflect.Int     : p.key(_OP_int())
-        case reflect.Int8    : p.key(_OP_i8)
-        case reflect.Int16   : p.key(_OP_i16)
-        case reflect.Int32   : p.key(_OP_i32)
-        case reflect.Int64   : p.key(_OP_i64)
-        case reflect.Uint    : p.key(_OP_uint())
-        case reflect.Uint8   : p.key(_OP_u8)
-        case reflect.Uint16  : p.key(_OP_u16)
-        case reflect.Uint32  : p.key(_OP_u32)
-        case reflect.Uint64  : p.key(_OP_u64)
-        case reflect.Uintptr : p.key(_OP_uintptr())
-        case reflect.Float32 : p.key(_OP_f32)
-        case reflect.Float64 : p.key(_OP_f64)
-        case reflect.String  : self.compileString(p, vk)
-        default              : panic(error_type(vk))
-    }
-}
-
-func (self *_Compiler) compileMapBodyUtextKey(p *_Program, vk reflect.Type) {
-    if vk.Kind() != reflect.Ptr {
-        p.rtt(_OP_marshal_text, vk)
-    } else {
-        self.compileMapBodyUtextPtr(p, vk)
-    }
-}
-
-func (self *_Compiler) compileMapBodyUtextPtr(p *_Program, vk reflect.Type) {
-    i := p.pc()
-    p.add(_OP_is_nil)
-    p.rtt(_OP_marshal_text, vk)
-    j := p.pc()
-    p.add(_OP_goto)
-    p.pin(i)
-    p.str(_OP_text, "\"\"")
-    p.pin(j)
-}
-
-func (self *_Compiler) compileSlice(p *_Program, sp int, vt reflect.Type) {
-    self.compileNil(p, sp, vt, _OP_empty_arr, self.compileSliceBody)
-}
-
-func (self *_Compiler) compileSliceBody(p *_Program, sp int, vt reflect.Type) {
-    if isSimpleByte(vt) {
-        p.add(_OP_bin)
-    } else {
-        self.compileSliceArray(p, sp, vt)
-    }
-}
-
-func (self *_Compiler) compileSliceArray(p *_Program, sp int, vt reflect.Type) {
-    p.tag(sp)
-    p.int(_OP_byte, '[')
-    p.add(_OP_save)
-    p.add(_OP_slice_len)
-    i := p.pc()
-    p.rtt(_OP_slice_next, vt)
-    self.compileOne(p, sp + 1, vt, true)
-    j := p.pc()
-    p.rtt(_OP_slice_next, vt)
-    p.int(_OP_byte, ',')
-    self.compileOne(p, sp + 1, vt, true)
-    p.int(_OP_goto, j)
-    p.pin(i)
-    p.pin(j)
-    p.add(_OP_drop)
-    p.int(_OP_byte, ']')
-}
-
-func (self *_Compiler) compileArray(p *_Program, sp int, vt reflect.Type, nb int) {
-    p.tag(sp)
-    p.int(_OP_byte, '[')
-    p.add(_OP_save)
-
-    /* first item */
-    if nb != 0 {
-        self.compileOne(p, sp + 1, vt, self.pv)
-        p.add(_OP_load)
-    }
-
-    /* remaining items */
-    for i := 1; i < nb; i++ {
-        p.int(_OP_byte, ',')
-        p.int(_OP_index, i * int(vt.Size()))
-        self.compileOne(p, sp + 1, vt, self.pv)
-        p.add(_OP_load)
-    }
-
-    /* end of array */
-    p.add(_OP_drop)
-    p.int(_OP_byte, ']')
-}
-
-func (self *_Compiler) compileString(p *_Program, vt reflect.Type) {
-    if vt != jsonNumberType {
-        p.add(_OP_str)
-    } else {
-        p.add(_OP_number)
-    }
-}
-
-func (self *_Compiler) compileStruct(p *_Program, sp int, vt reflect.Type) {
-    if sp >= self.opts.MaxInlineDepth || p.pc() >= _MAX_ILBUF || (sp > 0 && vt.NumField() >= _MAX_FIELDS) {
-        p.vp(_OP_recurse, vt, self.pv)
-        if self.opts.RecursiveDepth > 0 {
-            if self.pv {
-                self.rec[vt] = 1
-            } else {
-                self.rec[vt] = 0
-            }
-        }
-    } else {
-        self.compileStructBody(p, sp, vt)
-    }
-}
-
-func (self *_Compiler) compileStructBody(p *_Program, sp int, vt reflect.Type) {
-    p.tag(sp)
-    p.int(_OP_byte, '{')
-    p.add(_OP_save)
-    p.add(_OP_cond_set)
-
-    /* compile each field */
-    for _, fv := range resolver.ResolveStruct(vt) {
-        var s []int
-        var o resolver.Offset
-
-        /* "omitempty" for arrays */
-        if fv.Type.Kind() == reflect.Array {
-            if fv.Type.Len() == 0 && (fv.Opts & resolver.F_omitempty) != 0 {
-                continue
-            }
-        }
-
-        /* index to the field */
-        for _, o = range fv.Path {
-            if p.int(_OP_index, int(o.Size)); o.Kind == resolver.F_deref {
-                s = append(s, p.pc())
-                p.add(_OP_is_nil)
-                p.add(_OP_deref)
-            }
-        }
-
-        /* check for "omitempty" option */
-        if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts & resolver.F_omitempty) != 0 {
-            s = append(s, p.pc())
-            self.compileStructFieldZero(p, fv.Type)
-        }
-
-        /* add the comma if not the first element */
-        i := p.pc()
-        p.add(_OP_cond_testc)
-        p.int(_OP_byte, ',')
-        p.pin(i)
-
-        /* compile the key and value */
-        ft := fv.Type
-        p.str(_OP_text, Quote(fv.Name) + ":")
-
-        /* check for "stringnize" option */
-        if (fv.Opts & resolver.F_stringize) == 0 {
-            self.compileOne(p, sp + 1, ft, self.pv)
-        } else {
-            self.compileStructFieldStr(p, sp + 1, ft)
-        }
-
-        /* patch the skipping jumps and reload the struct pointer */
-        p.rel(s)
-        p.add(_OP_load)
-    }
-
-    /* end of object */
-    p.add(_OP_drop)
-    p.int(_OP_byte, '}')
-}
-
-func (self *_Compiler) compileStructFieldStr(p *_Program, sp int, vt reflect.Type) {
-    pc := -1
-    ft := vt
-    sv := false
-
-    /* dereference the pointer if needed */
-    if ft.Kind() == reflect.Ptr {
-        ft = ft.Elem()
-    }
-
-    /* check if it can be stringized */
-    switch ft.Kind() {
-        case reflect.Bool    : sv = true
-        case reflect.Int     : sv = true
-        case reflect.Int8    : sv = true
-        case reflect.Int16   : sv = true
-        case reflect.Int32   : sv = true
-        case reflect.Int64   : sv = true
-        case reflect.Uint    : sv = true
-        case reflect.Uint8   : sv = true
-        case reflect.Uint16  : sv = true
-        case reflect.Uint32  : sv = true
-        case reflect.Uint64  : sv = true
-        case reflect.Uintptr : sv = true
-        case reflect.Float32 : sv = true
-        case reflect.Float64 : sv = true
-        case reflect.String  : sv = true
-    }
-
-    /* if it's not, ignore the "string" and follow the regular path */
-    if !sv {
-        self.compileOne(p, sp, vt, self.pv)
-        return
-    }
-
-    /* dereference the pointer */
-    if vt.Kind() == reflect.Ptr {
-        pc = p.pc()
-        vt = vt.Elem()
-        p.add(_OP_is_nil)
-        p.add(_OP_deref)
-    }
-
-    /* special case of a double-quoted string */
-    if ft != jsonNumberType && ft.Kind() == reflect.String {
-        p.add(_OP_quote)
-    } else {
-        self.compileStructFieldQuoted(p, sp, vt)
-    }
-
-    /* the "null" case of the pointer */
-    if pc != -1 {
-        e := p.pc()
-        p.add(_OP_goto)
-        p.pin(pc)
-        p.add(_OP_null)
-        p.pin(e)
-    }
-}
-
-func (self *_Compiler) compileStructFieldZero(p *_Program, vt reflect.Type) {
-    switch vt.Kind() {
-        case reflect.Bool      : p.add(_OP_is_zero_1)
-        case reflect.Int       : p.add(_OP_is_zero_ints())
-        case reflect.Int8      : p.add(_OP_is_zero_1)
-        case reflect.Int16     : p.add(_OP_is_zero_2)
-        case reflect.Int32     : p.add(_OP_is_zero_4)
-        case reflect.Int64     : p.add(_OP_is_zero_8)
-        case reflect.Uint      : p.add(_OP_is_zero_ints())
-        case reflect.Uint8     : p.add(_OP_is_zero_1)
-        case reflect.Uint16    : p.add(_OP_is_zero_2)
-        case reflect.Uint32    : p.add(_OP_is_zero_4)
-        case reflect.Uint64    : p.add(_OP_is_zero_8)
-        case reflect.Uintptr   : p.add(_OP_is_nil)
-        case reflect.Float32   : p.add(_OP_is_zero_4)
-        case reflect.Float64   : p.add(_OP_is_zero_8)
-        case reflect.String    : p.add(_OP_is_nil_p1)
-        case reflect.Interface : p.add(_OP_is_nil)
-        case reflect.Map       : p.add(_OP_is_zero_map)
-        case reflect.Ptr       : p.add(_OP_is_nil)
-        case reflect.Slice     : p.add(_OP_is_nil_p1)
-        default                : panic(error_type(vt))
-    }
-}
-
-func (self *_Compiler) compileStructFieldQuoted(p *_Program, sp int, vt reflect.Type) {
-    p.int(_OP_byte, '"')
-    self.compileOne(p, sp, vt, self.pv)
-    p.int(_OP_byte, '"')
-}
-
-func (self *_Compiler) compileInterface(p *_Program, vt reflect.Type) {
-    x := p.pc()
-    p.add(_OP_is_nil_p1)
-
-    /* iface and efaces are different */
-    if vt.NumMethod() == 0 {
-        p.add(_OP_eface)
-    } else {
-        p.add(_OP_iface)
-    }
-
-    /* the "null" value */
-    e := p.pc()
-    p.add(_OP_goto)
-    p.pin(x)
-    p.add(_OP_null)
-    p.pin(e)
-}
-
-func (self *_Compiler) compileMarshaler(p *_Program, op _Op, vt reflect.Type, mt reflect.Type) {
-    pc := p.pc()
-    vk := vt.Kind()
-
-    /* direct receiver */
-    if vk != reflect.Ptr {
-        p.rtt(op, vt)
-        return
-    }
-
-    /* value receiver with a pointer type, check for nil before calling the marshaler */
-    p.add(_OP_is_nil)
-    p.rtt(op, vt)
-    i := p.pc()
-    p.add(_OP_goto)
-    p.pin(pc)
-    p.add(_OP_null)
-    p.pin(i)
+func ForceUseVM() {
+    vm.SetCompiler(makeEncoderVM)
+    pretouchType = pretouchTypeVM
+    encodeTypedPointer = vm.EncodeTypedPointer
+    vars.UseVM = true
+}
+
+var encodeTypedPointer func(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *vars.Stack, fv uint64) error
+
+func makeEncoderVM(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
+    pp, err := NewCompiler().Compile(vt.Pack(), ex[0].(bool))
+    if err != nil {
+        return nil, err
+    }
+    return &pp, nil
+}
+
+var pretouchType func(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error)
+
+func pretouchTypeVM(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
+    /* compile function */
+    compiler := NewCompiler().apply(opts)
+
+    /* find or compile */
+    vt := rt.UnpackType(_vt)
+    if val := vars.GetProgram(vt); val != nil {
+        return nil, nil
+    } else if _, err := vars.ComputeProgram(vt, makeEncoderVM, v == 1); err == nil {
+        return compiler.rec, nil
+    } else {
+        return nil, err
+    }
+}
+
+func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
+    if opts.RecursiveDepth < 0 || len(vtm) == 0 {
+        return nil
+    }
+    next := make(map[reflect.Type]uint8)
+    for vt, v := range vtm {
+        sub, err := pretouchType(vt, opts, v)
+        if err != nil {
+            return err
+        }
+        for svt, v := range sub {
+            next[svt] = v
+        }
+    }
+    opts.RecursiveDepth -= 1
+    return pretouchRec(next, opts)
+}
+
+type Compiler struct {
+    opts option.CompileOptions
+    pv   bool
+    tab  map[reflect.Type]bool
+    rec  map[reflect.Type]uint8
+}
+
+func NewCompiler() *Compiler {
+    return &Compiler{
+        opts: option.DefaultCompileOptions(),
+        tab:  map[reflect.Type]bool{},
+        rec:  map[reflect.Type]uint8{},
+    }
+}
+
+func (self *Compiler) apply(opts option.CompileOptions) *Compiler {
+    self.opts = opts
+    if self.opts.RecursiveDepth > 0 {
+        self.rec = map[reflect.Type]uint8{}
+    }
+    return self
+}
+
+func (self *Compiler) rescue(ep *error) {
+    if val := recover(); val != nil {
+        if err, ok := val.(error); ok {
+            *ep = err
+        } else {
+            panic(val)
+        }
+    }
+}
+
+func (self *Compiler) Compile(vt reflect.Type, pv bool) (ret ir.Program, err error) {
+    defer self.rescue(&err)
+    self.compileOne(&ret, 0, vt, pv)
+    return
+}
+
+func (self *Compiler) compileOne(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+    if self.tab[vt] {
+        p.Vp(ir.OP_recurse, vt, pv)
+    } else {
+        self.compileRec(p, sp, vt, pv)
+    }
+}
+
+func (self *Compiler) tryCompileMarshaler(p *ir.Program, vt reflect.Type, pv bool) bool {
+    pt := reflect.PtrTo(vt)
+
+    /* check for addressable `json.Marshaler` with pointer receiver */
+    if pv && pt.Implements(vars.JsonMarshalerType) {
+        addMarshalerOp(p, ir.OP_marshal_p, pt, vars.JsonMarshalerType)
+        return true
+    }
+
+    /* check for `json.Marshaler` */
+    if vt.Implements(vars.JsonMarshalerType) {
+        self.compileMarshaler(p, ir.OP_marshal, vt, vars.JsonMarshalerType)
+        return true
+    }
+
+    /* check for addressable `encoding.TextMarshaler` with pointer receiver */
+    if pv && pt.Implements(vars.EncodingTextMarshalerType) {
+        addMarshalerOp(p, ir.OP_marshal_text_p, pt, vars.EncodingTextMarshalerType)
+        return true
+    }
+
+    /* check for `encoding.TextMarshaler` */
+    if vt.Implements(vars.EncodingTextMarshalerType) {
+        self.compileMarshaler(p, ir.OP_marshal_text, vt, vars.EncodingTextMarshalerType)
+        return true
+    }
+
+    return false
+}
+
+func (self *Compiler) compileRec(p *ir.Program, sp int, vt reflect.Type, pv bool) {
+    pr := self.pv
+
+    if self.tryCompileMarshaler(p, vt, pv) {
+        return
+    }
+
+    /* enter the recursion, and compile the type */
+    self.pv = pv
+    self.tab[vt] = true
+    self.compileOps(p, sp, vt)
+
+    /* exit the recursion */
+    self.pv = pr
+    delete(self.tab, vt)
+}
+
+func (self *Compiler) compileOps(p *ir.Program, sp int, vt reflect.Type) {
+    switch vt.Kind() {
+    case reflect.Bool:
+        p.Add(ir.OP_bool)
+    case reflect.Int:
+        p.Add(ir.OP_int())
+    case reflect.Int8:
+        p.Add(ir.OP_i8)
+    case reflect.Int16:
+        p.Add(ir.OP_i16)
+    case reflect.Int32:
+        p.Add(ir.OP_i32)
+    case reflect.Int64:
+        p.Add(ir.OP_i64)
+    case reflect.Uint:
+        p.Add(ir.OP_uint())
+    case reflect.Uint8:
+        p.Add(ir.OP_u8)
+    case reflect.Uint16:
+        p.Add(ir.OP_u16)
+    case reflect.Uint32:
+        p.Add(ir.OP_u32)
+    case reflect.Uint64:
+        p.Add(ir.OP_u64)
+    case reflect.Uintptr:
+        p.Add(ir.OP_uintptr())
+    case reflect.Float32:
+        p.Add(ir.OP_f32)
+    case reflect.Float64:
+        p.Add(ir.OP_f64)
+    case reflect.String:
+        self.compileString(p, vt)
+    case reflect.Array:
+        self.compileArray(p, sp, vt.Elem(), vt.Len())
+    case reflect.Interface:
+        self.compileInterface(p, vt)
+    case reflect.Map:
+        self.compileMap(p, sp, vt)
+    case reflect.Ptr:
+        self.compilePtr(p, sp, vt.Elem())
+    case reflect.Slice:
+        self.compileSlice(p, sp, vt.Elem())
+    case reflect.Struct:
+        self.compileStruct(p, sp, vt)
+    default:
+        panic(vars.Error_type(vt))
+    }
+}
+
+func (self *Compiler) compileNil(p *ir.Program, sp int, vt reflect.Type, nil_op ir.Op, fn func(*ir.Program, int, reflect.Type)) {
+    x := p.PC()
+    p.Add(ir.OP_is_nil)
+    fn(p, sp, vt)
+    e := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(x)
+    p.Add(nil_op)
+    p.Pin(e)
+}
+
+func (self *Compiler) compilePtr(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_null, self.compilePtrBody)
+}
+
+func (self *Compiler) compilePtrBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_deref)
+    self.compileOne(p, sp+1, vt, true)
+    p.Add(ir.OP_drop)
+}
+
+func (self *Compiler) compileMap(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_empty_obj, self.compileMapBody)
+}
+
+func (self *Compiler) compileMapBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp + 1)
+    p.Int(ir.OP_byte, '{')
+    e := p.PC()
+    p.Add(ir.OP_is_zero_map)
+    p.Add(ir.OP_save)
+    p.Rtt(ir.OP_map_iter, vt)
+    p.Add(ir.OP_save)
+    i := p.PC()
+    p.Add(ir.OP_map_check_key)
+    u := p.PC()
+    p.Add(ir.OP_map_write_key)
+    self.compileMapBodyKey(p, vt.Key())
+    p.Pin(u)
+    p.Int(ir.OP_byte, ':')
+    p.Add(ir.OP_map_value_next)
+    self.compileOne(p, sp+2, vt.Elem(), false)
+    j := p.PC()
+    p.Add(ir.OP_map_check_key)
+    p.Int(ir.OP_byte, ',')
+    v := p.PC()
+    p.Add(ir.OP_map_write_key)
+    self.compileMapBodyKey(p, vt.Key())
+    p.Pin(v)
+    p.Int(ir.OP_byte, ':')
+    p.Add(ir.OP_map_value_next)
+    self.compileOne(p, sp+2, vt.Elem(), false)
+    p.Int(ir.OP_goto, j)
+    p.Pin(i)
+    p.Pin(j)
+    p.Add(ir.OP_map_stop)
+    p.Add(ir.OP_drop_2)
+    p.Pin(e)
+    p.Int(ir.OP_byte, '}')
+}
+
+func (self *Compiler) compileMapBodyKey(p *ir.Program, vk reflect.Type) {
+    if !vk.Implements(vars.EncodingTextMarshalerType) {
+        self.compileMapBodyTextKey(p, vk)
+    } else {
+        self.compileMapBodyUtextKey(p, vk)
+    }
+}
+
+func (self *Compiler) compileMapBodyTextKey(p *ir.Program, vk reflect.Type) {
+    switch vk.Kind() {
+    case reflect.Invalid:
+        panic("map key is nil")
+    case reflect.Bool:
+        p.Key(ir.OP_bool)
+    case reflect.Int:
+        p.Key(ir.OP_int())
+    case reflect.Int8:
+        p.Key(ir.OP_i8)
+    case reflect.Int16:
+        p.Key(ir.OP_i16)
+    case reflect.Int32:
+        p.Key(ir.OP_i32)
+    case reflect.Int64:
+        p.Key(ir.OP_i64)
+    case reflect.Uint:
+        p.Key(ir.OP_uint())
+    case reflect.Uint8:
+        p.Key(ir.OP_u8)
+    case reflect.Uint16:
+        p.Key(ir.OP_u16)
+    case reflect.Uint32:
+        p.Key(ir.OP_u32)
+    case reflect.Uint64:
+        p.Key(ir.OP_u64)
+    case reflect.Uintptr:
+        p.Key(ir.OP_uintptr())
+    case reflect.Float32:
+        p.Key(ir.OP_f32)
+    case reflect.Float64:
+        p.Key(ir.OP_f64)
+    case reflect.String:
+        self.compileString(p, vk)
+    default:
+        panic(vars.Error_type(vk))
+    }
+}
+
+func (self *Compiler) compileMapBodyUtextKey(p *ir.Program, vk reflect.Type) {
+    if vk.Kind() != reflect.Ptr {
+        addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+    } else {
+        self.compileMapBodyUtextPtr(p, vk)
+    }
+}
+
+func (self *Compiler) compileMapBodyUtextPtr(p *ir.Program, vk reflect.Type) {
+    i := p.PC()
+    p.Add(ir.OP_is_nil)
+    addMarshalerOp(p, ir.OP_marshal_text, vk, vars.EncodingTextMarshalerType)
+    j := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(i)
+    p.Str(ir.OP_text, "\"\"")
+    p.Pin(j)
+}
+
+func (self *Compiler) compileSlice(p *ir.Program, sp int, vt reflect.Type) {
+    self.compileNil(p, sp, vt, ir.OP_empty_arr, self.compileSliceBody)
+}
+
+func (self *Compiler) compileSliceBody(p *ir.Program, sp int, vt reflect.Type) {
+    if vars.IsSimpleByte(vt) {
+        p.Add(ir.OP_bin)
+    } else {
+        self.compileSliceArray(p, sp, vt)
+    }
+}
+
+func (self *Compiler) compileSliceArray(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '[')
+    e := p.PC()
+    p.Add(ir.OP_is_nil)
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_slice_len)
+    i := p.PC()
+    p.Rtt(ir.OP_slice_next, vt)
+    self.compileOne(p, sp+1, vt, true)
+    j := p.PC()
+    p.Rtt(ir.OP_slice_next, vt)
+    p.Int(ir.OP_byte, ',')
+    self.compileOne(p, sp+1, vt, true)
+    p.Int(ir.OP_goto, j)
+    p.Pin(i)
+    p.Pin(j)
+    p.Add(ir.OP_drop)
+    p.Pin(e)
+    p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileArray(p *ir.Program, sp int, vt reflect.Type, nb int) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '[')
+    p.Add(ir.OP_save)
+
+    /* first item */
+    if nb != 0 {
+        self.compileOne(p, sp+1, vt, self.pv)
+        p.Add(ir.OP_load)
+    }
+
+    /* remaining items */
+    for i := 1; i < nb; i++ {
+        p.Int(ir.OP_byte, ',')
+        p.Int(ir.OP_index, i*int(vt.Size()))
+        self.compileOne(p, sp+1, vt, self.pv)
+        p.Add(ir.OP_load)
+    }
+
+    /* end of array */
+    p.Add(ir.OP_drop)
+    p.Int(ir.OP_byte, ']')
+}
+
+func (self *Compiler) compileString(p *ir.Program, vt reflect.Type) {
+    if vt != vars.JsonNumberType {
+        p.Add(ir.OP_str)
+    } else {
+        p.Add(ir.OP_number)
+    }
+}
+
+func (self *Compiler) compileStruct(p *ir.Program, sp int, vt reflect.Type) {
+    if sp >= self.opts.MaxInlineDepth || p.PC() >= vars.MAX_ILBUF || (sp > 0 && vt.NumField() >= vars.MAX_FIELDS) {
+        p.Vp(ir.OP_recurse, vt, self.pv)
+        if self.opts.RecursiveDepth > 0 {
+            if self.pv {
+                self.rec[vt] = 1
+            } else {
+                self.rec[vt] = 0
+            }
+        }
+    } else {
+        self.compileStructBody(p, sp, vt)
+    }
+}
+
+func (self *Compiler) compileStructBody(p *ir.Program, sp int, vt reflect.Type) {
+    p.Tag(sp)
+    p.Int(ir.OP_byte, '{')
+    p.Add(ir.OP_save)
+    p.Add(ir.OP_cond_set)
+
+    /* compile each field */
+    for _, fv := range resolver.ResolveStruct(vt) {
+        var s []int
+        var o resolver.Offset
+
+        /* "omitempty" for arrays */
+        if fv.Type.Kind() == reflect.Array {
+            if fv.Type.Len() == 0 && (fv.Opts&resolver.F_omitempty) != 0 {
+                continue
+            }
+        }
+
+        /* index to the field */
+        for _, o = range fv.Path {
+            if p.Int(ir.OP_index, int(o.Size)); o.Kind == resolver.F_deref {
+                s = append(s, p.PC())
+                p.Add(ir.OP_is_nil)
+                p.Add(ir.OP_deref)
+            }
+        }
+
+        /* check for "omitempty" option */
+        if fv.Type.Kind() != reflect.Struct && fv.Type.Kind() != reflect.Array && (fv.Opts&resolver.F_omitempty) != 0 {
+            s = append(s, p.PC())
+            self.compileStructFieldZero(p, fv.Type)
+        }
+
+        /* add the comma if not the first element */
+        i := p.PC()
+        p.Add(ir.OP_cond_testc)
+        p.Int(ir.OP_byte, ',')
+        p.Pin(i)
+
+        /* compile the key and value */
+        ft := fv.Type
+        p.Str(ir.OP_text, Quote(fv.Name)+":")
+
+        /* check for "stringize" option */
+        if (fv.Opts & resolver.F_stringize) == 0 {
+            self.compileOne(p, sp+1, ft, self.pv)
+        } else {
+            self.compileStructFieldStr(p, sp+1, ft)
+        }
+
+        /* patch the skipping jumps and reload the struct pointer */
+        p.Rel(s)
+        p.Add(ir.OP_load)
+    }
+
+    /* end of object */
+    p.Add(ir.OP_drop)
+    p.Int(ir.OP_byte, '}')
+}
+
+func (self *Compiler) compileStructFieldStr(p *ir.Program, sp int, vt reflect.Type) {
+    // NOTICE: according to encoding/json, a Marshaler type has higher priority than the `string` option
+    // see issue:
+    if self.tryCompileMarshaler(p, vt, self.pv) {
+        return
+    }
+
+    pc := -1
+    ft := vt
+    sv := false
+
+    /* dereference the pointer if needed */
+    if ft.Kind() == reflect.Ptr {
+        ft = ft.Elem()
+    }
+
+    /* check if it can be stringized */
+    switch ft.Kind() {
+    case reflect.Bool:
+        sv = true
+    case reflect.Int:
+        sv = true
+    case reflect.Int8:
+        sv = true
+    case reflect.Int16:
+        sv = true
+    case reflect.Int32:
+        sv = true
+    case reflect.Int64:
+        sv = true
+    case reflect.Uint:
+        sv = true
+    case reflect.Uint8:
+        sv = true
+    case reflect.Uint16:
+        sv = true
+    case reflect.Uint32:
+        sv = true
+    case reflect.Uint64:
+        sv = true
+    case reflect.Uintptr:
+        sv = true
+    case reflect.Float32:
+        sv = true
+    case reflect.Float64:
+        sv = true
+    case reflect.String:
+        sv = true
+    }
+
+    /* if it's not, ignore the "string" and follow the regular path */
+    if !sv {
+        self.compileOne(p, sp, vt, self.pv)
+        return
+    }
+
+    /* dereference the pointer */
+    if vt.Kind() == reflect.Ptr {
+        pc = p.PC()
+        vt = vt.Elem()
+        p.Add(ir.OP_is_nil)
+        p.Add(ir.OP_deref)
+    }
+
+    /* special case of a double-quoted string */
+    if ft != vars.JsonNumberType && ft.Kind() == reflect.String {
+        p.Add(ir.OP_quote)
+    } else {
+        self.compileStructFieldQuoted(p, sp, vt)
+    }
+
+    /* the "null" case of the pointer */
+    if pc != -1 {
+        e := p.PC()
+        p.Add(ir.OP_goto)
+        p.Pin(pc)
+        p.Add(ir.OP_null)
+        p.Pin(e)
+    }
+}
+
+func (self *Compiler) compileStructFieldZero(p *ir.Program, vt reflect.Type) {
+    switch vt.Kind() {
+    case reflect.Bool:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Int:
+        p.Add(ir.OP_is_zero_ints())
+    case reflect.Int8:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Int16:
+        p.Add(ir.OP_is_zero_2)
+    case reflect.Int32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Int64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.Uint:
+        p.Add(ir.OP_is_zero_ints())
+    case reflect.Uint8:
+        p.Add(ir.OP_is_zero_1)
+    case reflect.Uint16:
+        p.Add(ir.OP_is_zero_2)
+    case reflect.Uint32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Uint64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.Uintptr:
+        p.Add(ir.OP_is_nil)
+    case reflect.Float32:
+        p.Add(ir.OP_is_zero_4)
+    case reflect.Float64:
+        p.Add(ir.OP_is_zero_8)
+    case reflect.String:
+        p.Add(ir.OP_is_nil_p1)
+    case reflect.Interface:
+        p.Add(ir.OP_is_nil)
+    case reflect.Map:
+        p.Add(ir.OP_is_zero_map)
+    case reflect.Ptr:
+        p.Add(ir.OP_is_nil)
+    case reflect.Slice:
+        p.Add(ir.OP_is_nil_p1)
+    default:
+        panic(vars.Error_type(vt))
+    }
+}
+
+func (self *Compiler) compileStructFieldQuoted(p *ir.Program, sp int, vt reflect.Type) {
+    p.Int(ir.OP_byte, '"')
+    self.compileOne(p, sp, vt, self.pv)
+    p.Int(ir.OP_byte, '"')
+}
+
+func (self *Compiler) compileInterface(p *ir.Program, vt reflect.Type) {
+    x := p.PC()
+    p.Add(ir.OP_is_nil_p1)
+
+    /* iface and efaces are different */
+    if vt.NumMethod() == 0 {
+        p.Add(ir.OP_eface)
+    } else {
+        p.Add(ir.OP_iface)
+    }
+
+    /* the "null" value */
+    e := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(x)
+    p.Add(ir.OP_null)
+    p.Pin(e)
+}
+
+func (self *Compiler) compileMarshaler(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+    pc := p.PC()
+    vk := vt.Kind()
+
+    /* direct receiver */
+    if vk != reflect.Ptr {
+        addMarshalerOp(p, op, vt, mt)
+        return
+    }
+    /* value receiver with a pointer type, check for nil before calling the marshaler */
+    p.Add(ir.OP_is_nil)
+
+    addMarshalerOp(p, op, vt, mt)
+
+    i := p.PC()
+    p.Add(ir.OP_goto)
+    p.Pin(pc)
+    p.Add(ir.OP_null)
+    p.Pin(i)
+}
+
+func addMarshalerOp(p *ir.Program, op ir.Op, vt reflect.Type, mt reflect.Type) {
+    if vars.UseVM {
+        itab := rt.GetItab(rt.IfaceType(rt.UnpackType(mt)), rt.UnpackType(vt), true)
+        p.Vtab(op, vt, itab)
+    } else {
+        // OPT: get itab here
+        p.Rtt(op, vt)
+    }
 }
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go b/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
deleted file mode 100644
index 37e6f7d4..00000000
--- a/vendor/github.com/bytedance/sonic/internal/encoder/debug_go117.go
+++ /dev/null
@@ -1,205 +0,0 @@
-// +build go1.17,!go1.23
-
-/*
- * Copyright 2021 ByteDance Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package encoder
-
-import (
-    `fmt`
-    `os`
-    `runtime`
-    `strings`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/jit`
-    `github.com/twitchyliquid64/golang-asm/obj`
-)
-
-const _FP_debug = 128
-
-var (
-    debugSyncGC   = os.Getenv("SONIC_SYNC_GC") != ""
-    debugAsyncGC  = os.Getenv("SONIC_NO_ASYNC_GC") == ""
-    debugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
-)
-
-var (
-    _Instr_End = newInsOp(_OP_is_nil)
-
-    _F_gc      = jit.Func(gc)
-    _F_println = jit.Func(println_wrapper)
-    _F_print   = jit.Func(print)
-)
-
-func (self *_Assembler) dsave(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _FP_debug / 8 - 1 {
-            panic("too many registers to save")
-        } else {
-            self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8))
-        }
-    }
-}
-
-func (self *_Assembler) dload(r ...obj.Addr) {
-    for i, v := range r {
-        if i > _FP_debug / 8 - 1 {
-            panic("too many registers to load")
-        } else {
-            self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8), v)
-        }
-    }
-}
-
-func println_wrapper(i int, op1 int, op2 int){
-    println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
-}
-
-func print(i int){
-    println(i)
-}
-
-func gc() {
-    if !debugSyncGC {
-        return
-    }
-    runtime.GC()
-    // debug.FreeOSMemory()
-}
-
-func (self *_Assembler) dcall(fn obj.Addr) {
-    self.Emit("MOVQ", fn, _R10)  // MOVQ ${fn}, R10
-    self.Rjmp("CALL", _R10)      // CALL R10
-}
-
-func (self *_Assembler) debug_gc() {
-    if !debugSyncGC {
-        return
-    }
-    self.dsave(_REG_debug...)
-    self.dcall(_F_gc)
-    self.dload(_REG_debug...)
-}
-
-func (self *_Assembler) debug_instr(i int, v *_Instr) {
-    if debugSyncGC {
-        if i+1 == len(self.p) {
-            self.print_gc(i, v, &_Instr_End)
-        } else {
-            next := &(self.p[i+1])
-            self.print_gc(i, v, next)
-            name := _OpNames[next.op()]
-            if strings.Contains(name, "save") {
-                return
-            }
-        }
-        // self.debug_gc()
-    }
-}
-
-//go:noescape
-//go:linkname checkptrBase runtime.checkptrBase
-func checkptrBase(p unsafe.Pointer) uintptr
-
-//go:noescape
-//go:linkname findObject runtime.findObject
-func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
-
-var (
-    _F_checkptr = jit.Func(checkptr)
-    _F_printptr = jit.Func(printptr)
-)
-
-var (
-    _R10 = jit.Reg("R10")
-)
-var _REG_debug = []obj.Addr {
-    jit.Reg("AX"),
-    jit.Reg("BX"),
-    jit.Reg("CX"),
-    jit.Reg("DX"),
-    jit.Reg("DI"),
-    jit.Reg("SI"),
-    jit.Reg("BP"),
-    jit.Reg("SP"),
-    jit.Reg("R8"),
-    jit.Reg("R9"),
-    jit.Reg("R10"),
-    jit.Reg("R11"),
-    jit.Reg("R12"),
-    jit.Reg("R13"),
-    jit.Reg("R14"),
-    jit.Reg("R15"),
-}
-
-func checkptr(ptr uintptr) {
-    if ptr == 0 {
-        return
-    }
-    fmt.Printf("pointer: %x\n", ptr)
-    f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
-    if f == 0 {
-        fmt.Printf("! unknown-based pointer: %x\n", ptr)
-    } else if f == 1 {
-        fmt.Printf("! stack pointer: %x\n", ptr)
-    } else {
-        fmt.Printf("base: %x\n", f)
-    }
-    findobj(ptr)
-}
-
-func findobj(ptr uintptr) {
-    base, s, objIndex := findObject(ptr, 0, 0)
-    if s != nil && base == 0 {
-        fmt.Printf("! invalid pointer: %x\n", ptr)
-    }
-    fmt.Printf("objIndex: %d\n", objIndex)
-}
-
-func (self *_Assembler) check_ptr(ptr obj.Addr, lea bool) {
-    if !debugCheckPtr {
-        return
-    }
-
-    self.dsave(_REG_debug...)
-    if lea {
-        self.Emit("LEAQ", ptr, _R10)
-    } else {
-        self.Emit("MOVQ", ptr, _R10)
-    }
-    self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
-    self.dcall(_F_checkptr)
-    self.dload(_REG_debug...)
-}
-
-func printptr(i int, ptr uintptr) {
-    fmt.Printf("[%d] ptr: %x\n", i, ptr)
-}
-
-func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
-    self.dsave(_REG_debug...)
-    if lea {
-        self.Emit("LEAQ", ptr, _R10)
-    } else {
-        self.Emit("MOVQ", ptr, _R10)
-    }
-
-    self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
-    self.Emit("MOVQ", _R10, _BX)
-    self.dcall(_F_printptr)
-    self.dload(_REG_debug...)
-}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
new file mode 100644
index 00000000..c5320643
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_norace.go
@@ -0,0 +1,24 @@
+//go:build !race
+// +build !race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+    return encodeInto(buf, val, opts)
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
new file mode 100644
index 00000000..c373c55f
--- /dev/null
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encode_race.go
@@ -0,0 +1,54 @@
+//go:build race
+// +build race
+
+/*
+ * Copyright 2021 ByteDance Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package encoder
+
+import (
+    `encoding/json`
+
+    `github.com/bytedance/sonic/internal/rt`
+)
+
+
+func helpDetectDataRace(val interface{}) {
+    var out []byte
+    defer func() {
+        if v := recover(); v != nil {
+            // NOTICE: help user to locate where panic occurs
+            println("panic when encoding on: ", truncate(out))
+            panic(v)
+        }
+    }()
+    out, _ = json.Marshal(val)
+}
+
+func encodeIntoCheckRace(buf *[]byte, val interface{}, opts Options) error {
+    err := encodeInto(buf, val, opts)
+    /* run last, so that a panic raised by sonic itself is always caught first */
+    helpDetectDataRace(val)
+    return err
+}
+
+func truncate(json []byte) string {
+    if len(json) <= 256 {
+        return rt.Mem2Str(json)
+    } else {
+        return rt.Mem2Str(json[len(json)-256:])
+    }
+}
diff --git a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
index d285c299..4cba1a16 100644
--- a/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
+++ b/vendor/github.com/bytedance/sonic/internal/encoder/encoder.go
@@ -17,72 +17,62 @@
 package encoder
 
 import (
-    `bytes`
-    `encoding/json`
-    `reflect`
-    `runtime`
-    `unsafe`
-
-    `github.com/bytedance/sonic/internal/native`
-    `github.com/bytedance/sonic/internal/native/types`
-    `github.com/bytedance/sonic/internal/rt`
-    `github.com/bytedance/sonic/utf8`
-    `github.com/bytedance/sonic/option`
+    "bytes"
+    "encoding/json"
+    "reflect"
+    "runtime"
+    "unsafe"
+
+    "github.com/bytedance/sonic/utf8"
+    "github.com/bytedance/sonic/internal/encoder/alg"
+    "github.com/bytedance/sonic/internal/encoder/vars"
+    "github.com/bytedance/sonic/internal/rt"
+    "github.com/bytedance/sonic/option"
 )
 
 // Options is a set of encoding options.
 type Options uint64
 
-const (
-    bitSortMapKeys = iota
-    bitEscapeHTML
-    bitCompactMarshaler
-    bitNoQuoteTextMarshaler
-    bitNoNullSliceOrMap
-    bitValidateString
-    bitNoValidateJSONMarshaler
-    bitNoEncoderNewline
-
-    // used for recursive compile
-    bitPointerValue = 63
-)
-
 const (
     // SortMapKeys indicates that the keys of a map needs to be sorted
     // before serializing into JSON.
    // WARNING: This hurts performance A LOT, USE WITH CARE.
-    SortMapKeys Options = 1 << bitSortMapKeys
+    SortMapKeys Options = 1 << alg.BitSortMapKeys
 
     // EscapeHTML indicates encoder to escape all HTML characters
    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
    // WARNING: This hurts performance A LOT, USE WITH CARE.
-    EscapeHTML Options = 1 << bitEscapeHTML
+    EscapeHTML Options = 1 << alg.BitEscapeHTML
 
     // CompactMarshaler indicates that the output JSON from json.Marshaler
    // is always compact and needs no validation
-    CompactMarshaler Options = 1 << bitCompactMarshaler
+    CompactMarshaler Options = 1 << alg.BitCompactMarshaler
 
     // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
    // is always escaped string and needs no quoting
-    NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
+    NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler
 
     // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
-    // instead of 'null'
-    NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
+    // instead of 'null'.
+    // NOTE: The priority of this option is lower than json tag `omitempty`.
+    NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
 
     // ValidateString indicates that encoder should validate the input string
    // before encoding it into JSON.
-    ValidateString Options = 1 << bitValidateString
+    ValidateString Options = 1 << alg.BitValidateString
 
     // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
    // after encoding the JSONMarshaler to JSON.
-    NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
+    NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler
 
     // NoEncoderNewline indicates that the encoder should not add a newline after every message
-    NoEncoderNewline Options = 1 << bitNoEncoderNewline
+    NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline
 
     // CompatibleWithStd is used to be compatible with std encoder.
     CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
+
+    // EncodeNullForInfOrNan encodes Infinity or NaN floats into `null`, instead of returning an error.
+    EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
 )
 
 // Encoder represents a specific set of encoder configurations.
@@ -171,53 +161,45 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
 
 // Quote returns the JSON-quoted version of s.
 func Quote(s string) string {
-    var n int
-    var p []byte
-
-    /* check for empty string */
-    if s == "" {
-        return `""`
-    }
-
-    /* allocate space for result */
-    n = len(s) + 2
-    p = make([]byte, 0, n)
-
-    /* call the encoder */
-    _ = encodeString(&p, s)
-    return rt.Mem2Str(p)
+    buf := make([]byte, 0, len(s)+2)
+    buf = alg.Quote(buf, s, false)
+    return rt.Mem2Str(buf)
 }
 
 // Encode returns the JSON encoding of val, encoded with opts.
 func Encode(val interface{}, opts Options) ([]byte, error) {
     var ret []byte
-    buf := newBytes()
-    err := encodeInto(&buf, val, opts)
+    buf := vars.NewBytes()
+    err := encodeIntoCheckRace(buf, val, opts)
 
     /* check for errors */
     if err != nil {
-        freeBytes(buf)
+        vars.FreeBytes(buf)
         return nil, err
     }
 
     /* htmlescape or correct UTF-8 if opts enable */
     old := buf
-    buf = encodeFinish(old, opts)
-    pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
-    pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
+    *buf = encodeFinish(*old, opts)
+    pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr
+    pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr
 
     /* return when allocated a new buffer */
     if pbuf != pold {
-        freeBytes(old)
-        return buf, nil
+        vars.FreeBytes(old)
+        return *buf, nil
    }
 
     /* make a copy of the result */
-    ret = make([]byte, len(buf))
-    copy(ret, buf)
-
-    freeBytes(buf)
+    if rt.CanSizeResue(cap(*buf)) {
+        ret = make([]byte, len(*buf))
+        copy(ret, *buf)
+        vars.FreeBytes(buf)
+    } else {
+        ret = *buf
+    }
+    /* return the buffer into pool */
     return ret, nil
 }
 
@@ -225,7 +207,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
 // EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
 // a new one.
 func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
-    err := encodeInto(buf, val, opts)
+    err := encodeIntoCheckRace(buf, val, opts)
     if err != nil {
         return err
     }
@@ -234,15 +216,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
 }
 
 func encodeInto(buf *[]byte, val interface{}, opts Options) error {
-    stk := newStack()
+    stk := vars.NewStack()
     efv := rt.UnpackEface(val)
     err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
 
     /* return the stack into pool */
     if err != nil {
-        resetStack(stk)
+        vars.ResetStack(stk)
     }
-    freeStack(stk)
+    vars.FreeStack(stk)
 
     /* avoid GC ahead */
     runtime.KeepAlive(buf)
@@ -254,13 +236,12 @@ func encodeFinish(buf []byte, opts Options) []byte {
     if opts & EscapeHTML != 0 {
         buf = HTMLEscape(nil, buf)
     }
-    if opts & ValidateString != 0 && !utf8.Validate(buf) {
+    if (opts & ValidateString != 0) && !utf8.Validate(buf) {
         buf = utf8.CorrectWith(nil, buf, `\ufffd`)
     }
     return buf
 }
 
-var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
 
 // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
 // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
@@ -269,7 +250,7 @@
 // escaping within