jwt auth test
parent 76715b7b73 · commit 0708b425fb
247 changed files with 68349 additions and 4 deletions
go.mod · 16 changed lines

@@ -8,22 +8,34 @@ require (
 	github.com/go-chi/chi/v5 v5.0.5
 )

-require github.com/unrolled/render v1.4.0
+require (
+	github.com/go-chi/jwtauth/v5 v5.0.2
+	github.com/unrolled/render v1.4.0
+)

 require (
+	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d // indirect
 	github.com/fsnotify/fsnotify v1.4.9 // indirect
 	github.com/gin-contrib/sse v0.1.0 // indirect
 	github.com/go-playground/locales v0.13.0 // indirect
 	github.com/go-playground/universal-translator v0.17.0 // indirect
 	github.com/go-playground/validator/v10 v10.4.1 // indirect
+	github.com/goccy/go-json v0.7.6 // indirect
 	github.com/golang/protobuf v1.3.3 // indirect
 	github.com/json-iterator/go v1.1.9 // indirect
 	github.com/leodido/go-urn v1.2.0 // indirect
+	github.com/lestrrat-go/backoff/v2 v2.0.8 // indirect
+	github.com/lestrrat-go/blackmagic v1.0.0 // indirect
+	github.com/lestrrat-go/httpcc v1.0.0 // indirect
+	github.com/lestrrat-go/iter v1.0.1 // indirect
+	github.com/lestrrat-go/jwx v1.2.6 // indirect
+	github.com/lestrrat-go/option v1.0.0 // indirect
 	github.com/mattn/go-isatty v0.0.12 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect
 	github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
 	github.com/ugorji/go/codec v1.1.7 // indirect
-	golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 // indirect
+	golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 // indirect
 	golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea // indirect
 	gopkg.in/yaml.v2 v2.2.8 // indirect
 )
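The only new direct requirement is github.com/go-chi/jwtauth/v5; the lestrrat-go/*, goccy/go-json, and decred secp256k1 entries are indirect dependencies pulled in through its underlying jwx JWT library. For orientation, a minimal sketch of the API the new dependency provides, mirroring how pkg/user/user.go further down this diff uses it (the hard-coded signing key is just the placeholder from that file):

```go
// Sketch only: mirrors the jwtauth usage in pkg/user/user.go below.
package main

import (
	"fmt"

	"github.com/go-chi/jwtauth/v5"
)

func main() {
	tokenAuth := jwtauth.New("HS256", []byte("secret"), nil) // HMAC-SHA256 signer/verifier
	_, tokenString, err := tokenAuth.Encode(map[string]interface{}{"username": "admin"})
	if err != nil {
		panic(err)
	}
	fmt.Println(tokenString) // signed JWT to hand back to the client
}
```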
go.sum · 50 changed lines

@@ -1,6 +1,9 @@
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d h1:1iy2qD6JEhHKKhUOA9IWs7mjco7lnw2qx8FsRI2wirE=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE=
 github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
 github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
@@ -9,8 +12,11 @@ github.com/gin-gonic/gin v1.7.4 h1:QmUZXrvJ9qZ3GfWvQ+2wnW/1ePrTEJqPKMYEU3lD/DM=
 github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY=
 github.com/go-chi/chi v1.5.4 h1:QHdzF2szwjqVV4wmByUnTcsbIg7UGaQ0tPF2t5GcAIs=
 github.com/go-chi/chi v1.5.4/go.mod h1:uaf8YgoFazUOkPBG7fxPftUylNumIev9awIWOENIuEg=
+github.com/go-chi/chi/v5 v5.0.4/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
 github.com/go-chi/chi/v5 v5.0.5 h1:l3RJ8T8TAqLsXFfah+RA6N4pydMbPwSdvNM+AFWvLUM=
 github.com/go-chi/chi/v5 v5.0.5/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
+github.com/go-chi/jwtauth/v5 v5.0.2 h1:CSKtr+b6Jnfy5T27sMaiBPxaVE/bjnjS3ramFQ0526w=
+github.com/go-chi/jwtauth/v5 v5.0.2/go.mod h1:TeA7vmPe3uYThvHw8O8W13HOOpOd4MTgToxL41gZyjs=
 github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
 github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/locales v0.13.0 h1:HyWk6mgj5qFqCT5fjGBuRArbVDfE4hi8+e8ceBS/t7Q=
@@ -19,6 +25,8 @@ github.com/go-playground/universal-translator v0.17.0 h1:icxd5fm+REJzpZx7ZfpaD87
 github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA=
 github.com/go-playground/validator/v10 v10.4.1 h1:pH2c5ADXtd66mxoE0Zm9SUhxE20r7aM3F26W0hOn+GE=
 github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
+github.com/goccy/go-json v0.7.6 h1:H0wq4jppBQ+9222sk5+hPLL25abZQiRuQ6YPnjO9c+A=
+github.com/goccy/go-json v0.7.6/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/golang/protobuf v1.3.3 h1:gyjaxf+svBWX08ZjK86iN9geUJF0H6gp2IRKX6Nf6/I=
 github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -26,40 +34,82 @@ github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGn
 github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
 github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
 github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
+github.com/lestrrat-go/backoff/v2 v2.0.8 h1:oNb5E5isby2kiro9AgdHLv5N5tint1AnDVVf2E2un5A=
+github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y=
+github.com/lestrrat-go/blackmagic v1.0.0 h1:XzdxDbuQTz0RZZEmdU7cnQxUtFUzgCSPq8RCz4BxIi4=
+github.com/lestrrat-go/blackmagic v1.0.0/go.mod h1:TNgH//0vYSs8VXDCfkZLgIrVTTXQELZffUV0tz3MtdQ=
+github.com/lestrrat-go/codegen v1.0.1/go.mod h1:JhJw6OQAuPEfVKUCLItpaVLumDGWQznd1VaXrBk9TdM=
+github.com/lestrrat-go/httpcc v1.0.0 h1:FszVC6cKfDvBKcJv646+lkh4GydQg2Z29scgUfkOpYc=
+github.com/lestrrat-go/httpcc v1.0.0/go.mod h1:tGS/u00Vh5N6FHNkExqGGNId8e0Big+++0Gf8MBnAvE=
+github.com/lestrrat-go/iter v1.0.1 h1:q8faalr2dY6o8bV45uwrxq12bRa1ezKrB6oM9FUgN4A=
+github.com/lestrrat-go/iter v1.0.1/go.mod h1:zIdgO1mRKhn8l9vrZJZz9TUMMFbQbLeTsbqPDrJ/OJc=
+github.com/lestrrat-go/jwx v1.2.6 h1:XAgfuHaOB7fDZ/6WhVgl8K89af768dU+3Nx4DlTbLIk=
+github.com/lestrrat-go/jwx v1.2.6/go.mod h1:tJuGuAI3LC71IicTx82Mz1n3w9woAs2bYJZpkjJQ5aU=
+github.com/lestrrat-go/option v1.0.0 h1:WqAWL8kh8VcSoD6xjSH34/1m8yxluXQbDeKNfvFeEO4=
+github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
 github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
 github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo=
 github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
 github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs=
 github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
 github.com/unrolled/render v1.4.0 h1:p73obhpsXuE3paXOtcuXTBKgBJpLCfmABnsUiO35x+Q=
 github.com/unrolled/render v1.4.0/go.mod h1:cK4RSTTVdND5j9EYEc0LAMOvdG11JeiKjyjfyZRvV2w=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20201217014255-9d1352758620 h1:3wPMTskHO3+O6jqTEXyFcsnuxMQOqYSaHsDxcbUXpqA=
+golang.org/x/crypto v0.0.0-20201217014255-9d1352758620/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42 h1:vEOn+mP2zCOVzKckCZy6YsCtDblrpj/w7B9nxGNELpg=
 golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea h1:+WiDlPBBaO+h9vPNZi8uJ3k4BkKQB7Iow3aqwHVA5hI=
 golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20200918232735-d647fc253266/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU=
+golang.org/x/tools v0.0.0-20210114065538-d78b04bdf963/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
 gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
main.go · 3 changed lines

@@ -3,10 +3,12 @@ package main
 import (
 	"embed"
 	"gin-test/pkg/sample"
+	"gin-test/pkg/user"
 	"net/http"

 	"github.com/go-chi/chi/v5"
 	"github.com/go-chi/chi/v5/middleware"

 	"github.com/unrolled/render"
 )

@@ -31,6 +33,7 @@ func main() {
 		render.HTML(w, http.StatusOK, "index", map[string]string {"title": "Soeren"})
 	})
 	sample.Register(r)
+	user.Register(r)
 })
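The hunks above only show fragments of main.go. For readability, here is a hedged sketch of how the new user.Register call could sit in a chi router; only the lines visible in the hunks are confirmed by the diff, and the real file also wires up the embedded templates and the render.HTML index handler shown above, which are omitted here. The listen address and the surrounding router setup are assumptions.

```go
// Hypothetical reconstruction for illustration only; see the diff hunks above
// for the lines this commit actually touches.
package main

import (
	"net/http"

	"gin-test/pkg/sample"
	"gin-test/pkg/user"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/chi/v5/middleware"
)

func main() {
	r := chi.NewRouter()
	r.Use(middleware.Logger)

	r.Group(func(r chi.Router) {
		sample.Register(r) // existing routes
		user.Register(r)   // new: /login, /register and the JWT-protected /me
	})

	http.ListenAndServe(":3000", r) // assumed port
}
```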
pkg/user/templates/login.tmpl · 7 lines · Normal file (new)

@@ -0,0 +1,7 @@
<form method="post">
    <label>Username</label>
    <input type="text" name="username" class="form-control" placeholder="Username"></input>
    <label>Password</label>
    <input type="password" name="password" class="form-control" placeholder="Password"></input>
    <input type="submit" class="btn btn-success" value="Login"></input>
</form>
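The template is rendered by name ("login") through unrolled/render's embedded-filesystem support, which the Register function in pkg/user/user.go below sets up. A minimal sketch of that lookup, assuming render's defaults (templates directory, .tmpl extension); the variable and handler names here are illustrative, not the ones in this commit:

```go
// Sketch of how the name "login" resolves to templates/login.tmpl via embed.
package user

import (
	"embed"
	"net/http"

	"github.com/unrolled/render"
)

//go:embed templates/*
var loginTemplates embed.FS

var tplRenderer = render.New(render.Options{
	// templates/login.tmpl, templates/layout.tmpl, ... are compiled from the embedded FS.
	FileSystem: &render.EmbedFileSystem{FS: loginTemplates},
})

func showLogin(w http.ResponseWriter, r *http.Request) {
	// "login" is the template name; render maps it to templates/login.tmpl.
	tplRenderer.HTML(w, http.StatusOK, "login", nil)
}
```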
pkg/user/user.go · 129 lines · Normal file (new)

@@ -0,0 +1,129 @@
package user

import (
	"embed"
	"errors"
	"fmt"
	"net/http"
	"time"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/jwtauth/v5"
	"github.com/unrolled/render"
)

// webserver holds the embedded template files served by this package.
//go:embed templates/*
var webserver embed.FS

var ren *render.Render

// tokenAuth signs and verifies the JWTs issued on login.
var tokenAuth *jwtauth.JWTAuth

func Register(router chi.Router) {
	ren = render.New(render.Options{
		//Layout: "layout",
		FileSystem: &render.EmbedFileSystem{
			FS: webserver,
		},
	})

	router.Get("/login", loginForm)
	router.Post("/login", login)
	router.Get("/register", loginForm)
	router.Post("/register", register)

	tokenAuth = jwtauth.New("HS256", []byte("secret"), nil)

	router.Group(func(r chi.Router) {
		r.Use(jwtauth.Verifier(tokenAuth))
		r.Get("/me", func(w http.ResponseWriter, r *http.Request) {
			_, claims, _ := jwtauth.FromContext(r.Context())
			if val, ok := claims["username"]; ok {
				w.Write([]byte(fmt.Sprintf("hi %v", val)))
			} else {
				w.Write([]byte("You are not logged in"))
			}
		})
	})
}

func loginForm(w http.ResponseWriter, r *http.Request) {
	ren.HTML(w, http.StatusOK, "login", nil)
}

func login(w http.ResponseWriter, r *http.Request) {
	r.ParseForm()

	uc := GetUserClient()
	res, _ := uc.login(r.FormValue("username"), r.FormValue("password"))
	if res {
		_, tokenstring, err := tokenAuth.Encode(map[string]interface{}{"username": r.FormValue("username")})
		if err != nil {
			panic(err)
		}
		// Hand the signed token back as a cookie so the Verifier middleware finds it on later requests.
		expiration := time.Now().Add(365 * 24 * time.Hour)
		cookie := http.Cookie{Name: "jwt", Value: tokenstring, Expires: expiration}
		http.SetCookie(w, &cookie)
		w.Write([]byte("Login ok"))
	} else {
		w.Write([]byte("Login failed"))
	}
}

func register(w http.ResponseWriter, r *http.Request) {
	r.ParseForm()

	uc := GetUserClient()
	res, _ := uc.register(r.FormValue("username"), r.FormValue("password"))
	if res {
		w.Write([]byte("Register ok"))
	} else {
		w.Write([]byte("Register failed"))
	}
}

// UserClient is an in-memory username -> password store used for this test.
type UserClient struct {
	users map[string]string
}

var u *UserClient

// GetUserClient lazily creates the shared in-memory user store.
func GetUserClient() *UserClient {
	if u == nil {
		uc := UserClient{}
		uc.users = make(map[string]string)
		uc.users["admin"] = "password"
		u = &uc
	}

	return u
}

func (uc *UserClient) register(username, password string) (bool, error) {
	if _, ok := uc.users[username]; ok {
		return false, errors.New("Username already used")
	}

	uc.users[username] = password
	return true, nil
}

func (uc UserClient) login(username, password string) (bool, error) {
	if val, ok := uc.users[username]; ok {
		fmt.Println("Login for valid user")
		if val == password {
			return true, nil
		}
	} else {
		fmt.Printf("User %v not found", username)
	}

	return false, nil
}
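Note that the /me handler above discards the error from jwtauth.FromContext, so a request without a valid token still reaches the handler and only falls into the "not logged in" branch. jwtauth also ships an Authenticator middleware that rejects such requests outright. A hedged sketch of a stricter variant (API as of jwtauth v5.0.x; RegisterProtected is a hypothetical name, and the hard-coded "secret" key from Register above is assumed to stay for this test):

```go
// Sketch: reject unauthenticated requests instead of branching in the handler.
package user

import (
	"fmt"
	"net/http"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/jwtauth/v5"
)

// RegisterProtected mounts /me so that only requests carrying a valid JWT reach it.
// Verifier already looks for the token in the Authorization header and in the "jwt"
// cookie, which is why the cookie set in login() above is picked up on later requests.
func RegisterProtected(router chi.Router, tokenAuth *jwtauth.JWTAuth) {
	router.Group(func(r chi.Router) {
		r.Use(jwtauth.Verifier(tokenAuth)) // parse and verify the token, stash the result in the context
		r.Use(jwtauth.Authenticator)       // respond 401 when the token is missing, expired, or invalid
		r.Get("/me", func(w http.ResponseWriter, r *http.Request) {
			_, claims, _ := jwtauth.FromContext(r.Context()) // safe: Authenticator already rejected bad tokens
			fmt.Fprintf(w, "hi %v", claims["username"])
		})
	})
}
```

Two further caveats about the committed code itself: the HMAC signing key is the literal []byte("secret"), which is fine for a throwaway test but should come from configuration anywhere else, and the "jwt" cookie is set without HttpOnly, Secure, or SameSite attributes.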
(layout template; file header not captured in this view)

@@ -10,7 +10,11 @@
 <div class="container">
 	<div class="row">
 		<div class="col-md-3">
-			NAV
+			<ul>
+				<li><a href="/register">Register</a></li>
+				<li><a href="/login">Login</a></li>
+				<li><a href="/me">Me</a></li>
+			</ul>
 		</div>
 		<div class="col-md-9"> {{ yield }}</div>
 	</div>
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/LICENSE · 17 lines · generated · vendored · Normal file (new)

ISC License

Copyright (c) 2013-2017 The btcsuite developers
Copyright (c) 2015-2020 The Decred developers
Copyright (c) 2017 The Lightning Network Developers

Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/README.md · 72 lines · generated · vendored · Normal file (new)

secp256k1
=========

[![Build Status](https://github.com/decred/dcrd/workflows/Build%20and%20Test/badge.svg)](https://github.com/decred/dcrd/actions)
[![ISC License](https://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org)
[![Doc](https://img.shields.io/badge/doc-reference-blue.svg)](https://pkg.go.dev/github.com/decred/dcrd/dcrec/secp256k1/v4)

Package secp256k1 implements optimized secp256k1 elliptic curve operations.

This package provides an optimized pure Go implementation of elliptic curve
cryptography operations over the secp256k1 curve as well as data structures and
functions for working with public and private secp256k1 keys. See
https://www.secg.org/sec2-v2.pdf for details on the standard.

In addition, sub packages are provided to produce, verify, parse, and serialize
ECDSA signatures and EC-Schnorr-DCRv0 (a custom Schnorr-based signature scheme
specific to Decred) signatures. See the README.md files in the relevant sub
packages for more details about those aspects.

An overview of the features provided by this package are as follows:

- Private key generation, serialization, and parsing
- Public key generation, serialization and parsing per ANSI X9.62-1998
  - Parses uncompressed, compressed, and hybrid public keys
  - Serializes uncompressed and compressed public keys
- Specialized types for performing optimized and constant time field operations
  - `FieldVal` type for working modulo the secp256k1 field prime
  - `ModNScalar` type for working modulo the secp256k1 group order
- Elliptic curve operations in Jacobian projective coordinates
  - Point addition
  - Point doubling
  - Scalar multiplication with an arbitrary point
  - Scalar multiplication with the base point (group generator)
- Point decompression from a given x coordinate
- Nonce generation via RFC6979 with support for extra data and version
  information that can be used to prevent nonce reuse between signing algorithms

It also provides an implementation of the Go standard library `crypto/elliptic`
`Curve` interface via the `S256` function so that it may be used with other
packages in the standard library such as `crypto/tls`, `crypto/x509`, and
`crypto/ecdsa`. However, in the case of ECDSA, it is highly recommended to use
the `ecdsa` sub package of this package instead since it is optimized
specifically for secp256k1 and is significantly faster as a result.

Although this package was primarily written for dcrd, it has intentionally been
designed so it can be used as a standalone package for any projects needing to
use optimized secp256k1 elliptic curve cryptography.

Finally, a comprehensive suite of tests is provided to provide a high level of
quality assurance.

## secp256k1 use in Decred

At the time of this writing, the primary public key cryptography in widespread
use on the Decred network used to secure coins is based on elliptic curves
defined by the secp256k1 domain parameters.

## Installation and Updating

This package is part of the `github.com/decred/dcrd/dcrec/secp256k1/v3` module.
Use the standard go tooling for working with modules to incorporate it.

## Examples

* [Encryption](https://pkg.go.dev/github.com/decred/dcrd/dcrec/secp256k1/v4#example-package-EncryptDecryptMessage)
  Demonstrates encrypting and decrypting a message using a shared key derived
  through ECDHE.

## License

Package secp256k1 is licensed under the [copyfree](http://copyfree.org) ISC
License.
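This package lands in vendor/ only as an indirect dependency of the jwx JWT stack; none of the project files shown in this commit call it directly. Still, the README's feature list maps onto a small exported API; a minimal sketch of key generation and serialization with the vendored v4 package:

```go
// Minimal sketch of key generation/serialization with the vendored package.
package main

import (
	"encoding/hex"
	"fmt"

	"github.com/decred/dcrd/dcrec/secp256k1/v4"
)

func main() {
	priv, err := secp256k1.GeneratePrivateKey()
	if err != nil {
		panic(err)
	}
	pub := priv.PubKey()
	// 33-byte compressed SEC encoding: 0x02/0x03 prefix followed by the X coordinate.
	fmt.Println(hex.EncodeToString(pub.SerializeCompressed()))
}
```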
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/compressedbytepoints.go · 11 lines · generated · vendored · Normal file (new)
File diff suppressed because one or more lines are too long
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/curve.go · 943 lines · generated · vendored · Normal file (new)

@@ -0,0 +1,943 @@
|
||||||
|
// Copyright (c) 2015-2021 The Decred developers
|
||||||
|
// Copyright 2013-2014 The btcsuite developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/hex"
|
||||||
|
"math/big"
|
||||||
|
)
|
||||||
|
|
||||||
|
// References:
|
||||||
|
// [SECG]: Recommended Elliptic Curve Domain Parameters
|
||||||
|
// https://www.secg.org/sec2-v2.pdf
|
||||||
|
//
|
||||||
|
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||||
|
//
|
||||||
|
// [BRID]: On Binary Representations of Integers with Digits -1, 0, 1
|
||||||
|
// (Prodinger, Helmut)
|
||||||
|
|
||||||
|
// All group operations are performed using Jacobian coordinates. For a given
|
||||||
|
// (x, y) position on the curve, the Jacobian coordinates are (x1, y1, z1)
|
||||||
|
// where x = x1/z1^2 and y = y1/z1^3.
|
||||||
|
|
||||||
|
// hexToFieldVal converts the passed hex string into a FieldVal and will panic
|
||||||
|
// if there is an error. This is only provided for the hard-coded constants so
|
||||||
|
// errors in the source code can be detected. It will only (and must only) be
|
||||||
|
// called with hard-coded values.
|
||||||
|
func hexToFieldVal(s string) *FieldVal {
|
||||||
|
b, err := hex.DecodeString(s)
|
||||||
|
if err != nil {
|
||||||
|
panic("invalid hex in source file: " + s)
|
||||||
|
}
|
||||||
|
var f FieldVal
|
||||||
|
if overflow := f.SetByteSlice(b); overflow {
|
||||||
|
panic("hex in source file overflows mod P: " + s)
|
||||||
|
}
|
||||||
|
return &f
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
// Next 6 constants are from Hal Finney's bitcointalk.org post:
|
||||||
|
// https://bitcointalk.org/index.php?topic=3238.msg45565#msg45565
|
||||||
|
// May he rest in peace.
|
||||||
|
//
|
||||||
|
// They have also been independently derived from the code in the
|
||||||
|
// EndomorphismVectors function in genstatics.go.
|
||||||
|
endomorphismLambda = fromHex("5363ad4cc05c30e0a5261c028812645a122e22ea20816678df02967c1b23bd72")
|
||||||
|
endomorphismBeta = hexToFieldVal("7ae96a2b657c07106e64479eac3434e99cf0497512f58995c1396c28719501ee")
|
||||||
|
endomorphismA1 = fromHex("3086d221a7d46bcde86c90e49284eb15")
|
||||||
|
endomorphismB1 = fromHex("-e4437ed6010e88286f547fa90abfe4c3")
|
||||||
|
endomorphismA2 = fromHex("114ca50f7a8e2f3f657c1108d9d44cfd8")
|
||||||
|
endomorphismB2 = fromHex("3086d221a7d46bcde86c90e49284eb15")
|
||||||
|
|
||||||
|
// Alternatively, the following parameters are valid as well, however, they
|
||||||
|
// seem to be about 8% slower in practice.
|
||||||
|
//
|
||||||
|
// endomorphismLambda = fromHex("AC9C52B33FA3CF1F5AD9E3FD77ED9BA4A880B9FC8EC739C2E0CFC810B51283CE")
|
||||||
|
// endomorphismBeta = hexToFieldVal("851695D49A83F8EF919BB86153CBCB16630FB68AED0A766A3EC693D68E6AFA40")
|
||||||
|
// endomorphismA1 = fromHex("E4437ED6010E88286F547FA90ABFE4C3")
|
||||||
|
// endomorphismB1 = fromHex("-3086D221A7D46BCDE86C90E49284EB15")
|
||||||
|
// endomorphismA2 = fromHex("3086D221A7D46BCDE86C90E49284EB15")
|
||||||
|
// endomorphismB2 = fromHex("114CA50F7A8E2F3F657C1108D9D44CFD8")
|
||||||
|
)
|
||||||
|
|
||||||
|
// JacobianPoint is an element of the group formed by the secp256k1 curve in
|
||||||
|
// Jacobian projective coordinates and thus represents a point on the curve.
|
||||||
|
type JacobianPoint struct {
|
||||||
|
// The X coordinate in Jacobian projective coordinates. The affine point is
|
||||||
|
// X/z^2.
|
||||||
|
X FieldVal
|
||||||
|
|
||||||
|
// The Y coordinate in Jacobian projective coordinates. The affine point is
|
||||||
|
// Y/z^3.
|
||||||
|
Y FieldVal
|
||||||
|
|
||||||
|
// The Z coordinate in Jacobian projective coordinates.
|
||||||
|
Z FieldVal
|
||||||
|
}
|
||||||
|
|
||||||
|
// MakeJacobianPoint returns a Jacobian point with the provided X, Y, and Z
|
||||||
|
// coordinates.
|
||||||
|
func MakeJacobianPoint(x, y, z *FieldVal) JacobianPoint {
|
||||||
|
var p JacobianPoint
|
||||||
|
p.X.Set(x)
|
||||||
|
p.Y.Set(y)
|
||||||
|
p.Z.Set(z)
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set sets the Jacobian point to the provided point.
|
||||||
|
func (p *JacobianPoint) Set(other *JacobianPoint) {
|
||||||
|
p.X.Set(&other.X)
|
||||||
|
p.Y.Set(&other.Y)
|
||||||
|
p.Z.Set(&other.Z)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToAffine reduces the Z value of the existing point to 1 effectively
|
||||||
|
// making it an affine coordinate in constant time. The point will be
|
||||||
|
// normalized.
|
||||||
|
func (p *JacobianPoint) ToAffine() {
|
||||||
|
// Inversions are expensive and both point addition and point doubling
|
||||||
|
// are faster when working with points that have a z value of one. So,
|
||||||
|
// if the point needs to be converted to affine, go ahead and normalize
|
||||||
|
// the point itself at the same time as the calculation is the same.
|
||||||
|
var zInv, tempZ FieldVal
|
||||||
|
zInv.Set(&p.Z).Inverse() // zInv = Z^-1
|
||||||
|
tempZ.SquareVal(&zInv) // tempZ = Z^-2
|
||||||
|
p.X.Mul(&tempZ) // X = X/Z^2 (mag: 1)
|
||||||
|
p.Y.Mul(tempZ.Mul(&zInv)) // Y = Y/Z^3 (mag: 1)
|
||||||
|
p.Z.SetInt(1) // Z = 1 (mag: 1)
|
||||||
|
|
||||||
|
// Normalize the x and y values.
|
||||||
|
p.X.Normalize()
|
||||||
|
p.Y.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// addZ1AndZ2EqualsOne adds two Jacobian points that are already known to have
|
||||||
|
// z values of 1 and stores the result in the provided result param. That is to
|
||||||
|
// say result = p1 + p2. It performs faster addition than the generic add
|
||||||
|
// routine since less arithmetic is needed due to the ability to avoid the z
|
||||||
|
// value multiplications.
|
||||||
|
//
|
||||||
|
// NOTE: The points must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func addZ1AndZ2EqualsOne(p1, p2, result *JacobianPoint) {
|
||||||
|
// To compute the point addition efficiently, this implementation splits
|
||||||
|
// the equation into intermediate elements which are used to minimize
|
||||||
|
// the number of field multiplications using the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-mmadd-2007-bl
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// H = X2-X1, HH = H^2, I = 4*HH, J = H*I, r = 2*(Y2-Y1), V = X1*I
|
||||||
|
// X3 = r^2-J-2*V, Y3 = r*(V-X3)-2*Y1*J, Z3 = 2*H
|
||||||
|
//
|
||||||
|
// This results in a cost of 4 field multiplications, 2 field squarings,
|
||||||
|
// 6 field additions, and 5 integer multiplications.
|
||||||
|
x1, y1 := &p1.X, &p1.Y
|
||||||
|
x2, y2 := &p2.X, &p2.Y
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
|
||||||
|
// When the x coordinates are the same for two points on the curve, the
|
||||||
|
// y coordinates either must be the same, in which case it is point
|
||||||
|
// doubling, or they are opposite and the result is the point at
|
||||||
|
// infinity per the group law for elliptic curve cryptography.
|
||||||
|
if x1.Equals(x2) {
|
||||||
|
if y1.Equals(y2) {
|
||||||
|
// Since x1 == x2 and y1 == y2, point doubling must be
|
||||||
|
// done, otherwise the addition would end up dividing
|
||||||
|
// by zero.
|
||||||
|
DoubleNonConst(p1, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Since x1 == x2 and y1 == -y2, the sum is the point at
|
||||||
|
// infinity per the group law.
|
||||||
|
x3.SetInt(0)
|
||||||
|
y3.SetInt(0)
|
||||||
|
z3.SetInt(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate X3, Y3, and Z3 according to the intermediate elements
|
||||||
|
// breakdown above.
|
||||||
|
var h, i, j, r, v FieldVal
|
||||||
|
var negJ, neg2V, negX3 FieldVal
|
||||||
|
h.Set(x1).Negate(1).Add(x2) // H = X2-X1 (mag: 3)
|
||||||
|
i.SquareVal(&h).MulInt(4) // I = 4*H^2 (mag: 4)
|
||||||
|
j.Mul2(&h, &i) // J = H*I (mag: 1)
|
||||||
|
r.Set(y1).Negate(1).Add(y2).MulInt(2) // r = 2*(Y2-Y1) (mag: 6)
|
||||||
|
v.Mul2(x1, &i) // V = X1*I (mag: 1)
|
||||||
|
negJ.Set(&j).Negate(1) // negJ = -J (mag: 2)
|
||||||
|
neg2V.Set(&v).MulInt(2).Negate(2) // neg2V = -(2*V) (mag: 3)
|
||||||
|
x3.Set(&r).Square().Add(&negJ).Add(&neg2V) // X3 = r^2-J-2*V (mag: 6)
|
||||||
|
negX3.Set(x3).Negate(6) // negX3 = -X3 (mag: 7)
|
||||||
|
j.Mul(y1).MulInt(2).Negate(2) // J = -(2*Y1*J) (mag: 3)
|
||||||
|
y3.Set(&v).Add(&negX3).Mul(&r).Add(&j) // Y3 = r*(V-X3)-2*Y1*J (mag: 4)
|
||||||
|
z3.Set(&h).MulInt(2) // Z3 = 2*H (mag: 6)
|
||||||
|
|
||||||
|
// Normalize the resulting field values to a magnitude of 1 as needed.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// addZ1EqualsZ2 adds two Jacobian points that are already known to have the
|
||||||
|
// same z value and stores the result in the provided result param. That is to
|
||||||
|
// say result = p1 + p2. It performs faster addition than the generic add
|
||||||
|
// routine since less arithmetic is needed due to the known equivalence.
|
||||||
|
//
|
||||||
|
// NOTE: The points must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func addZ1EqualsZ2(p1, p2, result *JacobianPoint) {
|
||||||
|
// To compute the point addition efficiently, this implementation splits
|
||||||
|
// the equation into intermediate elements which are used to minimize
|
||||||
|
// the number of field multiplications using a slightly modified version
|
||||||
|
// of the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-mmadd-2007-bl
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// A = X2-X1, B = A^2, C=Y2-Y1, D = C^2, E = X1*B, F = X2*B
|
||||||
|
// X3 = D-E-F, Y3 = C*(E-X3)-Y1*(F-E), Z3 = Z1*A
|
||||||
|
//
|
||||||
|
// This results in a cost of 5 field multiplications, 2 field squarings,
|
||||||
|
// 9 field additions, and 0 integer multiplications.
|
||||||
|
x1, y1, z1 := &p1.X, &p1.Y, &p1.Z
|
||||||
|
x2, y2 := &p2.X, &p2.Y
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
|
||||||
|
// When the x coordinates are the same for two points on the curve, the
|
||||||
|
// y coordinates either must be the same, in which case it is point
|
||||||
|
// doubling, or they are opposite and the result is the point at
|
||||||
|
// infinity per the group law for elliptic curve cryptography.
|
||||||
|
if x1.Equals(x2) {
|
||||||
|
if y1.Equals(y2) {
|
||||||
|
// Since x1 == x2 and y1 == y2, point doubling must be
|
||||||
|
// done, otherwise the addition would end up dividing
|
||||||
|
// by zero.
|
||||||
|
DoubleNonConst(p1, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Since x1 == x2 and y1 == -y2, the sum is the point at
|
||||||
|
// infinity per the group law.
|
||||||
|
x3.SetInt(0)
|
||||||
|
y3.SetInt(0)
|
||||||
|
z3.SetInt(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate X3, Y3, and Z3 according to the intermediate elements
|
||||||
|
// breakdown above.
|
||||||
|
var a, b, c, d, e, f FieldVal
|
||||||
|
var negX1, negY1, negE, negX3 FieldVal
|
||||||
|
negX1.Set(x1).Negate(1) // negX1 = -X1 (mag: 2)
|
||||||
|
negY1.Set(y1).Negate(1) // negY1 = -Y1 (mag: 2)
|
||||||
|
a.Set(&negX1).Add(x2) // A = X2-X1 (mag: 3)
|
||||||
|
b.SquareVal(&a) // B = A^2 (mag: 1)
|
||||||
|
c.Set(&negY1).Add(y2) // C = Y2-Y1 (mag: 3)
|
||||||
|
d.SquareVal(&c) // D = C^2 (mag: 1)
|
||||||
|
e.Mul2(x1, &b) // E = X1*B (mag: 1)
|
||||||
|
negE.Set(&e).Negate(1) // negE = -E (mag: 2)
|
||||||
|
f.Mul2(x2, &b) // F = X2*B (mag: 1)
|
||||||
|
x3.Add2(&e, &f).Negate(3).Add(&d) // X3 = D-E-F (mag: 5)
|
||||||
|
negX3.Set(x3).Negate(5).Normalize() // negX3 = -X3 (mag: 1)
|
||||||
|
y3.Set(y1).Mul(f.Add(&negE)).Negate(3) // Y3 = -(Y1*(F-E)) (mag: 4)
|
||||||
|
y3.Add(e.Add(&negX3).Mul(&c)) // Y3 = C*(E-X3)+Y3 (mag: 5)
|
||||||
|
z3.Mul2(z1, &a) // Z3 = Z1*A (mag: 1)
|
||||||
|
|
||||||
|
// Normalize the resulting field values to a magnitude of 1 as needed.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// addZ2EqualsOne adds two Jacobian points when the second point is already
|
||||||
|
// known to have a z value of 1 (and the z value for the first point is not 1)
|
||||||
|
// and stores the result in the provided result param. That is to say result =
|
||||||
|
// p1 + p2. It performs faster addition than the generic add routine since
|
||||||
|
// less arithmetic is needed due to the ability to avoid multiplications by the
|
||||||
|
// second point's z value.
|
||||||
|
//
|
||||||
|
// NOTE: The points must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func addZ2EqualsOne(p1, p2, result *JacobianPoint) {
|
||||||
|
// To compute the point addition efficiently, this implementation splits
|
||||||
|
// the equation into intermediate elements which are used to minimize
|
||||||
|
// the number of field multiplications using the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-madd-2007-bl
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// Z1Z1 = Z1^2, U2 = X2*Z1Z1, S2 = Y2*Z1*Z1Z1, H = U2-X1, HH = H^2,
|
||||||
|
// I = 4*HH, J = H*I, r = 2*(S2-Y1), V = X1*I
|
||||||
|
// X3 = r^2-J-2*V, Y3 = r*(V-X3)-2*Y1*J, Z3 = (Z1+H)^2-Z1Z1-HH
|
||||||
|
//
|
||||||
|
// This results in a cost of 7 field multiplications, 4 field squarings,
|
||||||
|
// 9 field additions, and 4 integer multiplications.
|
||||||
|
x1, y1, z1 := &p1.X, &p1.Y, &p1.Z
|
||||||
|
x2, y2 := &p2.X, &p2.Y
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
|
||||||
|
// When the x coordinates are the same for two points on the curve, the
|
||||||
|
// y coordinates either must be the same, in which case it is point
|
||||||
|
// doubling, or they are opposite and the result is the point at
|
||||||
|
// infinity per the group law for elliptic curve cryptography. Since
|
||||||
|
// any number of Jacobian coordinates can represent the same affine
|
||||||
|
// point, the x and y values need to be converted to like terms. Due to
|
||||||
|
// the assumption made for this function that the second point has a z
|
||||||
|
// value of 1 (z2=1), the first point is already "converted".
|
||||||
|
var z1z1, u2, s2 FieldVal
|
||||||
|
z1z1.SquareVal(z1) // Z1Z1 = Z1^2 (mag: 1)
|
||||||
|
u2.Set(x2).Mul(&z1z1).Normalize() // U2 = X2*Z1Z1 (mag: 1)
|
||||||
|
s2.Set(y2).Mul(&z1z1).Mul(z1).Normalize() // S2 = Y2*Z1*Z1Z1 (mag: 1)
|
||||||
|
if x1.Equals(&u2) {
|
||||||
|
if y1.Equals(&s2) {
|
||||||
|
// Since x1 == x2 and y1 == y2, point doubling must be
|
||||||
|
// done, otherwise the addition would end up dividing
|
||||||
|
// by zero.
|
||||||
|
DoubleNonConst(p1, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Since x1 == x2 and y1 == -y2, the sum is the point at
|
||||||
|
// infinity per the group law.
|
||||||
|
x3.SetInt(0)
|
||||||
|
y3.SetInt(0)
|
||||||
|
z3.SetInt(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate X3, Y3, and Z3 according to the intermediate elements
|
||||||
|
// breakdown above.
|
||||||
|
var h, hh, i, j, r, rr, v FieldVal
|
||||||
|
var negX1, negY1, negX3 FieldVal
|
||||||
|
negX1.Set(x1).Negate(1) // negX1 = -X1 (mag: 2)
|
||||||
|
h.Add2(&u2, &negX1) // H = U2-X1 (mag: 3)
|
||||||
|
hh.SquareVal(&h) // HH = H^2 (mag: 1)
|
||||||
|
i.Set(&hh).MulInt(4) // I = 4 * HH (mag: 4)
|
||||||
|
j.Mul2(&h, &i) // J = H*I (mag: 1)
|
||||||
|
negY1.Set(y1).Negate(1) // negY1 = -Y1 (mag: 2)
|
||||||
|
r.Set(&s2).Add(&negY1).MulInt(2) // r = 2*(S2-Y1) (mag: 6)
|
||||||
|
rr.SquareVal(&r) // rr = r^2 (mag: 1)
|
||||||
|
v.Mul2(x1, &i) // V = X1*I (mag: 1)
|
||||||
|
x3.Set(&v).MulInt(2).Add(&j).Negate(3) // X3 = -(J+2*V) (mag: 4)
|
||||||
|
x3.Add(&rr) // X3 = r^2+X3 (mag: 5)
|
||||||
|
negX3.Set(x3).Negate(5) // negX3 = -X3 (mag: 6)
|
||||||
|
y3.Set(y1).Mul(&j).MulInt(2).Negate(2) // Y3 = -(2*Y1*J) (mag: 3)
|
||||||
|
y3.Add(v.Add(&negX3).Mul(&r)) // Y3 = r*(V-X3)+Y3 (mag: 4)
|
||||||
|
z3.Add2(z1, &h).Square() // Z3 = (Z1+H)^2 (mag: 1)
|
||||||
|
z3.Add(z1z1.Add(&hh).Negate(2)) // Z3 = Z3-(Z1Z1+HH) (mag: 4)
|
||||||
|
|
||||||
|
// Normalize the resulting field values to a magnitude of 1 as needed.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// addGeneric adds two Jacobian points without any assumptions about the z
|
||||||
|
// values of the two points and stores the result in the provided result param.
|
||||||
|
// That is to say result = p1 + p2. It is the slowest of the add routines due
|
||||||
|
// to requiring the most arithmetic.
|
||||||
|
//
|
||||||
|
// NOTE: The points must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func addGeneric(p1, p2, result *JacobianPoint) {
|
||||||
|
// To compute the point addition efficiently, this implementation splits
|
||||||
|
// the equation into intermediate elements which are used to minimize
|
||||||
|
// the number of field multiplications using the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// Z1Z1 = Z1^2, Z2Z2 = Z2^2, U1 = X1*Z2Z2, U2 = X2*Z1Z1, S1 = Y1*Z2*Z2Z2
|
||||||
|
// S2 = Y2*Z1*Z1Z1, H = U2-U1, I = (2*H)^2, J = H*I, r = 2*(S2-S1)
|
||||||
|
// V = U1*I
|
||||||
|
// X3 = r^2-J-2*V, Y3 = r*(V-X3)-2*S1*J, Z3 = ((Z1+Z2)^2-Z1Z1-Z2Z2)*H
|
||||||
|
//
|
||||||
|
// This results in a cost of 11 field multiplications, 5 field squarings,
|
||||||
|
// 9 field additions, and 4 integer multiplications.
|
||||||
|
x1, y1, z1 := &p1.X, &p1.Y, &p1.Z
|
||||||
|
x2, y2, z2 := &p2.X, &p2.Y, &p2.Z
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
|
||||||
|
// When the x coordinates are the same for two points on the curve, the
|
||||||
|
// y coordinates either must be the same, in which case it is point
|
||||||
|
// doubling, or they are opposite and the result is the point at
|
||||||
|
// infinity. Since any number of Jacobian coordinates can represent the
|
||||||
|
// same affine point, the x and y values need to be converted to like
|
||||||
|
// terms.
|
||||||
|
var z1z1, z2z2, u1, u2, s1, s2 FieldVal
|
||||||
|
z1z1.SquareVal(z1) // Z1Z1 = Z1^2 (mag: 1)
|
||||||
|
z2z2.SquareVal(z2) // Z2Z2 = Z2^2 (mag: 1)
|
||||||
|
u1.Set(x1).Mul(&z2z2).Normalize() // U1 = X1*Z2Z2 (mag: 1)
|
||||||
|
u2.Set(x2).Mul(&z1z1).Normalize() // U2 = X2*Z1Z1 (mag: 1)
|
||||||
|
s1.Set(y1).Mul(&z2z2).Mul(z2).Normalize() // S1 = Y1*Z2*Z2Z2 (mag: 1)
|
||||||
|
s2.Set(y2).Mul(&z1z1).Mul(z1).Normalize() // S2 = Y2*Z1*Z1Z1 (mag: 1)
|
||||||
|
if u1.Equals(&u2) {
|
||||||
|
if s1.Equals(&s2) {
|
||||||
|
// Since x1 == x2 and y1 == y2, point doubling must be
|
||||||
|
// done, otherwise the addition would end up dividing
|
||||||
|
// by zero.
|
||||||
|
DoubleNonConst(p1, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Since x1 == x2 and y1 == -y2, the sum is the point at
|
||||||
|
// infinity per the group law.
|
||||||
|
x3.SetInt(0)
|
||||||
|
y3.SetInt(0)
|
||||||
|
z3.SetInt(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate X3, Y3, and Z3 according to the intermediate elements
|
||||||
|
// breakdown above.
|
||||||
|
var h, i, j, r, rr, v FieldVal
|
||||||
|
var negU1, negS1, negX3 FieldVal
|
||||||
|
negU1.Set(&u1).Negate(1) // negU1 = -U1 (mag: 2)
|
||||||
|
h.Add2(&u2, &negU1) // H = U2-U1 (mag: 3)
|
||||||
|
i.Set(&h).MulInt(2).Square() // I = (2*H)^2 (mag: 2)
|
||||||
|
j.Mul2(&h, &i) // J = H*I (mag: 1)
|
||||||
|
negS1.Set(&s1).Negate(1) // negS1 = -S1 (mag: 2)
|
||||||
|
r.Set(&s2).Add(&negS1).MulInt(2) // r = 2*(S2-S1) (mag: 6)
|
||||||
|
rr.SquareVal(&r) // rr = r^2 (mag: 1)
|
||||||
|
v.Mul2(&u1, &i) // V = U1*I (mag: 1)
|
||||||
|
x3.Set(&v).MulInt(2).Add(&j).Negate(3) // X3 = -(J+2*V) (mag: 4)
|
||||||
|
x3.Add(&rr) // X3 = r^2+X3 (mag: 5)
|
||||||
|
negX3.Set(x3).Negate(5) // negX3 = -X3 (mag: 6)
|
||||||
|
y3.Mul2(&s1, &j).MulInt(2).Negate(2) // Y3 = -(2*S1*J) (mag: 3)
|
||||||
|
y3.Add(v.Add(&negX3).Mul(&r)) // Y3 = r*(V-X3)+Y3 (mag: 4)
|
||||||
|
z3.Add2(z1, z2).Square() // Z3 = (Z1+Z2)^2 (mag: 1)
|
||||||
|
z3.Add(z1z1.Add(&z2z2).Negate(2)) // Z3 = Z3-(Z1Z1+Z2Z2) (mag: 4)
|
||||||
|
z3.Mul(&h) // Z3 = Z3*H (mag: 1)
|
||||||
|
|
||||||
|
// Normalize the resulting field values to a magnitude of 1 as needed.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddNonConst adds the passed Jacobian points together and stores the result in
|
||||||
|
// the provided result param in *non-constant* time.
|
||||||
|
//
|
||||||
|
// NOTE: The points must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func AddNonConst(p1, p2, result *JacobianPoint) {
|
||||||
|
// A point at infinity is the identity according to the group law for
|
||||||
|
// elliptic curve cryptography. Thus, ∞ + P = P and P + ∞ = P.
|
||||||
|
if (p1.X.IsZero() && p1.Y.IsZero()) || p1.Z.IsZero() {
|
||||||
|
result.Set(p2)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if (p2.X.IsZero() && p2.Y.IsZero()) || p2.Z.IsZero() {
|
||||||
|
result.Set(p1)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Faster point addition can be achieved when certain assumptions are
|
||||||
|
// met. For example, when both points have the same z value, arithmetic
|
||||||
|
// on the z values can be avoided. This section thus checks for these
|
||||||
|
// conditions and calls an appropriate add function which is accelerated
|
||||||
|
// by using those assumptions.
|
||||||
|
isZ1One := p1.Z.IsOne()
|
||||||
|
isZ2One := p2.Z.IsOne()
|
||||||
|
switch {
|
||||||
|
case isZ1One && isZ2One:
|
||||||
|
addZ1AndZ2EqualsOne(p1, p2, result)
|
||||||
|
return
|
||||||
|
case p1.Z.Equals(&p2.Z):
|
||||||
|
addZ1EqualsZ2(p1, p2, result)
|
||||||
|
return
|
||||||
|
case isZ2One:
|
||||||
|
addZ2EqualsOne(p1, p2, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// None of the above assumptions are true, so fall back to generic
|
||||||
|
// point addition.
|
||||||
|
addGeneric(p1, p2, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// doubleZ1EqualsOne performs point doubling on the passed Jacobian point when
|
||||||
|
// the point is already known to have a z value of 1 and stores the result in
|
||||||
|
// the provided result param. That is to say result = 2*p. It performs faster
|
||||||
|
// point doubling than the generic routine since less arithmetic is needed due
|
||||||
|
// to the ability to avoid multiplication by the z value.
|
||||||
|
//
|
||||||
|
// NOTE: The resulting point will be normalized.
|
||||||
|
func doubleZ1EqualsOne(p, result *JacobianPoint) {
|
||||||
|
// This function uses the assumptions that z1 is 1, thus the point
|
||||||
|
// doubling formulas reduce to:
|
||||||
|
//
|
||||||
|
// X3 = (3*X1^2)^2 - 8*X1*Y1^2
|
||||||
|
// Y3 = (3*X1^2)*(4*X1*Y1^2 - X3) - 8*Y1^4
|
||||||
|
// Z3 = 2*Y1
|
||||||
|
//
|
||||||
|
// To compute the above efficiently, this implementation splits the
|
||||||
|
// equation into intermediate elements which are used to minimize the
|
||||||
|
// number of field multiplications in favor of field squarings which
|
||||||
|
// are roughly 35% faster than field multiplications with the current
|
||||||
|
// implementation at the time this was written.
|
||||||
|
//
|
||||||
|
// This uses a slightly modified version of the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-mdbl-2007-bl
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// A = X1^2, B = Y1^2, C = B^2, D = 2*((X1+B)^2-A-C)
|
||||||
|
// E = 3*A, F = E^2, X3 = F-2*D, Y3 = E*(D-X3)-8*C
|
||||||
|
// Z3 = 2*Y1
|
||||||
|
//
|
||||||
|
// This results in a cost of 1 field multiplication, 5 field squarings,
|
||||||
|
// 6 field additions, and 5 integer multiplications.
|
||||||
|
x1, y1 := &p.X, &p.Y
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
var a, b, c, d, e, f FieldVal
|
||||||
|
z3.Set(y1).MulInt(2) // Z3 = 2*Y1 (mag: 2)
|
||||||
|
a.SquareVal(x1) // A = X1^2 (mag: 1)
|
||||||
|
b.SquareVal(y1) // B = Y1^2 (mag: 1)
|
||||||
|
c.SquareVal(&b) // C = B^2 (mag: 1)
|
||||||
|
b.Add(x1).Square() // B = (X1+B)^2 (mag: 1)
|
||||||
|
d.Set(&a).Add(&c).Negate(2) // D = -(A+C) (mag: 3)
|
||||||
|
d.Add(&b).MulInt(2) // D = 2*(B+D) (mag: 8)
|
||||||
|
e.Set(&a).MulInt(3) // E = 3*A (mag: 3)
|
||||||
|
f.SquareVal(&e) // F = E^2 (mag: 1)
|
||||||
|
x3.Set(&d).MulInt(2).Negate(16) // X3 = -(2*D) (mag: 17)
|
||||||
|
x3.Add(&f) // X3 = F+X3 (mag: 18)
|
||||||
|
f.Set(x3).Negate(18).Add(&d).Normalize() // F = D-X3 (mag: 1)
|
||||||
|
y3.Set(&c).MulInt(8).Negate(8) // Y3 = -(8*C) (mag: 9)
|
||||||
|
y3.Add(f.Mul(&e)) // Y3 = E*F+Y3 (mag: 10)
|
||||||
|
|
||||||
|
// Normalize the field values back to a magnitude of 1.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// doubleGeneric performs point doubling on the passed Jacobian point without
|
||||||
|
// any assumptions about the z value and stores the result in the provided
|
||||||
|
// result param. That is to say result = 2*p. It is the slowest of the point
|
||||||
|
// doubling routines due to requiring the most arithmetic.
|
||||||
|
//
|
||||||
|
// NOTE: The resulting point will be normalized.
|
||||||
|
func doubleGeneric(p, result *JacobianPoint) {
|
||||||
|
// Point doubling formula for Jacobian coordinates for the secp256k1
|
||||||
|
// curve:
|
||||||
|
//
|
||||||
|
// X3 = (3*X1^2)^2 - 8*X1*Y1^2
|
||||||
|
// Y3 = (3*X1^2)*(4*X1*Y1^2 - X3) - 8*Y1^4
|
||||||
|
// Z3 = 2*Y1*Z1
|
||||||
|
//
|
||||||
|
// To compute the above efficiently, this implementation splits the
|
||||||
|
// equation into intermediate elements which are used to minimize the
|
||||||
|
// number of field multiplications in favor of field squarings which
|
||||||
|
// are roughly 35% faster than field multiplications with the current
|
||||||
|
// implementation at the time this was written.
|
||||||
|
//
|
||||||
|
// This uses a slightly modified version of the method shown at:
|
||||||
|
// https://hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l
|
||||||
|
//
|
||||||
|
// In particular it performs the calculations using the following:
|
||||||
|
// A = X1^2, B = Y1^2, C = B^2, D = 2*((X1+B)^2-A-C)
|
||||||
|
// E = 3*A, F = E^2, X3 = F-2*D, Y3 = E*(D-X3)-8*C
|
||||||
|
// Z3 = 2*Y1*Z1
|
||||||
|
//
|
||||||
|
// This results in a cost of 1 field multiplication, 5 field squarings,
|
||||||
|
// 6 field additions, and 5 integer multiplications.
|
||||||
|
x1, y1, z1 := &p.X, &p.Y, &p.Z
|
||||||
|
x3, y3, z3 := &result.X, &result.Y, &result.Z
|
||||||
|
var a, b, c, d, e, f FieldVal
|
||||||
|
z3.Mul2(y1, z1).MulInt(2) // Z3 = 2*Y1*Z1 (mag: 2)
|
||||||
|
a.SquareVal(x1) // A = X1^2 (mag: 1)
|
||||||
|
b.SquareVal(y1) // B = Y1^2 (mag: 1)
|
||||||
|
c.SquareVal(&b) // C = B^2 (mag: 1)
|
||||||
|
b.Add(x1).Square() // B = (X1+B)^2 (mag: 1)
|
||||||
|
d.Set(&a).Add(&c).Negate(2) // D = -(A+C) (mag: 3)
|
||||||
|
d.Add(&b).MulInt(2) // D = 2*(B+D) (mag: 8)
|
||||||
|
e.Set(&a).MulInt(3) // E = 3*A (mag: 3)
|
||||||
|
f.SquareVal(&e) // F = E^2 (mag: 1)
|
||||||
|
x3.Set(&d).MulInt(2).Negate(16) // X3 = -(2*D) (mag: 17)
|
||||||
|
x3.Add(&f) // X3 = F+X3 (mag: 18)
|
||||||
|
f.Set(x3).Negate(18).Add(&d).Normalize() // F = D-X3 (mag: 1)
|
||||||
|
y3.Set(&c).MulInt(8).Negate(8) // Y3 = -(8*C) (mag: 9)
|
||||||
|
y3.Add(f.Mul(&e)) // Y3 = E*F+Y3 (mag: 10)
|
||||||
|
|
||||||
|
// Normalize the field values back to a magnitude of 1.
|
||||||
|
x3.Normalize()
|
||||||
|
y3.Normalize()
|
||||||
|
z3.Normalize()
|
||||||
|
}
|
||||||
|
|
||||||
|
// DoubleNonConst doubles the passed Jacobian point and stores the result in the
|
||||||
|
// provided result parameter in *non-constant* time.
|
||||||
|
//
|
||||||
|
// NOTE: The point must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func DoubleNonConst(p, result *JacobianPoint) {
|
||||||
|
// Doubling a point at infinity is still infinity.
|
||||||
|
if p.Y.IsZero() || p.Z.IsZero() {
|
||||||
|
result.X.SetInt(0)
|
||||||
|
result.Y.SetInt(0)
|
||||||
|
result.Z.SetInt(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Slightly faster point doubling can be achieved when the z value is 1
|
||||||
|
// by avoiding the multiplication on the z value. This section calls
|
||||||
|
// a point doubling function which is accelerated by using that
|
||||||
|
// assumption when possible.
|
||||||
|
if p.Z.IsOne() {
|
||||||
|
doubleZ1EqualsOne(p, result)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fall back to generic point doubling which works with arbitrary z
|
||||||
|
// values.
|
||||||
|
doubleGeneric(p, result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// splitK returns a balanced length-two representation of k and their signs.
|
||||||
|
// This is algorithm 3.74 from [GECC].
|
||||||
|
//
|
||||||
|
// One thing of note about this algorithm is that no matter what c1 and c2 are,
|
||||||
|
// the final equation of k = k1 + k2 * lambda (mod n) will hold. This is
|
||||||
|
// provable mathematically due to how a1/b1/a2/b2 are computed.
|
||||||
|
//
|
||||||
|
// c1 and c2 are chosen to minimize the max(k1,k2).
|
||||||
|
func splitK(k []byte) ([]byte, []byte, int, int) {
|
||||||
|
// All math here is done with big.Int, which is slow.
|
||||||
|
// At some point, it might be useful to write something similar to
|
||||||
|
// FieldVal but for N instead of P as the prime field if this ends up
|
||||||
|
// being a bottleneck.
|
||||||
|
bigIntK := new(big.Int)
|
||||||
|
c1, c2 := new(big.Int), new(big.Int)
|
||||||
|
tmp1, tmp2 := new(big.Int), new(big.Int)
|
||||||
|
k1, k2 := new(big.Int), new(big.Int)
|
||||||
|
|
||||||
|
bigIntK.SetBytes(k)
|
||||||
|
// c1 = round(b2 * k / n) from step 4.
|
||||||
|
// Rounding isn't really necessary and costs too much, hence skipped
|
||||||
|
c1.Mul(endomorphismB2, bigIntK)
|
||||||
|
c1.Div(c1, curveParams.N)
|
||||||
|
// c2 = round(b1 * k / n) from step 4 (sign reversed to optimize one step)
|
||||||
|
// Rounding isn't really necessary and costs too much, hence skipped
|
||||||
|
c2.Mul(endomorphismB1, bigIntK)
|
||||||
|
c2.Div(c2, curveParams.N)
|
||||||
|
// k1 = k - c1 * a1 - c2 * a2 from step 5 (note c2's sign is reversed)
|
||||||
|
tmp1.Mul(c1, endomorphismA1)
|
||||||
|
tmp2.Mul(c2, endomorphismA2)
|
||||||
|
k1.Sub(bigIntK, tmp1)
|
||||||
|
k1.Add(k1, tmp2)
|
||||||
|
// k2 = - c1 * b1 - c2 * b2 from step 5 (note c2's sign is reversed)
|
||||||
|
tmp1.Mul(c1, endomorphismB1)
|
||||||
|
tmp2.Mul(c2, endomorphismB2)
|
||||||
|
k2.Sub(tmp2, tmp1)
|
||||||
|
|
||||||
|
// Note Bytes() throws out the sign of k1 and k2. This matters
|
||||||
|
// since k1 and/or k2 can be negative. Hence, we pass that
|
||||||
|
// back separately.
|
||||||
|
return k1.Bytes(), k2.Bytes(), k1.Sign(), k2.Sign()
|
||||||
|
}
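// Illustrative sketch (not part of the vendored file): checking the splitK
// invariant k = k1 + k2*λ (mod N) with big.Int, using the package-level
// endomorphismLambda constant and curveParams. The sign results convert the
// returned magnitudes back to signed values.
func checkSplitK(k []byte) bool {
	k1Bytes, k2Bytes, signK1, signK2 := splitK(k)
	k1 := new(big.Int).SetBytes(k1Bytes)
	k2 := new(big.Int).SetBytes(k2Bytes)
	if signK1 < 0 {
		k1.Neg(k1)
	}
	if signK2 < 0 {
		k2.Neg(k2)
	}

	// Recombine and compare against the original scalar mod N.
	recombined := new(big.Int).Mul(k2, endomorphismLambda)
	recombined.Add(recombined, k1)
	recombined.Mod(recombined, curveParams.N)
	expected := new(big.Int).Mod(new(big.Int).SetBytes(k), curveParams.N)
	return recombined.Cmp(expected) == 0
}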
|
||||||
|
|
||||||
|
// nafScalar represents a positive integer up to a maximum value of 2^256 - 1
|
||||||
|
// encoded in non-adjacent form.
|
||||||
|
//
|
||||||
|
// NAF is a signed-digit representation where each digit can be +1, 0, or -1.
|
||||||
|
//
|
||||||
|
// In order to efficiently encode that information, this type uses two arrays, a
|
||||||
|
// "positive" array where set bits represent the +1 signed digits and a
|
||||||
|
// "negative" array where set bits represent the -1 signed digits. 0 is
|
||||||
|
// represented by neither array having a bit set in that position.
|
||||||
|
//
|
||||||
|
// The Pos and Neg methods return the aforementioned positive and negative
|
||||||
|
// arrays, respectively.
|
||||||
|
type nafScalar struct {
|
||||||
|
// pos houses the positive portion of the representation. An additional
|
||||||
|
// byte is required for the positive portion because the NAF encoding can be
|
||||||
|
// up to 1 bit longer than the normal binary encoding of the value.
|
||||||
|
//
|
||||||
|
// neg houses the negative portion of the representation. Even though the
|
||||||
|
// additional byte is not required for the negative portion, since it can
|
||||||
|
// never exceed the length of the normal binary encoding of the value,
|
||||||
|
// keeping the same length for positive and negative portions simplifies
|
||||||
|
// working with the representation and allows extra conditional branches to
|
||||||
|
// be avoided.
|
||||||
|
//
|
||||||
|
// start and end specify the starting and ending index to use within the pos
|
||||||
|
// and neg arrays, respectively. This allows fixed size arrays to be used
|
||||||
|
// versus needing to dynamically allocate space on the heap.
|
||||||
|
//
|
||||||
|
// NOTE: The fields are defined in the order that they are to minimize the
|
||||||
|
// padding on 32-bit and 64-bit platforms.
|
||||||
|
pos [33]byte
|
||||||
|
start, end uint8
|
||||||
|
neg [33]byte
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pos returns the bytes of the encoded value with bits set in the positions
|
||||||
|
// that represent a signed digit of +1.
|
||||||
|
func (s *nafScalar) Pos() []byte {
|
||||||
|
return s.pos[s.start:s.end]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Neg returns the bytes of the encoded value with bits set in the positions
|
||||||
|
// that represent a signed digit of -1.
|
||||||
|
func (s *nafScalar) Neg() []byte {
|
||||||
|
return s.neg[s.start:s.end]
|
||||||
|
}
|
||||||
|
|
||||||
|
// naf takes a positive integer up to a maximum value of 2^256 - 1 and returns
|
||||||
|
// its non-adjacent form (NAF), which is a unique signed-digit representation
|
||||||
|
// such that no two consecutive digits are nonzero. See the documentation for
|
||||||
|
// the returned type for details on how the representation is encoded
|
||||||
|
// efficiently and how to interpret it.
|
||||||
|
//
|
||||||
|
// NAF is useful in that it has the fewest nonzero digits of any signed digit
|
||||||
|
// representation, only 1/3rd of its digits are nonzero on average, and at least
|
||||||
|
// half of the digits will be 0.
|
||||||
|
//
|
||||||
|
// The aforementioned properties are particularly beneficial for optimizing
|
||||||
|
// elliptic curve point multiplication because they effectively minimize the
|
||||||
|
// number of required point additions in exchange for needing to perform a mix
|
||||||
|
// of fewer point additions and subtractions and possibly one additional point
|
||||||
|
// doubling. This is an excellent tradeoff because subtraction of points has
|
||||||
|
// the same computational complexity as addition of points and point doubling is
|
||||||
|
// faster than both.
|
||||||
|
func naf(k []byte) nafScalar {
|
||||||
|
// Strip leading zero bytes.
|
||||||
|
for len(k) > 0 && k[0] == 0x00 {
|
||||||
|
k = k[1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
// The non-adjacent form (NAF) of a positive integer k is an expression
|
||||||
|
// k = ∑_(i=0, l-1) k_i * 2^i where k_i ∈ {0,±1}, k_(l-1) != 0, and no two
|
||||||
|
// consecutive digits k_i are nonzero.
|
||||||
|
//
|
||||||
|
// The traditional method of computing the NAF of a positive integer is
|
||||||
|
// given by algorithm 3.30 in [GECC]. It consists of repeatedly dividing k
|
||||||
|
// by 2 and choosing the remainder so that the quotient (k−r)/2 is even
|
||||||
|
// which ensures the next NAF digit is 0. This requires log_2(k) steps.
|
||||||
|
//
|
||||||
|
// However, in [BRID], Prodinger notes that a closed form expression for the
|
||||||
|
// NAF representation is the bitwise difference 3k/2 - k/2. This is more
|
||||||
|
// efficient as it can be computed in O(1) versus the O(log(n)) of the
|
||||||
|
// traditional approach.
|
||||||
|
//
|
||||||
|
// The following code makes use of that formula to compute the NAF more
|
||||||
|
// efficiently.
|
||||||
|
//
|
||||||
|
// To understand the logic here, observe that the only way the NAF has a
|
||||||
|
// nonzero digit at a given bit is when either 3k/2 or k/2 has a bit set in
|
||||||
|
// that position, but not both. In other words, the result of a bitwise
|
||||||
|
// xor. This can be seen simply by considering that when the bits are the
|
||||||
|
// same, the subtraction is either 0-0 or 1-1, both of which are 0.
|
||||||
|
//
|
||||||
|
// Further, observe that the "+1" digits in the result are contributed by
|
||||||
|
// 3k/2 while the "-1" digits are from k/2. So, they can be determined by
|
||||||
|
// taking the bitwise and of each respective value with the result of the
|
||||||
|
// xor which identifies which bits are nonzero.
|
||||||
|
//
|
||||||
|
// Using that information, this loops backwards from the least significant
|
||||||
|
// byte to the most significant byte while performing the aforementioned
|
||||||
|
// calculations by propagating the potential carry and high order bit from
|
||||||
|
// the next word during the right shift.
|
||||||
|
kLen := len(k)
|
||||||
|
var result nafScalar
|
||||||
|
var carry uint8
|
||||||
|
for byteNum := kLen - 1; byteNum >= 0; byteNum-- {
|
||||||
|
// Calculate k/2. Notice the carry from the previous word is added and
|
||||||
|
// the low order bit from the next word is shifted in accordingly.
|
||||||
|
kc := uint16(k[byteNum]) + uint16(carry)
|
||||||
|
var nextWord uint8
|
||||||
|
if byteNum > 0 {
|
||||||
|
nextWord = k[byteNum-1]
|
||||||
|
}
|
||||||
|
halfK := kc>>1 | uint16(nextWord<<7)
|
||||||
|
|
||||||
|
// Calculate 3k/2 and determine the non-zero digits in the result.
|
||||||
|
threeHalfK := kc + halfK
|
||||||
|
nonZeroResultDigits := threeHalfK ^ halfK
|
||||||
|
|
||||||
|
// Determine the signed digits {0, ±1}.
|
||||||
|
result.pos[byteNum+1] = uint8(threeHalfK & nonZeroResultDigits)
|
||||||
|
result.neg[byteNum+1] = uint8(halfK & nonZeroResultDigits)
|
||||||
|
|
||||||
|
// Propagate the potential carry from the 3k/2 calculation.
|
||||||
|
carry = uint8(threeHalfK >> 8)
|
||||||
|
}
|
||||||
|
result.pos[0] = carry
|
||||||
|
|
||||||
|
// Set the starting and ending positions within the fixed size arrays to
|
||||||
|
// identify the bytes that are actually used. This is important since the
|
||||||
|
// encoding is big endian and thus trailing zero bytes change its value.
|
||||||
|
result.start = 1 - carry
|
||||||
|
result.end = uint8(kLen + 1)
|
||||||
|
return result
|
||||||
|
}
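// Illustrative sketch (not part of the vendored file): decoding the NAF of 7.
// Since 7 = 8 - 1, the expected signed digits are +1 at bit 3 and -1 at bit 0,
// so Pos() yields [0x08] and Neg() yields [0x01].
func nafOfSeven() (pos, neg []byte) {
	n := naf([]byte{0x07})
	return n.Pos(), n.Neg()
}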
|
||||||
|
|
||||||
|
// ScalarMultNonConst multiplies k*P where k is a big endian integer modulo the
|
||||||
|
// curve order and P is a point in Jacobian projective coordinates and stores
|
||||||
|
// the result in the provided Jacobian point.
|
||||||
|
//
|
||||||
|
// NOTE: The point must be normalized for this function to return the correct
|
||||||
|
// result. The resulting point will be normalized.
|
||||||
|
func ScalarMultNonConst(k *ModNScalar, point, result *JacobianPoint) {
|
||||||
|
// Decompose K into k1 and k2 in order to halve the number of EC ops.
|
||||||
|
// See Algorithm 3.74 in [GECC].
|
||||||
|
kBytes := k.Bytes()
|
||||||
|
k1, k2, signK1, signK2 := splitK(kBytes[:])
|
||||||
|
zeroArray32(&kBytes)
|
||||||
|
|
||||||
|
// The main equation here to remember is:
|
||||||
|
// k * P = k1 * P + k2 * ϕ(P)
|
||||||
|
//
|
||||||
|
// P1 below is P in the equation, P2 below is ϕ(P) in the equation
|
||||||
|
p1, p1Neg := new(JacobianPoint), new(JacobianPoint)
|
||||||
|
p1.Set(point)
|
||||||
|
p1Neg.Set(p1)
|
||||||
|
p1Neg.Y.Negate(1).Normalize()
|
||||||
|
|
||||||
|
// NOTE: ϕ(x,y) = (βx,y). The Jacobian z coordinates are the same, so this
|
||||||
|
// math goes through.
|
||||||
|
p2, p2Neg := new(JacobianPoint), new(JacobianPoint)
|
||||||
|
p2.Set(p1)
|
||||||
|
p2.X.Mul(endomorphismBeta).Normalize()
|
||||||
|
p2Neg.Set(p2)
|
||||||
|
p2Neg.Y.Negate(1).Normalize()
|
||||||
|
|
||||||
|
// Flip the positive and negative values of the points as needed
|
||||||
|
// depending on the signs of k1 and k2. As mentioned in the equation
|
||||||
|
// above, each of k1 and k2 are multiplied by the respective point.
|
||||||
|
// Since -k * P is the same thing as k * -P, and the group law for
|
||||||
|
// elliptic curves states that P(x, y) = -P(x, -y), it's faster and
|
||||||
|
// simplifies the code to just make the point negative.
|
||||||
|
if signK1 == -1 {
|
||||||
|
p1, p1Neg = p1Neg, p1
|
||||||
|
}
|
||||||
|
if signK2 == -1 {
|
||||||
|
p2, p2Neg = p2Neg, p2
|
||||||
|
}
|
||||||
|
|
||||||
|
// NAF versions of k1 and k2 should have a lot more zeros.
|
||||||
|
//
|
||||||
|
// The Pos version of the bytes contain the +1s and the Neg versions
|
||||||
|
// contain the -1s.
|
||||||
|
k1NAF, k2NAF := naf(k1), naf(k2)
|
||||||
|
k1PosNAF, k1NegNAF := k1NAF.Pos(), k1NAF.Neg()
|
||||||
|
k2PosNAF, k2NegNAF := k2NAF.Pos(), k2NAF.Neg()
|
||||||
|
k1Len, k2Len := len(k1PosNAF), len(k2PosNAF)
|
||||||
|
|
||||||
|
m := k1Len
|
||||||
|
if m < k2Len {
|
||||||
|
m = k2Len
|
||||||
|
}
|
||||||
|
|
||||||
|
// Point Q = ∞ (point at infinity).
|
||||||
|
var q JacobianPoint
|
||||||
|
|
||||||
|
// Add left-to-right using the NAF optimization. See algorithm 3.77
|
||||||
|
// from [GECC]. This should be faster overall since there will be a lot
|
||||||
|
// more instances of 0, hence reducing the number of Jacobian additions
|
||||||
|
// at the cost of 1 possible extra doubling.
|
||||||
|
for i := 0; i < m; i++ {
|
||||||
|
// Since k1 and k2 are potentially different lengths and the calculation
|
||||||
|
// is being done left to right, pad the front of the shorter one with
|
||||||
|
// 0s.
|
||||||
|
var k1BytePos, k1ByteNeg, k2BytePos, k2ByteNeg byte
|
||||||
|
if i >= m-k1Len {
|
||||||
|
k1BytePos, k1ByteNeg = k1PosNAF[i-(m-k1Len)], k1NegNAF[i-(m-k1Len)]
|
||||||
|
}
|
||||||
|
if i >= m-k2Len {
|
||||||
|
k2BytePos, k2ByteNeg = k2PosNAF[i-(m-k2Len)], k2NegNAF[i-(m-k2Len)]
|
||||||
|
}
|
||||||
|
for bit, mask := 7, uint8(1<<7); bit >= 0; bit, mask = bit-1, mask>>1 {
|
||||||
|
// Q = 2 * Q
|
||||||
|
DoubleNonConst(&q, &q)
|
||||||
|
|
||||||
|
// Add or subtract the first point based on the signed digit of the
|
||||||
|
// NAF representation of k1 at this bit position.
|
||||||
|
//
|
||||||
|
// +1: Q = Q + p1
|
||||||
|
// -1: Q = Q - p1
|
||||||
|
// 0: Q = Q (no change)
|
||||||
|
if k1BytePos&mask == mask {
|
||||||
|
AddNonConst(&q, p1, &q)
|
||||||
|
} else if k1ByteNeg&mask == mask {
|
||||||
|
AddNonConst(&q, p1Neg, &q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add or subtract the second point based on the signed digit of the
|
||||||
|
// NAF representation of k2 at this bit position.
|
||||||
|
//
|
||||||
|
// +1: Q = Q + p2
|
||||||
|
// -1: Q = Q - p2
|
||||||
|
// 0: Q = Q (no change)
|
||||||
|
if k2BytePos&mask == mask {
|
||||||
|
AddNonConst(&q, p2, &q)
|
||||||
|
} else if k2ByteNeg&mask == mask {
|
||||||
|
AddNonConst(&q, p2Neg, &q)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.Set(&q)
|
||||||
|
}
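// Illustrative sketch (not part of the vendored file): multiplying a
// normalized Jacobian point by a 32-byte big-endian scalar and converting the
// result back to affine coordinates.
func examplePointMul(p *JacobianPoint, kBytes [32]byte) JacobianPoint {
	var k ModNScalar
	k.SetBytes(&kBytes) // interprets kBytes as a big-endian value mod N
	var result JacobianPoint
	ScalarMultNonConst(&k, p, &result)
	result.ToAffine()
	return result
}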
|
||||||
|
|
||||||
|
// ScalarBaseMultNonConst multiplies k*G where G is the base point of the group
|
||||||
|
// and k is a big endian integer. The result is stored in Jacobian coordinates
|
||||||
|
// (x1, y1, z1).
|
||||||
|
//
|
||||||
|
// NOTE: The resulting point will be normalized.
|
||||||
|
func ScalarBaseMultNonConst(k *ModNScalar, result *JacobianPoint) {
|
||||||
|
bytePoints := s256BytePoints()
|
||||||
|
|
||||||
|
// Point Q = ∞ (point at infinity).
|
||||||
|
var q JacobianPoint
|
||||||
|
|
||||||
|
// curve.bytePoints has all 256 byte points for each 8-bit window. The
|
||||||
|
// strategy is to add up the byte points. This is best understood by
|
||||||
|
// expressing k in base-256 which it already sort of is. Each "digit" in
|
||||||
|
// the 8-bit window can be looked up using bytePoints and added together.
|
||||||
|
var pt JacobianPoint
|
||||||
|
for i, byteVal := range k.Bytes() {
|
||||||
|
p := bytePoints[i][byteVal]
|
||||||
|
pt.X.Set(&p[0])
|
||||||
|
pt.Y.Set(&p[1])
|
||||||
|
pt.Z.SetInt(1)
|
||||||
|
AddNonConst(&q, &pt, &q)
|
||||||
|
}
|
||||||
|
|
||||||
|
result.Set(&q)
|
||||||
|
}
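// Illustrative sketch (not part of the vendored file): deriving the public
// point k*G for a scalar k via the precomputed byte points and returning its
// affine coordinates.
func examplePubPoint(k *ModNScalar) (x, y FieldVal) {
	var result JacobianPoint
	ScalarBaseMultNonConst(k, &result)
	result.ToAffine()
	return result.X, result.Y
}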
|
||||||
|
|
||||||
|
// isOnCurve returns whether or not the affine point (x,y) is on the curve.
func isOnCurve(fx, fy *FieldVal) bool {
	// Elliptic curve equation for secp256k1 is: y^2 = x^3 + 7
	y2 := new(FieldVal).SquareVal(fy).Normalize()
	result := new(FieldVal).SquareVal(fx).Mul(fx).AddInt(7).Normalize()
	return y2.Equals(result)
}

// DecompressY attempts to calculate the Y coordinate for the given X coordinate
// such that the result pair is a point on the secp256k1 curve. It adjusts Y
// based on the desired oddness and returns whether or not it was successful
// since not all X coordinates are valid.
//
// The magnitude of the provided X coordinate field val must be a max of 8 for a
// correct result. The resulting Y field val will have a max magnitude of 2.
func DecompressY(x *FieldVal, odd bool, resultY *FieldVal) bool {
	// The curve equation for secp256k1 is: y^2 = x^3 + 7. Thus
	// y = +-sqrt(x^3 + 7).
	//
	// The x coordinate must be invalid if there is no square root for the
	// calculated rhs because it means the X coordinate is not for a point on
	// the curve.
	x3PlusB := new(FieldVal).SquareVal(x).Mul(x).AddInt(7)
	if hasSqrt := resultY.SquareRootVal(x3PlusB); !hasSqrt {
		return false
	}
	if resultY.Normalize().IsOdd() != odd {
		resultY.Negate(1)
	}
	return true
}
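// Illustrative sketch (not part of the vendored file): recovering the even-Y
// point for a given x coordinate and double-checking it against the curve
// equation with isOnCurve.
func exampleDecompress(x *FieldVal) (FieldVal, bool) {
	var y FieldVal
	if !DecompressY(x, false, &y) {
		return y, false // no point on the curve has this x coordinate
	}
	y.Normalize()
	return y, isOnCurve(x, &y)
}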
|
58
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,58 @@
|
||||||
|
// Copyright (c) 2013-2014 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2019 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
/*
|
||||||
|
Package secp256k1 implements optimized secp256k1 elliptic curve operations.
|
||||||
|
|
||||||
|
This package provides an optimized pure Go implementation of elliptic curve
|
||||||
|
cryptography operations over the secp256k1 curve as well as data structures and
|
||||||
|
functions for working with public and private secp256k1 keys. See
|
||||||
|
https://www.secg.org/sec2-v2.pdf for details on the standard.
|
||||||
|
|
||||||
|
In addition, sub packages are provided to produce, verify, parse, and serialize
|
||||||
|
ECDSA signatures and EC-Schnorr-DCRv0 (a custom Schnorr-based signature scheme
|
||||||
|
specific to Decred) signatures. See the README.md files in the relevant sub
|
||||||
|
packages for more details about those aspects.
|
||||||
|
|
||||||
|
An overview of the features provided by this package is as follows:
|
||||||
|
|
||||||
|
- Private key generation, serialization, and parsing
|
||||||
|
- Public key generation, serialization and parsing per ANSI X9.62-1998
|
||||||
|
- Parses uncompressed, compressed, and hybrid public keys
|
||||||
|
- Serializes uncompressed and compressed public keys
|
||||||
|
- Specialized types for performing optimized and constant time field operations
|
||||||
|
- FieldVal type for working modulo the secp256k1 field prime
|
||||||
|
- ModNScalar type for working modulo the secp256k1 group order
|
||||||
|
- Elliptic curve operations in Jacobian projective coordinates
|
||||||
|
- Point addition
|
||||||
|
- Point doubling
|
||||||
|
- Scalar multiplication with an arbitrary point
|
||||||
|
- Scalar multiplication with the base point (group generator)
|
||||||
|
- Point decompression from a given x coordinate
|
||||||
|
- Nonce generation via RFC6979 with support for extra data and version
|
||||||
|
information that can be used to prevent nonce reuse between signing
|
||||||
|
algorithms
|
||||||
|
|
||||||
|
It also provides an implementation of the Go standard library crypto/elliptic
|
||||||
|
Curve interface via the S256 function so that it may be used with other packages
|
||||||
|
in the standard library such as crypto/tls, crypto/x509, and crypto/ecdsa.
|
||||||
|
However, in the case of ECDSA, it is highly recommended to use the ecdsa sub
|
||||||
|
package of this package instead since it is optimized specifically for secp256k1
|
||||||
|
and is significantly faster as a result.
|
||||||
|
|
||||||
|
Although this package was primarily written for dcrd, it has intentionally been
|
||||||
|
designed so it can be used as a standalone package for any projects needing to
|
||||||
|
use optimized secp256k1 elliptic curve cryptography.
|
||||||
|
|
||||||
|
Finally, a comprehensive suite of tests is provided to provide a high level of
|
||||||
|
quality assurance.
|
||||||
|
|
||||||
|
Use of secp256k1 in Decred
|
||||||
|
|
||||||
|
At the time of this writing, the primary public key cryptography in widespread
|
||||||
|
use on the Decred network used to secure coins is based on elliptic curves
|
||||||
|
defined by the secp256k1 domain parameters.
|
||||||
|
*/
|
||||||
|
package secp256k1
|
21
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/ecdh.go
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
// Copyright (c) 2015 The btcsuite developers
// Copyright (c) 2015-2016 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.

package secp256k1

// GenerateSharedSecret generates a shared secret based on a private key and a
// public key using Diffie-Hellman key exchange (ECDH) (RFC 5903).
// RFC5903 Section 9 states we should only return x.
//
// It is recommended to securely hash the result before using it as a
// cryptographic key.
func GenerateSharedSecret(privkey *PrivateKey, pubkey *PublicKey) []byte {
	var point, result JacobianPoint
	pubkey.AsJacobian(&point)
	ScalarMultNonConst(&privkey.Key, &point, &result)
	result.ToAffine()
	xBytes := result.X.Bytes()
	return xBytes[:]
}
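// Illustrative sketch (not part of the vendored file): hashing the raw shared
// secret with SHA-256 before using it as a symmetric key, as recommended
// above. Assumes "crypto/sha256" is imported.
func exampleSharedKey(priv *PrivateKey, peerPub *PublicKey) [32]byte {
	return sha256.Sum256(GenerateSharedSecret(priv, peerPub))
}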
|
255
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/ellipticadaptor.go
generated
vendored
Normal file
|
@ -0,0 +1,255 @@
|
||||||
|
// Copyright 2020-2021 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
// References:
|
||||||
|
// [SECG]: Recommended Elliptic Curve Domain Parameters
|
||||||
|
// https://www.secg.org/sec2-v2.pdf
|
||||||
|
//
|
||||||
|
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/elliptic"
|
||||||
|
"math/big"
|
||||||
|
)
|
||||||
|
|
||||||
|
// CurveParams contains the parameters for the secp256k1 curve.
|
||||||
|
type CurveParams struct {
|
||||||
|
// P is the prime used in the secp256k1 field.
|
||||||
|
P *big.Int
|
||||||
|
|
||||||
|
// N is the order of the secp256k1 curve group generated by the base point.
|
||||||
|
N *big.Int
|
||||||
|
|
||||||
|
// Gx and Gy are the x and y coordinate of the base point, respectively.
|
||||||
|
Gx, Gy *big.Int
|
||||||
|
|
||||||
|
// BitSize is the size of the underlying secp256k1 field in bits.
|
||||||
|
BitSize int
|
||||||
|
|
||||||
|
// H is the cofactor of the secp256k1 curve.
|
||||||
|
H int
|
||||||
|
|
||||||
|
// ByteSize is simply the bit size / 8 and is provided for convenience
|
||||||
|
// since it is calculated repeatedly.
|
||||||
|
ByteSize int
|
||||||
|
}
|
||||||
|
|
||||||
|
// Curve parameters taken from [SECG] section 2.4.1.
|
||||||
|
var curveParams = CurveParams{
|
||||||
|
P: fromHex("fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f"),
|
||||||
|
N: fromHex("fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141"),
|
||||||
|
Gx: fromHex("79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798"),
|
||||||
|
Gy: fromHex("483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8"),
|
||||||
|
BitSize: 256,
|
||||||
|
H: 1,
|
||||||
|
ByteSize: 256 / 8,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Params returns the secp256k1 curve parameters for convenience.
|
||||||
|
func Params() *CurveParams {
|
||||||
|
return &curveParams
|
||||||
|
}
|
||||||
|
|
||||||
|
// KoblitzCurve provides an implementation for secp256k1 that fits the ECC Curve
|
||||||
|
// interface from crypto/elliptic.
|
||||||
|
type KoblitzCurve struct {
|
||||||
|
*elliptic.CurveParams
|
||||||
|
}
|
||||||
|
|
||||||
|
// bigAffineToJacobian takes an affine point (x, y) as big integers and converts
|
||||||
|
// it to Jacobian point with Z=1.
|
||||||
|
func bigAffineToJacobian(x, y *big.Int, result *JacobianPoint) {
|
||||||
|
result.X.SetByteSlice(x.Bytes())
|
||||||
|
result.Y.SetByteSlice(y.Bytes())
|
||||||
|
result.Z.SetInt(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// jacobianToBigAffine takes a Jacobian point (x, y, z) as field values and
|
||||||
|
// converts it to an affine point as big integers.
|
||||||
|
func jacobianToBigAffine(point *JacobianPoint) (*big.Int, *big.Int) {
|
||||||
|
point.ToAffine()
|
||||||
|
|
||||||
|
// Convert the field values for the now affine point to big.Ints.
|
||||||
|
x3, y3 := new(big.Int), new(big.Int)
|
||||||
|
x3.SetBytes(point.X.Bytes()[:])
|
||||||
|
y3.SetBytes(point.Y.Bytes()[:])
|
||||||
|
return x3, y3
|
||||||
|
}
|
||||||
|
|
||||||
|
// Params returns the parameters for the curve.
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation.
|
||||||
|
func (curve *KoblitzCurve) Params() *elliptic.CurveParams {
|
||||||
|
return curve.CurveParams
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsOnCurve returns whether or not the affine point (x,y) is on the curve.
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation. This function
|
||||||
|
// differs from the crypto/elliptic algorithm since a = 0, not -3.
|
||||||
|
func (curve *KoblitzCurve) IsOnCurve(x, y *big.Int) bool {
|
||||||
|
// Convert big ints to a Jacobian point for faster arithmetic.
|
||||||
|
var point JacobianPoint
|
||||||
|
bigAffineToJacobian(x, y, &point)
|
||||||
|
return isOnCurve(&point.X, &point.Y)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add returns the sum of (x1,y1) and (x2,y2).
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation.
|
||||||
|
func (curve *KoblitzCurve) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
|
||||||
|
// A point at infinity is the identity according to the group law for
|
||||||
|
// elliptic curve cryptography. Thus, ∞ + P = P and P + ∞ = P.
|
||||||
|
if x1.Sign() == 0 && y1.Sign() == 0 {
|
||||||
|
return x2, y2
|
||||||
|
}
|
||||||
|
if x2.Sign() == 0 && y2.Sign() == 0 {
|
||||||
|
return x1, y1
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the affine coordinates from big integers to Jacobian points,
|
||||||
|
// do the point addition in Jacobian projective space, and convert the
|
||||||
|
// Jacobian point back to affine big.Ints.
|
||||||
|
var p1, p2, result JacobianPoint
|
||||||
|
bigAffineToJacobian(x1, y1, &p1)
|
||||||
|
bigAffineToJacobian(x2, y2, &p2)
|
||||||
|
AddNonConst(&p1, &p2, &result)
|
||||||
|
return jacobianToBigAffine(&result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Double returns 2*(x1,y1).
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation.
|
||||||
|
func (curve *KoblitzCurve) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
|
||||||
|
if y1.Sign() == 0 {
|
||||||
|
return new(big.Int), new(big.Int)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert the affine coordinates from big integers to Jacobian points,
|
||||||
|
// do the point doubling in Jacobian projective space, and convert the
|
||||||
|
// Jacobian point back to affine big.Ints.
|
||||||
|
var point, result JacobianPoint
|
||||||
|
bigAffineToJacobian(x1, y1, &point)
|
||||||
|
DoubleNonConst(&point, &result)
|
||||||
|
return jacobianToBigAffine(&result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// moduloReduce reduces k from more than 32 bytes to 32 bytes and under. This
|
||||||
|
// is done by doing a simple modulo curve.N. We can do this since G^N = 1 and
|
||||||
|
// thus any other valid point on the elliptic curve has the same order.
|
||||||
|
func moduloReduce(k []byte) []byte {
|
||||||
|
// Since the order of G is curve.N, we can use a much smaller number by
|
||||||
|
// doing modulo curve.N
|
||||||
|
if len(k) > curveParams.ByteSize {
|
||||||
|
tmpK := new(big.Int).SetBytes(k)
|
||||||
|
tmpK.Mod(tmpK, curveParams.N)
|
||||||
|
return tmpK.Bytes()
|
||||||
|
}
|
||||||
|
|
||||||
|
return k
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScalarMult returns k*(Bx, By) where k is a big endian integer.
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation.
|
||||||
|
func (curve *KoblitzCurve) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
|
||||||
|
// Convert the affine coordinates from big integers to Jacobian points,
|
||||||
|
// do the multiplication in Jacobian projective space, and convert the
|
||||||
|
// Jacobian point back to affine big.Ints.
|
||||||
|
var kModN ModNScalar
|
||||||
|
kModN.SetByteSlice(moduloReduce(k))
|
||||||
|
var point, result JacobianPoint
|
||||||
|
bigAffineToJacobian(Bx, By, &point)
|
||||||
|
ScalarMultNonConst(&kModN, &point, &result)
|
||||||
|
return jacobianToBigAffine(&result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ScalarBaseMult returns k*G where G is the base point of the group and k is a
|
||||||
|
// big endian integer.
|
||||||
|
//
|
||||||
|
// This is part of the elliptic.Curve interface implementation.
|
||||||
|
func (curve *KoblitzCurve) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
|
||||||
|
// Perform the multiplication and convert the Jacobian point back to affine
|
||||||
|
// big.Ints.
|
||||||
|
var kModN ModNScalar
|
||||||
|
kModN.SetByteSlice(moduloReduce(k))
|
||||||
|
var result JacobianPoint
|
||||||
|
ScalarBaseMultNonConst(&kModN, &result)
|
||||||
|
return jacobianToBigAffine(&result)
|
||||||
|
}
|
||||||
|
|
||||||
|
// X returns the x coordinate of the public key.
|
||||||
|
func (p *PublicKey) X() *big.Int {
|
||||||
|
return new(big.Int).SetBytes(p.x.Bytes()[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// Y returns the y coordinate of the public key.
|
||||||
|
func (p *PublicKey) Y() *big.Int {
|
||||||
|
return new(big.Int).SetBytes(p.y.Bytes()[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToECDSA returns the public key as a *ecdsa.PublicKey.
|
||||||
|
func (p *PublicKey) ToECDSA() *ecdsa.PublicKey {
|
||||||
|
return &ecdsa.PublicKey{
|
||||||
|
Curve: S256(),
|
||||||
|
X: p.X(),
|
||||||
|
Y: p.Y(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToECDSA returns the private key as a *ecdsa.PrivateKey.
|
||||||
|
func (p *PrivateKey) ToECDSA() *ecdsa.PrivateKey {
|
||||||
|
var privKeyBytes [PrivKeyBytesLen]byte
|
||||||
|
p.Key.PutBytes(&privKeyBytes)
|
||||||
|
var result JacobianPoint
|
||||||
|
ScalarBaseMultNonConst(&p.Key, &result)
|
||||||
|
x, y := jacobianToBigAffine(&result)
|
||||||
|
newPrivKey := &ecdsa.PrivateKey{
|
||||||
|
PublicKey: ecdsa.PublicKey{
|
||||||
|
Curve: S256(),
|
||||||
|
X: x,
|
||||||
|
Y: y,
|
||||||
|
},
|
||||||
|
D: new(big.Int).SetBytes(privKeyBytes[:]),
|
||||||
|
}
|
||||||
|
zeroArray32(&privKeyBytes)
|
||||||
|
return newPrivKey
|
||||||
|
}
|
||||||
|
|
||||||
|
// fromHex converts the passed hex string into a big integer pointer and will
|
||||||
|
// panic if there is an error. This is only provided for the hard-coded
|
||||||
|
// constants so errors in the source code can be detected. It will only (and
|
||||||
|
// must only) be called for initialization purposes.
|
||||||
|
func fromHex(s string) *big.Int {
|
||||||
|
if s == "" {
|
||||||
|
return big.NewInt(0)
|
||||||
|
}
|
||||||
|
r, ok := new(big.Int).SetString(s, 16)
|
||||||
|
if !ok {
|
||||||
|
panic("invalid hex in source file: " + s)
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
// secp256k1 is a global instance of the KoblitzCurve implementation which in
|
||||||
|
// turn embeds and implements elliptic.CurveParams.
|
||||||
|
var secp256k1 = &KoblitzCurve{
|
||||||
|
CurveParams: &elliptic.CurveParams{
|
||||||
|
P: curveParams.P,
|
||||||
|
N: curveParams.N,
|
||||||
|
B: fromHex("0000000000000000000000000000000000000000000000000000000000000007"),
|
||||||
|
Gx: curveParams.Gx,
|
||||||
|
Gy: curveParams.Gy,
|
||||||
|
BitSize: curveParams.BitSize,
|
||||||
|
Name: "secp256k1",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// S256 returns a Curve which implements secp256k1.
|
||||||
|
func S256() *KoblitzCurve {
|
||||||
|
return secp256k1
|
||||||
|
}
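// Illustrative sketch (not part of the vendored file): because KoblitzCurve
// satisfies elliptic.Curve, S256() can be handed to the standard library as
// described above. Assumes "crypto/rand" is imported; "crypto/ecdsa" is
// already imported by this file.
func exampleStdlibKey() (*ecdsa.PrivateKey, error) {
	return ecdsa.GenerateKey(S256(), rand.Reader)
}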
|
67
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/error.go
generated
vendored
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
// Copyright (c) 2020 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
// ErrorKind identifies a kind of error. It has full support for errors.Is and
|
||||||
|
// errors.As, so the caller can directly check against an error kind when
|
||||||
|
// determining the reason for an error.
|
||||||
|
type ErrorKind string
|
||||||
|
|
||||||
|
// These constants are used to identify a specific RuleError.
|
||||||
|
const (
|
||||||
|
// ErrPubKeyInvalidLen indicates that the length of a serialized public
|
||||||
|
// key is not one of the allowed lengths.
|
||||||
|
ErrPubKeyInvalidLen = ErrorKind("ErrPubKeyInvalidLen")
|
||||||
|
|
||||||
|
// ErrPubKeyInvalidFormat indicates an attempt was made to parse a public
|
||||||
|
// key that does not specify one of the supported formats.
|
||||||
|
ErrPubKeyInvalidFormat = ErrorKind("ErrPubKeyInvalidFormat")
|
||||||
|
|
||||||
|
// ErrPubKeyXTooBig indicates that the x coordinate for a public key
|
||||||
|
// is greater than or equal to the prime of the field underlying the group.
|
||||||
|
ErrPubKeyXTooBig = ErrorKind("ErrPubKeyXTooBig")
|
||||||
|
|
||||||
|
// ErrPubKeyYTooBig indicates that the y coordinate for a public key is
|
||||||
|
// greater than or equal to the prime of the field underlying the group.
|
||||||
|
ErrPubKeyYTooBig = ErrorKind("ErrPubKeyYTooBig")
|
||||||
|
|
||||||
|
// ErrPubKeyNotOnCurve indicates that a public key is not a point on the
|
||||||
|
// secp256k1 curve.
|
||||||
|
ErrPubKeyNotOnCurve = ErrorKind("ErrPubKeyNotOnCurve")
|
||||||
|
|
||||||
|
// ErrPubKeyMismatchedOddness indicates that a hybrid public key specified
|
||||||
|
// an oddness of the y coordinate that does not match the actual oddness of
|
||||||
|
// the provided y coordinate.
|
||||||
|
ErrPubKeyMismatchedOddness = ErrorKind("ErrPubKeyMismatchedOddness")
|
||||||
|
)
|
||||||
|
|
||||||
|
// Error satisfies the error interface and prints human-readable errors.
|
||||||
|
func (e ErrorKind) Error() string {
|
||||||
|
return string(e)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error identifies an error related to public key cryptography using a
|
||||||
|
// sec256k1 curve. It has full support for errors.Is and errors.As, so the
|
||||||
|
// caller can ascertain the specific reason for the error by checking
|
||||||
|
// the underlying error.
|
||||||
|
type Error struct {
|
||||||
|
Err error
|
||||||
|
Description string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Error satisfies the error interface and prints human-readable errors.
|
||||||
|
func (e Error) Error() string {
|
||||||
|
return e.Description
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unwrap returns the underlying wrapped error.
|
||||||
|
func (e Error) Unwrap() error {
|
||||||
|
return e.Err
|
||||||
|
}
|
||||||
|
|
||||||
|
// makeError creates an Error given a set of arguments.
|
||||||
|
func makeError(kind ErrorKind, desc string) Error {
|
||||||
|
return Error{Err: kind, Description: desc}
|
||||||
|
}
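// Illustrative sketch (not part of the vendored file): since Error wraps an
// ErrorKind and implements Unwrap, callers can test for a specific kind with
// the standard errors package. Assumes "errors" is imported.
func isPubKeyLenErr(err error) bool {
	return errors.Is(err, ErrPubKeyInvalidLen)
}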
|
1680
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/field.go
generated
vendored
Normal file
File diff suppressed because it is too large
195
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/genstatics.go
generated
vendored
Normal file
|
@ -0,0 +1,195 @@
|
||||||
|
// Copyright (c) 2014-2015 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2021 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// This file is ignored during the regular build due to the following build tag.
|
||||||
|
// This build tag is set during go generate.
|
||||||
|
// +build gensecp256k1
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
// References:
|
||||||
|
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/binary"
|
||||||
|
"math/big"
|
||||||
|
)
|
||||||
|
|
||||||
|
// compressedBytePoints are dummy points used so the code which generates the
|
||||||
|
// real values can compile.
|
||||||
|
var compressedBytePoints = ""
|
||||||
|
|
||||||
|
// SerializedBytePoints returns a serialized byte slice which contains all of
|
||||||
|
// the possible points per 8-bit window. This is used when generating
|
||||||
|
// compressedbytepoints.go.
|
||||||
|
func SerializedBytePoints() []byte {
|
||||||
|
// Calculate G^(2^i) for i in 0..255. These are used to avoid recomputing
|
||||||
|
// them for each digit of the 8-bit windows.
|
||||||
|
doublingPoints := make([]JacobianPoint, curveParams.BitSize)
|
||||||
|
var q JacobianPoint
|
||||||
|
bigAffineToJacobian(curveParams.Gx, curveParams.Gy, &q)
|
||||||
|
for i := 0; i < curveParams.BitSize; i++ {
|
||||||
|
// Q = 2*Q.
|
||||||
|
doublingPoints[i] = q
|
||||||
|
DoubleNonConst(&q, &q)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Separate the bits into byte-sized windows.
|
||||||
|
curveByteSize := curveParams.BitSize / 8
|
||||||
|
serialized := make([]byte, curveByteSize*256*2*10*4)
|
||||||
|
offset := 0
|
||||||
|
for byteNum := 0; byteNum < curveByteSize; byteNum++ {
|
||||||
|
// Grab the 8 bits that make up this byte from doubling points.
|
||||||
|
startingBit := 8 * (curveByteSize - byteNum - 1)
|
||||||
|
windowPoints := doublingPoints[startingBit : startingBit+8]
|
||||||
|
|
||||||
|
// Compute all points in this window, convert them to affine, and
|
||||||
|
// serialize them.
|
||||||
|
for i := 0; i < 256; i++ {
|
||||||
|
var point JacobianPoint
|
||||||
|
for bit := 0; bit < 8; bit++ {
|
||||||
|
if i>>uint(bit)&1 == 1 {
|
||||||
|
AddNonConst(&point, &windowPoints[bit], &point)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
point.ToAffine()
|
||||||
|
|
||||||
|
for i := 0; i < len(point.X.n); i++ {
|
||||||
|
binary.LittleEndian.PutUint32(serialized[offset:], point.X.n[i])
|
||||||
|
offset += 4
|
||||||
|
}
|
||||||
|
for i := 0; i < len(point.Y.n); i++ {
|
||||||
|
binary.LittleEndian.PutUint32(serialized[offset:], point.Y.n[i])
|
||||||
|
offset += 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return serialized
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqrt returns the square root of the provided big integer using Newton's
|
||||||
|
// method. It's only compiled and used during generation of pre-computed
|
||||||
|
// values, so speed is not a huge concern.
|
||||||
|
func sqrt(n *big.Int) *big.Int {
|
||||||
|
// Initial guess = 2^(log_2(n)/2)
|
||||||
|
guess := big.NewInt(2)
|
||||||
|
guess.Exp(guess, big.NewInt(int64(n.BitLen()/2)), nil)
|
||||||
|
|
||||||
|
// Now refine using Newton's method.
|
||||||
|
big2 := big.NewInt(2)
|
||||||
|
prevGuess := big.NewInt(0)
|
||||||
|
for {
|
||||||
|
prevGuess.Set(guess)
|
||||||
|
guess.Add(guess, new(big.Int).Div(n, guess))
|
||||||
|
guess.Div(guess, big2)
|
||||||
|
if guess.Cmp(prevGuess) == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return guess
|
||||||
|
}
|
||||||
|
|
||||||
|
// EndomorphismVectors runs the first 3 steps of algorithm 3.74 from [GECC] to
|
||||||
|
// generate the linearly independent vectors needed to generate a balanced
|
||||||
|
// length-two representation of a multiplier such that k = k1 + k2λ (mod N) and
|
||||||
|
// returns them. Since the values will always be the same given the fact that N
|
||||||
|
// and λ are fixed, the final results can be accelerated by storing the
|
||||||
|
// precomputed values.
|
||||||
|
func EndomorphismVectors() (a1, b1, a2, b2 *big.Int) {
|
||||||
|
bigMinus1 := big.NewInt(-1)
|
||||||
|
|
||||||
|
// This section uses an extended Euclidean algorithm to generate a
|
||||||
|
// sequence of equations:
|
||||||
|
// s[i] * N + t[i] * λ = r[i]
|
||||||
|
|
||||||
|
nSqrt := sqrt(curveParams.N)
|
||||||
|
u, v := new(big.Int).Set(curveParams.N), new(big.Int).Set(endomorphismLambda)
|
||||||
|
x1, y1 := big.NewInt(1), big.NewInt(0)
|
||||||
|
x2, y2 := big.NewInt(0), big.NewInt(1)
|
||||||
|
q, r := new(big.Int), new(big.Int)
|
||||||
|
qu, qx1, qy1 := new(big.Int), new(big.Int), new(big.Int)
|
||||||
|
s, t := new(big.Int), new(big.Int)
|
||||||
|
ri, ti := new(big.Int), new(big.Int)
|
||||||
|
a1, b1, a2, b2 = new(big.Int), new(big.Int), new(big.Int), new(big.Int)
|
||||||
|
found, oneMore := false, false
|
||||||
|
for u.Sign() != 0 {
|
||||||
|
// q = v/u
|
||||||
|
q.Div(v, u)
|
||||||
|
|
||||||
|
// r = v - q*u
|
||||||
|
qu.Mul(q, u)
|
||||||
|
r.Sub(v, qu)
|
||||||
|
|
||||||
|
// s = x2 - q*x1
|
||||||
|
qx1.Mul(q, x1)
|
||||||
|
s.Sub(x2, qx1)
|
||||||
|
|
||||||
|
// t = y2 - q*y1
|
||||||
|
qy1.Mul(q, y1)
|
||||||
|
t.Sub(y2, qy1)
|
||||||
|
|
||||||
|
// v = u, u = r, x2 = x1, x1 = s, y2 = y1, y1 = t
|
||||||
|
v.Set(u)
|
||||||
|
u.Set(r)
|
||||||
|
x2.Set(x1)
|
||||||
|
x1.Set(s)
|
||||||
|
y2.Set(y1)
|
||||||
|
y1.Set(t)
|
||||||
|
|
||||||
|
// As soon as the remainder is less than the sqrt of n, the
|
||||||
|
// values of a1 and b1 are known.
|
||||||
|
if !found && r.Cmp(nSqrt) < 0 {
|
||||||
|
// When this condition executes ri and ti represent the
|
||||||
|
// r[i] and t[i] values such that i is the greatest
|
||||||
|
// index for which r >= sqrt(n). Meanwhile, the current
|
||||||
|
// r and t values are r[i+1] and t[i+1], respectively.
|
||||||
|
|
||||||
|
// a1 = r[i+1], b1 = -t[i+1]
|
||||||
|
a1.Set(r)
|
||||||
|
b1.Mul(t, bigMinus1)
|
||||||
|
found = true
|
||||||
|
oneMore = true
|
||||||
|
|
||||||
|
// Skip to the next iteration so ri and ti are not
|
||||||
|
// modified.
|
||||||
|
continue
|
||||||
|
|
||||||
|
} else if oneMore {
|
||||||
|
// When this condition executes ri and ti still
|
||||||
|
// represent the r[i] and t[i] values while the current
|
||||||
|
// r and t are r[i+2] and t[i+2], respectively.
|
||||||
|
|
||||||
|
// sum1 = r[i]^2 + t[i]^2
|
||||||
|
rSquared := new(big.Int).Mul(ri, ri)
|
||||||
|
tSquared := new(big.Int).Mul(ti, ti)
|
||||||
|
sum1 := new(big.Int).Add(rSquared, tSquared)
|
||||||
|
|
||||||
|
// sum2 = r[i+2]^2 + t[i+2]^2
|
||||||
|
r2Squared := new(big.Int).Mul(r, r)
|
||||||
|
t2Squared := new(big.Int).Mul(t, t)
|
||||||
|
sum2 := new(big.Int).Add(r2Squared, t2Squared)
|
||||||
|
|
||||||
|
// if (r[i]^2 + t[i]^2) <= (r[i+2]^2 + t[i+2]^2)
|
||||||
|
if sum1.Cmp(sum2) <= 0 {
|
||||||
|
// a2 = r[i], b2 = -t[i]
|
||||||
|
a2.Set(ri)
|
||||||
|
b2.Mul(ti, bigMinus1)
|
||||||
|
} else {
|
||||||
|
// a2 = r[i+2], b2 = -t[i+2]
|
||||||
|
a2.Set(r)
|
||||||
|
b2.Mul(t, bigMinus1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// All done.
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
ri.Set(r)
|
||||||
|
ti.Set(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
return a1, b1, a2, b2
|
||||||
|
}
|
91
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/loadprecomputed.go
generated
vendored
Normal file
|
@ -0,0 +1,91 @@
|
||||||
|
// Copyright 2015 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2021 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"compress/zlib"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/binary"
|
||||||
|
"io/ioutil"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
//go:generate go run -tags gensecp256k1 genprecomps.go
|
||||||
|
|
||||||
|
// bytePointTable describes a table used to house pre-computed values for
|
||||||
|
// accelerating scalar base multiplication.
|
||||||
|
type bytePointTable [32][256][2]FieldVal
|
||||||
|
|
||||||
|
// s256BytePoints houses pre-computed values used to accelerate scalar base
|
||||||
|
// multiplication such that they are only loaded on first use.
|
||||||
|
var s256BytePoints = func() func() *bytePointTable {
|
||||||
|
// mustLoadBytePoints decompresses and deserializes the pre-computed byte
|
||||||
|
// points used to accelerate scalar base multiplication for the secp256k1
|
||||||
|
// curve.
|
||||||
|
//
|
||||||
|
// This approach is used since it allows the compiler to use significantly
// less RAM and be performed much faster than it is with hard-coding the
|
||||||
|
// final in-memory data structure. At the same time, it is quite fast to
|
||||||
|
// generate the in-memory data structure on first use with this approach
|
||||||
|
// versus computing the table.
|
||||||
|
//
|
||||||
|
// It will panic on any errors because the data is hard coded and thus any
|
||||||
|
// errors means something is wrong in the source code.
|
||||||
|
var data *bytePointTable
|
||||||
|
mustLoadBytePoints := func() {
|
||||||
|
// There will be no byte points to load when generating them.
|
||||||
|
bp := compressedBytePoints
|
||||||
|
if len(bp) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decompress the pre-computed table used to accelerate scalar base
|
||||||
|
// multiplication.
|
||||||
|
decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(bp))
|
||||||
|
r, err := zlib.NewReader(decoder)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
serialized, err := ioutil.ReadAll(r)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deserialize the precomputed byte points and set the memory table to
|
||||||
|
// them.
|
||||||
|
offset := 0
|
||||||
|
var bytePoints bytePointTable
|
||||||
|
for byteNum := 0; byteNum < len(bytePoints); byteNum++ {
|
||||||
|
// All points in this window.
|
||||||
|
for i := 0; i < len(bytePoints[byteNum]); i++ {
|
||||||
|
px := &bytePoints[byteNum][i][0]
|
||||||
|
py := &bytePoints[byteNum][i][1]
|
||||||
|
for i := 0; i < len(px.n); i++ {
|
||||||
|
px.n[i] = binary.LittleEndian.Uint32(serialized[offset:])
|
||||||
|
offset += 4
|
||||||
|
}
|
||||||
|
for i := 0; i < len(py.n); i++ {
|
||||||
|
py.n[i] = binary.LittleEndian.Uint32(serialized[offset:])
|
||||||
|
offset += 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
data = &bytePoints
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return a closure that initializes the data on first access. This is done
|
||||||
|
// because the table takes a non-trivial amount of memory and initializing
|
||||||
|
// it unconditionally would cause anything that imports the package, either
|
||||||
|
// directly, or indirectly via transitive deps, to use that memory even if
|
||||||
|
// the caller never accesses any parts of the package that actually needs
|
||||||
|
// access to it.
|
||||||
|
var loadBytePointsOnce sync.Once
|
||||||
|
return func() *bytePointTable {
|
||||||
|
loadBytePointsOnce.Do(mustLoadBytePoints)
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
}()
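// Illustrative sketch (not part of the vendored file): the sync.Once closure
// above means the first call pays the one-time decompression cost, after which
// lookups are plain array indexing into the returned table (nil only while
// generating the precomputed values).
func exampleWindowLookup(window int, digit byte) [2]FieldVal {
	table := s256BytePoints()
	return table[window][digit]
}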
|
1088
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/modnscalar.go
generated
vendored
Normal file
File diff suppressed because it is too large
263
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/nonce.go
generated
vendored
Normal file
|
@ -0,0 +1,263 @@
|
||||||
|
// Copyright (c) 2013-2014 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2020 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto/sha256"
|
||||||
|
"hash"
|
||||||
|
)
|
||||||
|
|
||||||
|
// References:
|
||||||
|
// [GECC]: Guide to Elliptic Curve Cryptography (Hankerson, Menezes, Vanstone)
|
||||||
|
//
|
||||||
|
// [ISO/IEC 8825-1]: Information technology — ASN.1 encoding rules:
|
||||||
|
// Specification of Basic Encoding Rules (BER), Canonical Encoding Rules
|
||||||
|
// (CER) and Distinguished Encoding Rules (DER)
|
||||||
|
//
|
||||||
|
// [SEC1]: Elliptic Curve Cryptography (May 31, 2009, Version 2.0)
|
||||||
|
// https://www.secg.org/sec1-v2.pdf
|
||||||
|
|
||||||
|
var (
|
||||||
|
// singleZero is used during RFC6979 nonce generation. It is provided
|
||||||
|
// here to avoid the need to create it multiple times.
|
||||||
|
singleZero = []byte{0x00}
|
||||||
|
|
||||||
|
// zeroInitializer is used during RFC6979 nonce generation. It is provided
|
||||||
|
// here to avoid the need to create it multiple times.
|
||||||
|
zeroInitializer = bytes.Repeat([]byte{0x00}, sha256.BlockSize)
|
||||||
|
|
||||||
|
// singleOne is used during RFC6979 nonce generation. It is provided
|
||||||
|
// here to avoid the need to create it multiple times.
|
||||||
|
singleOne = []byte{0x01}
|
||||||
|
|
||||||
|
// oneInitializer is used during RFC6979 nonce generation. It is provided
|
||||||
|
// here to avoid the need to create it multiple times.
|
||||||
|
oneInitializer = bytes.Repeat([]byte{0x01}, sha256.Size)
|
||||||
|
)
|
||||||
|
|
||||||
|
// hmacsha256 implements a resettable version of HMAC-SHA256.
|
||||||
|
type hmacsha256 struct {
|
||||||
|
inner, outer hash.Hash
|
||||||
|
ipad, opad [sha256.BlockSize]byte
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write adds data to the running hash.
|
||||||
|
func (h *hmacsha256) Write(p []byte) {
|
||||||
|
h.inner.Write(p)
|
||||||
|
}
|
||||||
|
|
||||||
|
// initKey initializes the HMAC-SHA256 instance to the provided key.
|
||||||
|
func (h *hmacsha256) initKey(key []byte) {
|
||||||
|
// Hash the key if it is too large.
|
||||||
|
if len(key) > sha256.BlockSize {
|
||||||
|
h.outer.Write(key)
|
||||||
|
key = h.outer.Sum(nil)
|
||||||
|
}
|
||||||
|
copy(h.ipad[:], key)
|
||||||
|
copy(h.opad[:], key)
|
||||||
|
for i := range h.ipad {
|
||||||
|
h.ipad[i] ^= 0x36
|
||||||
|
}
|
||||||
|
for i := range h.opad {
|
||||||
|
h.opad[i] ^= 0x5c
|
||||||
|
}
|
||||||
|
h.inner.Write(h.ipad[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// ResetKey resets the HMAC-SHA256 to its initial state and then initializes it
|
||||||
|
// with the provided key. It is equivalent to creating a new instance with the
|
||||||
|
// provided key without allocating more memory.
|
||||||
|
func (h *hmacsha256) ResetKey(key []byte) {
|
||||||
|
h.inner.Reset()
|
||||||
|
h.outer.Reset()
|
||||||
|
copy(h.ipad[:], zeroInitializer)
|
||||||
|
copy(h.opad[:], zeroInitializer)
|
||||||
|
h.initKey(key)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reset resets the HMAC-SHA256 to its initial state using the current key.
|
||||||
|
func (h *hmacsha256) Reset() {
|
||||||
|
h.inner.Reset()
|
||||||
|
h.inner.Write(h.ipad[:])
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sum returns the hash of the written data.
|
||||||
|
func (h *hmacsha256) Sum() []byte {
|
||||||
|
h.outer.Reset()
|
||||||
|
h.outer.Write(h.opad[:])
|
||||||
|
h.outer.Write(h.inner.Sum(nil))
|
||||||
|
return h.outer.Sum(nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newHMACSHA256 returns a new HMAC-SHA256 hasher using the provided key.
|
||||||
|
func newHMACSHA256(key []byte) *hmacsha256 {
|
||||||
|
h := new(hmacsha256)
|
||||||
|
h.inner = sha256.New()
|
||||||
|
h.outer = sha256.New()
|
||||||
|
h.initKey(key)
|
||||||
|
return h
|
||||||
|
}
|
||||||
|
|
||||||
|
// NonceRFC6979 generates a nonce deterministically according to RFC 6979 using
|
||||||
|
// HMAC-SHA256 for the hashing function. It takes a 32-byte hash as an input
|
||||||
|
// and returns a 32-byte nonce to be used for deterministic signing. The extra
|
||||||
|
// and version arguments are optional, but allow additional data to be added to
|
||||||
|
// the input of the HMAC. When provided, the extra data must be 32-bytes and
|
||||||
|
// version must be 16 bytes or they will be ignored.
|
||||||
|
//
|
||||||
|
// Finally, the extraIterations parameter provides a method to produce a stream
|
||||||
|
// of deterministic nonces to ensure the signing code is able to produce a nonce
|
||||||
|
// that results in a valid signature in the extremely unlikely event the
|
||||||
|
// original nonce produced results in an invalid signature (e.g. R == 0).
|
||||||
|
// Signing code should start with 0 and increment it if necessary.
|
||||||
|
func NonceRFC6979(privKey []byte, hash []byte, extra []byte, version []byte, extraIterations uint32) *ModNScalar {
|
||||||
|
// Input to HMAC is the 32-byte private key and the 32-byte hash. In
|
||||||
|
// addition, it may include the optional 32-byte extra data and 16-byte
|
||||||
|
// version. Create a fixed-size array to avoid extra allocs and slice it
|
||||||
|
// properly.
|
||||||
|
const (
|
||||||
|
privKeyLen = 32
|
||||||
|
hashLen = 32
|
||||||
|
extraLen = 32
|
||||||
|
versionLen = 16
|
||||||
|
)
|
||||||
|
var keyBuf [privKeyLen + hashLen + extraLen + versionLen]byte
|
||||||
|
|
||||||
|
// Truncate rightmost bytes of private key and hash if they are too long and
|
||||||
|
// leave left padding of zeros when they're too short.
|
||||||
|
if len(privKey) > privKeyLen {
|
||||||
|
privKey = privKey[:privKeyLen]
|
||||||
|
}
|
||||||
|
if len(hash) > hashLen {
|
||||||
|
hash = hash[:hashLen]
|
||||||
|
}
|
||||||
|
offset := privKeyLen - len(privKey) // Zero left padding if needed.
|
||||||
|
offset += copy(keyBuf[offset:], privKey)
|
||||||
|
offset += hashLen - len(hash) // Zero left padding if needed.
|
||||||
|
offset += copy(keyBuf[offset:], hash)
|
||||||
|
if len(extra) == extraLen {
|
||||||
|
offset += copy(keyBuf[offset:], extra)
|
||||||
|
if len(version) == versionLen {
|
||||||
|
offset += copy(keyBuf[offset:], version)
|
||||||
|
}
|
||||||
|
} else if len(version) == versionLen {
|
||||||
|
// When the version was specified, but not the extra data, leave the
|
||||||
|
// extra data portion all zero.
|
||||||
|
offset += privKeyLen
|
||||||
|
offset += copy(keyBuf[offset:], version)
|
||||||
|
}
|
||||||
|
key := keyBuf[:offset]
|
||||||
|
|
||||||
|
// Step B.
|
||||||
|
//
|
||||||
|
// V = 0x01 0x01 0x01 ... 0x01 such that the length of V, in bits, is
|
||||||
|
// equal to 8*ceil(hashLen/8).
|
||||||
|
//
|
||||||
|
// Note that since the hash length is a multiple of 8 for the chosen hash
|
||||||
|
// function in this optimized implementation, the result is just the hash
|
||||||
|
// length, so avoid the extra calculations. Also, since it isn't modified,
|
||||||
|
// start with a global value.
|
||||||
|
v := oneInitializer
|
||||||
|
|
||||||
|
// Step C (Go zeroes all allocated memory).
|
||||||
|
//
|
||||||
|
// K = 0x00 0x00 0x00 ... 0x00 such that the length of K, in bits, is
|
||||||
|
// equal to 8*ceil(hashLen/8).
|
||||||
|
//
|
||||||
|
// As above, since the hash length is a multiple of 8 for the chosen hash
|
||||||
|
// function in this optimized implementation, the result is just the hash
|
||||||
|
// length, so avoid the extra calculations.
|
||||||
|
k := zeroInitializer[:hashLen]
|
||||||
|
|
||||||
|
// Step D.
|
||||||
|
//
|
||||||
|
// K = HMAC_K(V || 0x00 || int2octets(x) || bits2octets(h1))
|
||||||
|
//
|
||||||
|
// Note that key is the "int2octets(x) || bits2octets(h1)" portion along
|
||||||
|
// with potential additional data as described by section 3.6 of the RFC.
|
||||||
|
hasher := newHMACSHA256(k)
|
||||||
|
hasher.Write(oneInitializer)
|
||||||
|
hasher.Write(singleZero[:])
|
||||||
|
hasher.Write(key)
|
||||||
|
k = hasher.Sum()
|
||||||
|
|
||||||
|
// Step E.
|
||||||
|
//
|
||||||
|
// V = HMAC_K(V)
|
||||||
|
hasher.ResetKey(k)
|
||||||
|
hasher.Write(v)
|
||||||
|
v = hasher.Sum()
|
||||||
|
|
||||||
|
// Step F.
|
||||||
|
//
|
||||||
|
// K = HMAC_K(V || 0x01 || int2octets(x) || bits2octets(h1))
|
||||||
|
//
|
||||||
|
// Note that key is the "int2octets(x) || bits2octets(h1)" portion along
|
||||||
|
// with potential additional data as described by section 3.6 of the RFC.
|
||||||
|
hasher.Reset()
|
||||||
|
hasher.Write(v)
|
||||||
|
hasher.Write(singleOne[:])
|
||||||
|
hasher.Write(key[:])
|
||||||
|
k = hasher.Sum()
|
||||||
|
|
||||||
|
// Step G.
|
||||||
|
//
|
||||||
|
// V = HMAC_K(V)
|
||||||
|
hasher.ResetKey(k)
|
||||||
|
hasher.Write(v)
|
||||||
|
v = hasher.Sum()
|
||||||
|
|
||||||
|
// Step H.
|
||||||
|
//
|
||||||
|
// Repeat until the value is nonzero and less than the curve order.
|
||||||
|
var generated uint32
|
||||||
|
for {
|
||||||
|
// Step H1 and H2.
|
||||||
|
//
|
||||||
|
// Set T to the empty sequence. The length of T (in bits) is denoted
|
||||||
|
// tlen; thus, at that point, tlen = 0.
|
||||||
|
//
|
||||||
|
// While tlen < qlen, do the following:
|
||||||
|
// V = HMAC_K(V)
|
||||||
|
// T = T || V
|
||||||
|
//
|
||||||
|
// Note that because the hash function output is the same length as the
|
||||||
|
// private key in this optimized implementation, there is no need to
|
||||||
|
// loop or create an intermediate T.
|
||||||
|
hasher.Reset()
|
||||||
|
hasher.Write(v)
|
||||||
|
v = hasher.Sum()
|
||||||
|
|
||||||
|
// Step H3.
|
||||||
|
//
|
||||||
|
// k = bits2int(T)
|
||||||
|
// If k is within the range [1,q-1], return it.
|
||||||
|
//
|
||||||
|
// Otherwise, compute:
|
||||||
|
// K = HMAC_K(V || 0x00)
|
||||||
|
// V = HMAC_K(V)
|
||||||
|
var secret ModNScalar
|
||||||
|
overflow := secret.SetByteSlice(v)
|
||||||
|
if !overflow && !secret.IsZero() {
|
||||||
|
generated++
|
||||||
|
if generated > extraIterations {
|
||||||
|
return &secret
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// K = HMAC_K(V || 0x00)
|
||||||
|
hasher.Reset()
|
||||||
|
hasher.Write(v)
|
||||||
|
hasher.Write(singleZero[:])
|
||||||
|
k = hasher.Sum()
|
||||||
|
|
||||||
|
// V = HMAC_K(V)
|
||||||
|
hasher.ResetKey(k)
|
||||||
|
hasher.Write(v)
|
||||||
|
v = hasher.Sum()
|
||||||
|
}
|
||||||
|
}
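The exported NonceRFC6979 above is the deterministic-nonce primitive that the package's ECDSA signing code builds on. A minimal sketch of calling it directly (the key and hash bytes below are placeholders, not values from this commit):

```go
package main

import (
	"crypto/sha256"
	"fmt"

	"github.com/decred/dcrd/dcrec/secp256k1/v4"
)

func main() {
	// Placeholder 32-byte private key and message hash; in real signing code the
	// key comes from GeneratePrivateKey and the hash from the message being signed.
	privKey := sha256.Sum256([]byte("placeholder key material"))
	msgHash := sha256.Sum256([]byte("message to sign"))

	// RFC 6979 deterministic nonce via HMAC-SHA256, with no extra data or
	// version and extraIterations starting at 0, as documented above.
	k := secp256k1.NonceRFC6979(privKey[:], msgHash[:], nil, nil, 0)

	// Serialize the resulting mod-N scalar the same way PrivateKey.Serialize does.
	var kBytes [32]byte
	k.PutBytes(&kBytes)
	fmt.Printf("nonce: %x\n", kBytes)
}
```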
|
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/privkey.go (77 lines, generated, vendored, normal file)
|
@ -0,0 +1,77 @@
|
||||||
|
// Copyright (c) 2013-2014 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2020 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/ecdsa"
|
||||||
|
"crypto/rand"
|
||||||
|
)
|
||||||
|
|
||||||
|
// PrivateKey provides facilities for working with secp256k1 private keys within
|
||||||
|
// this package and includes functionality such as serializing and parsing them
|
||||||
|
// as well as computing their associated public key.
|
||||||
|
type PrivateKey struct {
|
||||||
|
Key ModNScalar
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewPrivateKey instantiates a new private key from a scalar encoded as a
|
||||||
|
// big integer.
|
||||||
|
func NewPrivateKey(key *ModNScalar) *PrivateKey {
|
||||||
|
return &PrivateKey{Key: *key}
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrivKeyFromBytes returns a private key based on the provided byte slice which is
|
||||||
|
// interpreted as an unsigned 256-bit big-endian integer in the range [0, N-1],
|
||||||
|
// where N is the order of the curve.
|
||||||
|
//
|
||||||
|
// Note that this means passing a slice with more than 32 bytes is truncated and
|
||||||
|
// that truncated value is reduced modulo N. It is up to the caller to either
|
||||||
|
// provide a value in the appropriate range or choose to accept the described
|
||||||
|
// behavior.
|
||||||
|
//
|
||||||
|
// Typically callers should simply make use of GeneratePrivateKey when creating
|
||||||
|
// private keys which properly handles generation of appropriate values.
|
||||||
|
func PrivKeyFromBytes(privKeyBytes []byte) *PrivateKey {
|
||||||
|
var privKey PrivateKey
|
||||||
|
privKey.Key.SetByteSlice(privKeyBytes)
|
||||||
|
return &privKey
|
||||||
|
}
|
||||||
|
|
||||||
|
// GeneratePrivateKey returns a private key that is suitable for use with
|
||||||
|
// secp256k1.
|
||||||
|
func GeneratePrivateKey() (*PrivateKey, error) {
|
||||||
|
key, err := ecdsa.GenerateKey(S256(), rand.Reader)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return PrivKeyFromBytes(key.D.Bytes()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// PubKey computes and returns the public key corresponding to this private key.
|
||||||
|
func (p *PrivateKey) PubKey() *PublicKey {
|
||||||
|
var result JacobianPoint
|
||||||
|
ScalarBaseMultNonConst(&p.Key, &result)
|
||||||
|
result.ToAffine()
|
||||||
|
return NewPublicKey(&result.X, &result.Y)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Zero manually clears the memory associated with the private key. This can be
|
||||||
|
// used to explicitly clear key material from memory for enhanced security
|
||||||
|
// against memory scraping.
|
||||||
|
func (p *PrivateKey) Zero() {
|
||||||
|
p.Key.Zero()
|
||||||
|
}
|
||||||
|
|
||||||
|
// PrivKeyBytesLen defines the length in bytes of a serialized private key.
|
||||||
|
const PrivKeyBytesLen = 32
|
||||||
|
|
||||||
|
// Serialize returns the private key as a 256-bit big-endian binary-encoded
|
||||||
|
// number, padded to a length of 32 bytes.
|
||||||
|
func (p PrivateKey) Serialize() []byte {
|
||||||
|
var privKeyBytes [PrivKeyBytesLen]byte
|
||||||
|
p.Key.PutBytes(&privKeyBytes)
|
||||||
|
return privKeyBytes[:]
|
||||||
|
}
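Putting the routines above together, a small illustrative sketch (not part of the vendored file) that generates, serializes and clears a key:

```go
package main

import (
	"fmt"

	"github.com/decred/dcrd/dcrec/secp256k1/v4"
)

func main() {
	priv, err := secp256k1.GeneratePrivateKey()
	if err != nil {
		panic(err)
	}

	// 32-byte big-endian scalar, zero padded, as documented for Serialize above.
	fmt.Printf("priv: %x\n", priv.Serialize())

	// Derive the corresponding public key; its compressed encoding is defined in
	// pubkey.go, the next file in this diff.
	fmt.Printf("pub:  %x\n", priv.PubKey().SerializeCompressed())

	// Explicitly clear the key material once it is no longer needed.
	priv.Zero()
}
```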
|
vendor/github.com/decred/dcrd/dcrec/secp256k1/v4/pubkey.go (232 lines, generated, vendored, normal file)
|
@ -0,0 +1,232 @@
|
||||||
|
// Copyright (c) 2013-2014 The btcsuite developers
|
||||||
|
// Copyright (c) 2015-2021 The Decred developers
|
||||||
|
// Use of this source code is governed by an ISC
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package secp256k1
|
||||||
|
|
||||||
|
// References:
|
||||||
|
// [SEC1] Elliptic Curve Cryptography
|
||||||
|
// https://www.secg.org/sec1-v2.pdf
|
||||||
|
//
|
||||||
|
// [SEC2] Recommended Elliptic Curve Domain Parameters
|
||||||
|
// https://www.secg.org/sec2-v2.pdf
|
||||||
|
//
|
||||||
|
// [ANSI X9.62-1998] Public Key Cryptography For The Financial Services
|
||||||
|
// Industry: The Elliptic Curve Digital Signature Algorithm (ECDSA)
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// PubKeyBytesLenCompressed is the number of bytes of a serialized
|
||||||
|
// compressed public key.
|
||||||
|
PubKeyBytesLenCompressed = 33
|
||||||
|
|
||||||
|
// PubKeyBytesLenUncompressed is the number of bytes of a serialized
|
||||||
|
// uncompressed public key.
|
||||||
|
PubKeyBytesLenUncompressed = 65
|
||||||
|
|
||||||
|
// PubKeyFormatCompressedEven is the identifier prefix byte for a public key
|
||||||
|
// whose Y coordinate is even when serialized in the compressed format per
|
||||||
|
// section 2.3.4 of [SEC1](https://secg.org/sec1-v2.pdf#subsubsection.2.3.4).
|
||||||
|
PubKeyFormatCompressedEven byte = 0x02
|
||||||
|
|
||||||
|
// PubKeyFormatCompressedOdd is the identifier prefix byte for a public key
|
||||||
|
// whose Y coordinate is odd when serialized in the compressed format per
|
||||||
|
// section 2.3.4 of [SEC1](https://secg.org/sec1-v2.pdf#subsubsection.2.3.4).
|
||||||
|
PubKeyFormatCompressedOdd byte = 0x03
|
||||||
|
|
||||||
|
// PubKeyFormatUncompressed is the identifier prefix byte for a public key
|
||||||
|
// when serialized according to the uncompressed format per section 2.3.3 of
|
||||||
|
// [SEC1](https://secg.org/sec1-v2.pdf#subsubsection.2.3.3).
|
||||||
|
PubKeyFormatUncompressed byte = 0x04
|
||||||
|
|
||||||
|
// PubKeyFormatHybridEven is the identifier prefix byte for a public key
|
||||||
|
// whose Y coordinate is even when serialized according to the hybrid format
|
||||||
|
// per section 4.3.6 of [ANSI X9.62-1998].
|
||||||
|
//
|
||||||
|
// NOTE: This format makes little sense in practice and therefore this
|
||||||
|
// package will not produce public keys serialized in this format. However,
|
||||||
|
// it will parse them since they exist in the wild.
|
||||||
|
PubKeyFormatHybridEven byte = 0x06
|
||||||
|
|
||||||
|
// PubKeyFormatHybridOdd is the identifier prefix byte for a public key
|
||||||
|
// whose Y coordinate is odd when serialized according to the hybrid format
|
||||||
|
// per section 4.3.6 of [ANSI X9.62-1998].
|
||||||
|
//
|
||||||
|
// NOTE: This format makes little sense in practice and therefore this
|
||||||
|
// package will not produce public keys serialized in this format. However,
|
||||||
|
// it will parse them since they exist in the wild.
|
||||||
|
PubKeyFormatHybridOdd byte = 0x07
|
||||||
|
)
|
||||||
|
|
||||||
|
// PublicKey provides facilities for efficiently working with secp256k1 public
|
||||||
|
// keys within this package and includes functions to serialize in both
|
||||||
|
// uncompressed and compressed SEC (Standards for Efficient Cryptography)
|
||||||
|
// formats.
|
||||||
|
type PublicKey struct {
|
||||||
|
x FieldVal
|
||||||
|
y FieldVal
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewPublicKey instantiates a new public key with the given x and y
|
||||||
|
// coordinates.
|
||||||
|
//
|
||||||
|
// It should be noted that, unlike ParsePubKey, since this accepts arbitrary x
|
||||||
|
// and y coordinates, it allows creation of public keys that are not valid
|
||||||
|
// points on the secp256k1 curve. The IsOnCurve method of the returned instance
|
||||||
|
// can be used to determine validity.
|
||||||
|
func NewPublicKey(x, y *FieldVal) *PublicKey {
|
||||||
|
var pubKey PublicKey
|
||||||
|
pubKey.x.Set(x)
|
||||||
|
pubKey.y.Set(y)
|
||||||
|
return &pubKey
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParsePubKey parses a secp256k1 public key encoded according to the format
|
||||||
|
// specified by ANSI X9.62-1998, which means it is also compatible with the
|
||||||
|
// SEC (Standards for Efficient Cryptography) specification which is a subset of
|
||||||
|
// the former. In other words, it supports the uncompressed, compressed, and
|
||||||
|
// hybrid formats as follows:
|
||||||
|
//
|
||||||
|
// Compressed:
|
||||||
|
// <format byte = 0x02/0x03><32-byte X coordinate>
|
||||||
|
// Uncompressed:
|
||||||
|
// <format byte = 0x04><32-byte X coordinate><32-byte Y coordinate>
|
||||||
|
// Hybrid:
|
||||||
|
// <format byte = 0x06/0x07><32-byte X coordinate><32-byte Y coordinate>
|
||||||
|
//
|
||||||
|
// NOTE: The hybrid format makes little sense in practice and therefore this
|
||||||
|
// package will not produce public keys serialized in this format. However,
|
||||||
|
// this function will properly parse them since they exist in the wild.
|
||||||
|
func ParsePubKey(serialized []byte) (key *PublicKey, err error) {
|
||||||
|
var x, y FieldVal
|
||||||
|
switch len(serialized) {
|
||||||
|
case PubKeyBytesLenUncompressed:
|
||||||
|
// Reject unsupported public key formats for the given length.
|
||||||
|
format := serialized[0]
|
||||||
|
switch format {
|
||||||
|
case PubKeyFormatUncompressed:
|
||||||
|
case PubKeyFormatHybridEven, PubKeyFormatHybridOdd:
|
||||||
|
default:
|
||||||
|
str := fmt.Sprintf("invalid public key: unsupported format: %x",
|
||||||
|
format)
|
||||||
|
return nil, makeError(ErrPubKeyInvalidFormat, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the x and y coordinates while ensuring that they are in the
|
||||||
|
// allowed range.
|
||||||
|
if overflow := x.SetByteSlice(serialized[1:33]); overflow {
|
||||||
|
str := "invalid public key: x >= field prime"
|
||||||
|
return nil, makeError(ErrPubKeyXTooBig, str)
|
||||||
|
}
|
||||||
|
if overflow := y.SetByteSlice(serialized[33:]); overflow {
|
||||||
|
str := "invalid public key: y >= field prime"
|
||||||
|
return nil, makeError(ErrPubKeyYTooBig, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure the oddness of the y coordinate matches the specified format
|
||||||
|
// for hybrid public keys.
|
||||||
|
if format == PubKeyFormatHybridEven || format == PubKeyFormatHybridOdd {
|
||||||
|
wantOddY := format == PubKeyFormatHybridOdd
|
||||||
|
if y.IsOdd() != wantOddY {
|
||||||
|
str := fmt.Sprintf("invalid public key: y oddness does not "+
|
||||||
|
"match specified value of %v", wantOddY)
|
||||||
|
return nil, makeError(ErrPubKeyMismatchedOddness, str)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reject public keys that are not on the secp256k1 curve.
|
||||||
|
if !isOnCurve(&x, &y) {
|
||||||
|
str := fmt.Sprintf("invalid public key: [%v,%v] not on secp256k1 "+
|
||||||
|
"curve", x, y)
|
||||||
|
return nil, makeError(ErrPubKeyNotOnCurve, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
case PubKeyBytesLenCompressed:
|
||||||
|
// Reject unsupported public key formats for the given length.
|
||||||
|
format := serialized[0]
|
||||||
|
switch format {
|
||||||
|
case PubKeyFormatCompressedEven, PubKeyFormatCompressedOdd:
|
||||||
|
default:
|
||||||
|
str := fmt.Sprintf("invalid public key: unsupported format: %x",
|
||||||
|
format)
|
||||||
|
return nil, makeError(ErrPubKeyInvalidFormat, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse the x coordinate while ensuring that it is in the allowed
|
||||||
|
// range.
|
||||||
|
if overflow := x.SetByteSlice(serialized[1:33]); overflow {
|
||||||
|
str := "invalid public key: x >= field prime"
|
||||||
|
return nil, makeError(ErrPubKeyXTooBig, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt to calculate the y coordinate for the given x coordinate such
|
||||||
|
// that the result pair is a point on the secp256k1 curve and the
|
||||||
|
// solution with desired oddness is chosen.
|
||||||
|
wantOddY := format == PubKeyFormatCompressedOdd
|
||||||
|
if !DecompressY(&x, wantOddY, &y) {
|
||||||
|
str := fmt.Sprintf("invalid public key: x coordinate %v is not on "+
|
||||||
|
"the secp256k1 curve", x)
|
||||||
|
return nil, makeError(ErrPubKeyNotOnCurve, str)
|
||||||
|
}
|
||||||
|
y.Normalize()
|
||||||
|
|
||||||
|
default:
|
||||||
|
str := fmt.Sprintf("malformed public key: invalid length: %d",
|
||||||
|
len(serialized))
|
||||||
|
return nil, makeError(ErrPubKeyInvalidLen, str)
|
||||||
|
}
|
||||||
|
|
||||||
|
return NewPublicKey(&x, &y), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SerializeUncompressed serializes a public key in the 65-byte uncompressed
|
||||||
|
// format.
|
||||||
|
func (p PublicKey) SerializeUncompressed() []byte {
|
||||||
|
// 0x04 || 32-byte x coordinate || 32-byte y coordinate
|
||||||
|
var b [PubKeyBytesLenUncompressed]byte
|
||||||
|
b[0] = PubKeyFormatUncompressed
|
||||||
|
p.x.PutBytesUnchecked(b[1:33])
|
||||||
|
p.y.PutBytesUnchecked(b[33:65])
|
||||||
|
return b[:]
|
||||||
|
}
|
||||||
|
|
||||||
|
// SerializeCompressed serializes a public key in the 33-byte compressed format.
|
||||||
|
func (p PublicKey) SerializeCompressed() []byte {
|
||||||
|
// Choose the format byte depending on the oddness of the Y coordinate.
|
||||||
|
format := PubKeyFormatCompressedEven
|
||||||
|
if p.y.IsOdd() {
|
||||||
|
format = PubKeyFormatCompressedOdd
|
||||||
|
}
|
||||||
|
|
||||||
|
// 0x02 or 0x03 || 32-byte x coordinate
|
||||||
|
var b [PubKeyBytesLenCompressed]byte
|
||||||
|
b[0] = format
|
||||||
|
p.x.PutBytesUnchecked(b[1:33])
|
||||||
|
return b[:]
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsEqual compares this PublicKey instance to the one passed, returning true if
|
||||||
|
// both PublicKeys are equivalent. A PublicKey is equivalent to another, if they
|
||||||
|
// both have the same X and Y coordinate.
|
||||||
|
func (p *PublicKey) IsEqual(otherPubKey *PublicKey) bool {
|
||||||
|
return p.x.Equals(&otherPubKey.x) && p.y.Equals(&otherPubKey.y)
|
||||||
|
}
|
||||||
|
|
||||||
|
// AsJacobian converts the public key into a Jacobian point with Z=1 and stores
|
||||||
|
// the result in the provided result param. This allows the public key to be
|
||||||
|
// treated as a Jacobian point in the secp256k1 group in calculations.
|
||||||
|
func (p *PublicKey) AsJacobian(result *JacobianPoint) {
|
||||||
|
result.X.Set(&p.x)
|
||||||
|
result.Y.Set(&p.y)
|
||||||
|
result.Z.SetInt(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsOnCurve returns whether or not the public key represents a point on the
|
||||||
|
// secp256k1 curve.
|
||||||
|
func (p *PublicKey) IsOnCurve() bool {
|
||||||
|
return isOnCurve(&p.x, &p.y)
|
||||||
|
}
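As a quick illustration of the parsing and serialization routines above (a sketch, not part of the vendored file), a compressed encoding survives a round trip through ParsePubKey:

```go
package main

import (
	"fmt"

	"github.com/decred/dcrd/dcrec/secp256k1/v4"
)

func main() {
	priv, err := secp256k1.GeneratePrivateKey()
	if err != nil {
		panic(err)
	}
	pub := priv.PubKey()

	// 33 bytes: 0x02/0x03 prefix followed by the X coordinate.
	compressed := pub.SerializeCompressed()

	// ParsePubKey accepts compressed, uncompressed and hybrid encodings and
	// rejects any point that is not on the secp256k1 curve.
	parsed, err := secp256k1.ParsePubKey(compressed)
	if err != nil {
		panic(err)
	}
	fmt.Println("round-trip ok:", parsed.IsEqual(pub), parsed.IsOnCurve())
}
```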
|
vendor/github.com/go-chi/jwtauth/v5/.gitignore (3 lines, generated, vendored, normal file)
@@ -0,0 +1,3 @@
vendor/
Gopkg.lock
.idea/
vendor/github.com/go-chi/jwtauth/v5/LICENSE (20 lines, generated, vendored, normal file)
|
@ -0,0 +1,20 @@
|
||||||
|
Copyright (c) 2015-Present https://github.com/go-chi authors
|
||||||
|
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
vendor/github.com/go-chi/jwtauth/v5/README.md (111 lines, generated, vendored, normal file)
|
@ -0,0 +1,111 @@
|
||||||
|
# jwtauth - JWT authentication middleware for HTTP services
|
||||||
|
|
||||||
|
[![GoDoc Widget]][godoc]
|
||||||
|
|
||||||
|
The `jwtauth` http middleware package provides a simple way to verify a JWT token
|
||||||
|
from a http request and send the result down the request context (`context.Context`).
|
||||||
|
|
||||||
|
Please note, `jwtauth` works with any Go http router, but resides under the go-chi group
|
||||||
|
for maintenance and organization - its only 3rd party dependency is the underlying jwt library
|
||||||
|
"github.com/lestrrat-go/jwx".
|
||||||
|
|
||||||
|
In a complete JWT-authentication flow, you'll first capture the token from a http
|
||||||
|
request, decode it, verify it and then validate that it's correctly signed and hasn't
|
||||||
|
expired - the `jwtauth.Verifier` middleware handler takes care of all of that. The
|
||||||
|
`jwtauth.Verifier` will set the context values on keys `jwtauth.TokenCtxKey` and
|
||||||
|
`jwtauth.ErrorCtxKey`.
|
||||||
|
|
||||||
|
Next, it's up to an authentication handler to respond or continue processing after the
|
||||||
|
`jwtauth.Verifier`. The `jwtauth.Authenticator` middleware responds with a 401 Unauthorized
|
||||||
|
plain-text payload for all unverified tokens and passes the good ones through. You can
|
||||||
|
also copy the Authenticator and customize it to handle invalid tokens to better fit
|
||||||
|
your flow (i.e. with a JSON error response body), as sketched below.
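A minimal sketch of such a customized authenticator, assuming a JSON error body is wanted (`jwt` here is the underlying `github.com/lestrrat-go/jwx/jwt` package):

```go
// JSONAuthenticator mirrors jwtauth.Authenticator but answers with a JSON body.
func JSONAuthenticator(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		token, _, err := jwtauth.FromContext(r.Context())
		if err != nil || token == nil || jwt.Validate(token) != nil {
			w.Header().Set("Content-Type", "application/json")
			w.WriteHeader(http.StatusUnauthorized)
			w.Write([]byte(`{"error":"unauthorized"}`))
			return
		}
		next.ServeHTTP(w, r)
	})
}
```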
|
||||||
|
|
||||||
|
By default, the `Verifier` will search for a JWT token in a http request, in the order:
|
||||||
|
|
||||||
|
1. 'Authorization: BEARER T' request header
|
||||||
|
2. 'jwt' Cookie value
|
||||||
|
|
||||||
|
The first JWT string that is found as an authorization header
|
||||||
|
or cookie header is then decoded by the `lestrrat-go/jwx` library and a jwt.Token
|
||||||
|
object is set on the request context. In the case of a signature decoding error
|
||||||
|
the Verifier will also set the error on the request context.
|
||||||
|
|
||||||
|
The Verifier always calls the next http handler in sequence, which can either
|
||||||
|
be the generic `jwtauth.Authenticator` middleware or your own custom handler
|
||||||
|
which checks the request context jwt token and error to prepare a custom
|
||||||
|
http response.
|
||||||
|
|
||||||
|
Note: jwtauth supports custom verification sequences for finding a token
|
||||||
|
from a request by using the `Verify` middleware instantiator directly. The default
|
||||||
|
`Verifier` is instantiated by calling `Verify(ja, TokenFromHeader, TokenFromCookie)`.
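For example, a sketch that also accepts a token passed as a `?jwt=` query parameter (reusing `tokenAuth` and the router from the usage example below):

```go
r.Group(func(r chi.Router) {
	// Look in the query string first, then the Authorization header, then the cookie.
	r.Use(jwtauth.Verify(tokenAuth, jwtauth.TokenFromQuery, jwtauth.TokenFromHeader, jwtauth.TokenFromCookie))
	r.Use(jwtauth.Authenticator)
	// ... protected routes ...
})
```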
|
||||||
|
|
||||||
|
# Usage
|
||||||
|
|
||||||
|
See the full [example](https://github.com/go-chi/jwtauth/blob/master/_example/main.go).
|
||||||
|
|
||||||
|
```go
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/go-chi/chi/v5"
|
||||||
|
"github.com/go-chi/jwtauth/v5"
|
||||||
|
)
|
||||||
|
|
||||||
|
var tokenAuth *jwtauth.JWTAuth
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
tokenAuth = jwtauth.New("HS256", []byte("secret"), nil)
|
||||||
|
|
||||||
|
// For debugging/example purposes, we generate and print
|
||||||
|
// a sample jwt token with claims `user_id:123` here:
|
||||||
|
_, tokenString, _ := tokenAuth.Encode(map[string]interface{}{"user_id": 123})
|
||||||
|
fmt.Printf("DEBUG: a sample jwt is %s\n\n", tokenString)
|
||||||
|
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
addr := ":3333"
|
||||||
|
fmt.Printf("Starting server on %v\n", addr)
|
||||||
|
http.ListenAndServe(addr, router())
|
||||||
|
}
|
||||||
|
|
||||||
|
func router() http.Handler {
|
||||||
|
r := chi.NewRouter()
|
||||||
|
|
||||||
|
// Protected routes
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
// Seek, verify and validate JWT tokens
|
||||||
|
r.Use(jwtauth.Verifier(tokenAuth))
|
||||||
|
|
||||||
|
// Handle valid / invalid tokens. In this example, we use
|
||||||
|
// the provided authenticator middleware, but you can write your
|
||||||
|
// own very easily, look at the Authenticator method in jwtauth.go
|
||||||
|
// and tweak it, it's not scary.
|
||||||
|
r.Use(jwtauth.Authenticator)
|
||||||
|
|
||||||
|
r.Get("/admin", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
_, claims, _ := jwtauth.FromContext(r.Context())
|
||||||
|
w.Write([]byte(fmt.Sprintf("protected area. hi %v", claims["user_id"])))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
// Public routes
|
||||||
|
r.Group(func(r chi.Router) {
|
||||||
|
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Write([]byte("welcome anonymous"))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
# LICENSE
|
||||||
|
|
||||||
|
[MIT](/LICENSE)
|
||||||
|
|
||||||
|
[godoc]: https://godoc.org/github.com/go-chi/jwtauth
|
||||||
|
[godoc widget]: https://godoc.org/github.com/go-chi/jwtauth?status.svg
|
vendor/github.com/go-chi/jwtauth/v5/jwtauth.go (288 lines, generated, vendored, normal file)
|
@ -0,0 +1,288 @@
|
||||||
|
package jwtauth
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/lestrrat-go/jwx/jwa"
|
||||||
|
"github.com/lestrrat-go/jwx/jwt"
|
||||||
|
)
|
||||||
|
|
||||||
|
type JWTAuth struct {
|
||||||
|
alg jwa.SignatureAlgorithm
|
||||||
|
signKey interface{} // private-key
|
||||||
|
verifyKey interface{} // public-key, only used by RSA and ECDSA algorithms
|
||||||
|
verifier jwt.ParseOption
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
TokenCtxKey = &contextKey{"Token"}
|
||||||
|
ErrorCtxKey = &contextKey{"Error"}
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
ErrUnauthorized = errors.New("token is unauthorized")
|
||||||
|
ErrExpired = errors.New("token is expired")
|
||||||
|
ErrNBFInvalid = errors.New("token nbf validation failed")
|
||||||
|
ErrIATInvalid = errors.New("token iat validation failed")
|
||||||
|
ErrNoTokenFound = errors.New("no token found")
|
||||||
|
ErrAlgoInvalid = errors.New("algorithm mismatch")
|
||||||
|
)
|
||||||
|
|
||||||
|
func New(alg string, signKey interface{}, verifyKey interface{}) *JWTAuth {
|
||||||
|
ja := &JWTAuth{alg: jwa.SignatureAlgorithm(alg), signKey: signKey, verifyKey: verifyKey}
|
||||||
|
|
||||||
|
if ja.verifyKey != nil {
|
||||||
|
ja.verifier = jwt.WithVerify(ja.alg, ja.verifyKey)
|
||||||
|
} else {
|
||||||
|
ja.verifier = jwt.WithVerify(ja.alg, ja.signKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
return ja
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verifier http middleware handler will verify a JWT string from a http request.
|
||||||
|
//
|
||||||
|
// Verifier will search for a JWT token in a http request, in the order:
|
||||||
|
// 1. 'Authorization: BEARER T' request header
// 2. Cookie 'jwt' value
|
||||||
|
//
|
||||||
|
// The first JWT string that is found as an authorization header
// or cookie value is then decoded by the `lestrrat-go/jwx` library and a jwt.Token
|
||||||
|
// object is set on the request context. In the case of a signature decoding error
|
||||||
|
// the Verifier will also set the error on the request context.
|
||||||
|
//
|
||||||
|
// The Verifier always calls the next http handler in sequence, which can either
|
||||||
|
// be the generic `jwtauth.Authenticator` middleware or your own custom handler
|
||||||
|
// which checks the request context jwt token and error to prepare a custom
|
||||||
|
// http response.
|
||||||
|
func Verifier(ja *JWTAuth) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
return Verify(ja, TokenFromHeader, TokenFromCookie)(next)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func Verify(ja *JWTAuth, findTokenFns ...func(r *http.Request) string) func(http.Handler) http.Handler {
|
||||||
|
return func(next http.Handler) http.Handler {
|
||||||
|
hfn := func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
ctx := r.Context()
|
||||||
|
token, err := VerifyRequest(ja, r, findTokenFns...)
|
||||||
|
ctx = NewContext(ctx, token, err)
|
||||||
|
next.ServeHTTP(w, r.WithContext(ctx))
|
||||||
|
}
|
||||||
|
return http.HandlerFunc(hfn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func VerifyRequest(ja *JWTAuth, r *http.Request, findTokenFns ...func(r *http.Request) string) (jwt.Token, error) {
|
||||||
|
var tokenString string
|
||||||
|
|
||||||
|
// Extract token string from the request by calling token find functions in
|
||||||
|
// the order they were provided. Further extraction stops if a function
|
||||||
|
// returns a non-empty string.
|
||||||
|
for _, fn := range findTokenFns {
|
||||||
|
tokenString = fn(r)
|
||||||
|
if tokenString != "" {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if tokenString == "" {
|
||||||
|
return nil, ErrNoTokenFound
|
||||||
|
}
|
||||||
|
|
||||||
|
return VerifyToken(ja, tokenString)
|
||||||
|
}
|
||||||
|
|
||||||
|
func VerifyToken(ja *JWTAuth, tokenString string) (jwt.Token, error) {
|
||||||
|
// Decode & verify the token
|
||||||
|
token, err := ja.Decode(tokenString)
|
||||||
|
if err != nil {
|
||||||
|
return token, ErrorReason(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if token == nil {
|
||||||
|
return nil, ErrUnauthorized
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := jwt.Validate(token); err != nil {
|
||||||
|
return token, ErrorReason(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Valid!
|
||||||
|
return token, nil
|
||||||
|
}
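A small sketch exercising Encode and VerifyToken together outside of any HTTP handler (the HS256 secret is a placeholder):

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-chi/jwtauth/v5"
)

func main() {
	ja := jwtauth.New("HS256", []byte("placeholder-secret"), nil)

	// Issue a token carrying a single claim.
	_, tokenString, err := ja.Encode(map[string]interface{}{"user_id": 123})
	if err != nil {
		log.Fatal(err)
	}

	// Verify the signature and validate the standard claims again; a tampered or
	// expired string would come back as one of the Err* values via ErrorReason.
	token, err := jwtauth.VerifyToken(ja, tokenString)
	fmt.Println(token != nil, err) // true <nil>
}
```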
|
||||||
|
|
||||||
|
func (ja *JWTAuth) Encode(claims map[string]interface{}) (t jwt.Token, tokenString string, err error) {
|
||||||
|
t = jwt.New()
|
||||||
|
for k, v := range claims {
|
||||||
|
t.Set(k, v)
|
||||||
|
}
|
||||||
|
payload, err := ja.sign(t)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
tokenString = string(payload)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ja *JWTAuth) Decode(tokenString string) (jwt.Token, error) {
|
||||||
|
return ja.parse([]byte(tokenString))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ja *JWTAuth) sign(token jwt.Token) ([]byte, error) {
|
||||||
|
return jwt.Sign(token, ja.alg, ja.signKey)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (ja *JWTAuth) parse(payload []byte) (jwt.Token, error) {
|
||||||
|
return jwt.Parse(payload, ja.verifier)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrorReason will normalize the error message from the underlying
|
||||||
|
// jwt library
|
||||||
|
func ErrorReason(err error) error {
|
||||||
|
switch err.Error() {
|
||||||
|
case "exp not satisfied", ErrExpired.Error():
|
||||||
|
return ErrExpired
|
||||||
|
case "iat not satisfied", ErrIATInvalid.Error():
|
||||||
|
return ErrIATInvalid
|
||||||
|
case "nbf not satisfied", ErrNBFInvalid.Error():
|
||||||
|
return ErrNBFInvalid
|
||||||
|
default:
|
||||||
|
return ErrUnauthorized
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Authenticator is a default authentication middleware to enforce access from the
|
||||||
|
// Verifier middleware request context values. The Authenticator sends a 401 Unauthorized
|
||||||
|
// response for any unverified tokens and passes the good ones through. It's just fine
|
||||||
|
// until you decide to write something similar and customize your client response.
|
||||||
|
func Authenticator(next http.Handler) http.Handler {
|
||||||
|
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
|
token, _, err := FromContext(r.Context())
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), 401)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if token == nil || jwt.Validate(token) != nil {
|
||||||
|
http.Error(w, http.StatusText(401), 401)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Token is authenticated, pass it through
|
||||||
|
next.ServeHTTP(w, r)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewContext(ctx context.Context, t jwt.Token, err error) context.Context {
|
||||||
|
ctx = context.WithValue(ctx, TokenCtxKey, t)
|
||||||
|
ctx = context.WithValue(ctx, ErrorCtxKey, err)
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
|
|
||||||
|
func FromContext(ctx context.Context) (jwt.Token, map[string]interface{}, error) {
|
||||||
|
token, _ := ctx.Value(TokenCtxKey).(jwt.Token)
|
||||||
|
|
||||||
|
var err error
|
||||||
|
var claims map[string]interface{}
|
||||||
|
|
||||||
|
if token != nil {
|
||||||
|
claims, err = token.AsMap(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
return token, nil, err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
claims = map[string]interface{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
err, _ = ctx.Value(ErrorCtxKey).(error)
|
||||||
|
|
||||||
|
return token, claims, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// UnixTime returns the given time as seconds since the Unix epoch (UTC)
|
||||||
|
func UnixTime(tm time.Time) int64 {
|
||||||
|
return tm.UTC().Unix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// EpochNow is a helper function that returns the NumericDate time value used by the spec
|
||||||
|
func EpochNow() int64 {
|
||||||
|
return time.Now().UTC().Unix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// ExpireIn is a helper function to return calculated time in the future for "exp" claim
|
||||||
|
func ExpireIn(tm time.Duration) int64 {
|
||||||
|
return EpochNow() + int64(tm.Seconds())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set issued at ("iat") to specified time in the claims
|
||||||
|
func SetIssuedAt(claims map[string]interface{}, tm time.Time) {
|
||||||
|
claims["iat"] = tm.UTC().Unix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set issued at ("iat") to present time in the claims
|
||||||
|
func SetIssuedNow(claims map[string]interface{}) {
|
||||||
|
claims["iat"] = EpochNow()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set expiry ("exp") in the claims
|
||||||
|
func SetExpiry(claims map[string]interface{}, tm time.Time) {
|
||||||
|
claims["exp"] = tm.UTC().Unix()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set expiry ("exp") in the claims to some duration from the present time
|
||||||
|
func SetExpiryIn(claims map[string]interface{}, tm time.Duration) {
|
||||||
|
claims["exp"] = ExpireIn(tm)
|
||||||
|
}
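A sketch of how these claim helpers are typically combined with Encode (placeholder secret and claims):

```go
package main

import (
	"fmt"
	"time"

	"github.com/go-chi/jwtauth/v5"
)

func main() {
	ja := jwtauth.New("HS256", []byte("placeholder-secret"), nil)

	claims := map[string]interface{}{"user_id": 123}
	jwtauth.SetIssuedNow(claims)                // "iat" = current Unix time
	jwtauth.SetExpiryIn(claims, 15*time.Minute) // "exp" = now + 15 minutes

	_, tokenString, err := ja.Encode(claims)
	fmt.Println(len(tokenString) > 0, err)
}
```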
|
||||||
|
|
||||||
|
// TokenFromCookie tries to retrieve the token string from a cookie named
|
||||||
|
// "jwt".
|
||||||
|
func TokenFromCookie(r *http.Request) string {
|
||||||
|
cookie, err := r.Cookie("jwt")
|
||||||
|
if err != nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return cookie.Value
|
||||||
|
}
|
||||||
|
|
||||||
|
// TokenFromHeader tries to retrieve the token string from the
|
||||||
|
// "Authorization" reqeust header: "Authorization: BEARER T".
|
||||||
|
func TokenFromHeader(r *http.Request) string {
|
||||||
|
// Get token from authorization header.
|
||||||
|
bearer := r.Header.Get("Authorization")
|
||||||
|
if len(bearer) > 7 && strings.ToUpper(bearer[0:6]) == "BEARER" {
|
||||||
|
return bearer[7:]
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// TokenFromQuery tries to retrieve the token string from the "jwt" URI
|
||||||
|
// query parameter.
|
||||||
|
//
|
||||||
|
// To use it, build your own middleware handler, such as:
|
||||||
|
//
|
||||||
|
// func Verifier(ja *JWTAuth) func(http.Handler) http.Handler {
|
||||||
|
// return func(next http.Handler) http.Handler {
|
||||||
|
// return Verify(ja, TokenFromQuery, TokenFromHeader, TokenFromCookie)(next)
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
func TokenFromQuery(r *http.Request) string {
|
||||||
|
// Get token from query param named "jwt".
|
||||||
|
return r.URL.Query().Get("jwt")
|
||||||
|
}
|
||||||
|
|
||||||
|
// contextKey is a value for use with context.WithValue. It's used as
|
||||||
|
// a pointer so it fits in an interface{} without allocation. This technique
|
||||||
|
// for defining context keys was copied from Go 1.7's new use of context in net/http.
|
||||||
|
type contextKey struct {
|
||||||
|
name string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (k *contextKey) String() string {
|
||||||
|
return "jwtauth context value " + k.name
|
||||||
|
}
|
vendor/github.com/goccy/go-json/.codecov.yml (32 lines, generated, vendored, normal file)
|
@ -0,0 +1,32 @@
|
||||||
|
codecov:
|
||||||
|
require_ci_to_pass: yes
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
precision: 2
|
||||||
|
round: down
|
||||||
|
range: "70...100"
|
||||||
|
|
||||||
|
status:
|
||||||
|
project:
|
||||||
|
default:
|
||||||
|
target: 70%
|
||||||
|
threshold: 2%
|
||||||
|
patch: off
|
||||||
|
changes: no
|
||||||
|
|
||||||
|
parsers:
|
||||||
|
gcov:
|
||||||
|
branch_detection:
|
||||||
|
conditional: yes
|
||||||
|
loop: yes
|
||||||
|
method: no
|
||||||
|
macro: no
|
||||||
|
|
||||||
|
comment:
|
||||||
|
layout: "header,diff"
|
||||||
|
behavior: default
|
||||||
|
require_changes: no
|
||||||
|
|
||||||
|
ignore:
|
||||||
|
- internal/encoder/vm_color
|
||||||
|
- internal/encoder/vm_color_indent
|
vendor/github.com/goccy/go-json/.gitignore (2 lines, generated, vendored, normal file)
@@ -0,0 +1,2 @@
cover.html
cover.out
vendor/github.com/goccy/go-json/.golangci.yml (75 lines, generated, vendored, normal file)
|
@ -0,0 +1,75 @@
|
||||||
|
run:
|
||||||
|
skip-files:
|
||||||
|
- encode_optype.go
|
||||||
|
- ".*_test\\.go$"
|
||||||
|
|
||||||
|
linters-settings:
|
||||||
|
govet:
|
||||||
|
enable-all: true
|
||||||
|
disable:
|
||||||
|
- shadow
|
||||||
|
|
||||||
|
linters:
|
||||||
|
enable-all: true
|
||||||
|
disable:
|
||||||
|
- dogsled
|
||||||
|
- dupl
|
||||||
|
- exhaustive
|
||||||
|
- exhaustivestruct
|
||||||
|
- errorlint
|
||||||
|
- forbidigo
|
||||||
|
- funlen
|
||||||
|
- gci
|
||||||
|
- gochecknoglobals
|
||||||
|
- gochecknoinits
|
||||||
|
- gocognit
|
||||||
|
- gocritic
|
||||||
|
- gocyclo
|
||||||
|
- godot
|
||||||
|
- godox
|
||||||
|
- goerr113
|
||||||
|
- gofumpt
|
||||||
|
- gomnd
|
||||||
|
- gosec
|
||||||
|
- ifshort
|
||||||
|
- lll
|
||||||
|
- makezero
|
||||||
|
- nakedret
|
||||||
|
- nestif
|
||||||
|
- nlreturn
|
||||||
|
- paralleltest
|
||||||
|
- testpackage
|
||||||
|
- thelper
|
||||||
|
- wrapcheck
|
||||||
|
- interfacer
|
||||||
|
- lll
|
||||||
|
- nakedret
|
||||||
|
- nestif
|
||||||
|
- nlreturn
|
||||||
|
- testpackage
|
||||||
|
- wsl
|
||||||
|
|
||||||
|
issues:
|
||||||
|
exclude-rules:
|
||||||
|
# not needed
|
||||||
|
- path: /*.go
|
||||||
|
text: "ST1003: should not use underscores in package names"
|
||||||
|
linters:
|
||||||
|
- stylecheck
|
||||||
|
- path: /*.go
|
||||||
|
text: "don't use an underscore in package name"
|
||||||
|
linters:
|
||||||
|
- golint
|
||||||
|
- path: rtype.go
|
||||||
|
linters:
|
||||||
|
- golint
|
||||||
|
- stylecheck
|
||||||
|
- path: error.go
|
||||||
|
linters:
|
||||||
|
- staticcheck
|
||||||
|
|
||||||
|
# Maximum issues count per one linter. Set to 0 to disable. Default is 50.
|
||||||
|
max-issues-per-linter: 0
|
||||||
|
|
||||||
|
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
|
||||||
|
max-same-issues: 0
|
vendor/github.com/goccy/go-json/CHANGELOG.md (247 lines, generated, vendored, normal file)
|
@ -0,0 +1,247 @@
|
||||||
|
# v0.7.6 - 2021/08/13
|
||||||
|
|
||||||
|
* Fix nil slice assignment ( #276 )
|
||||||
|
* Improve error message ( #277 )
|
||||||
|
|
||||||
|
# v0.7.5 - 2021/08/12
|
||||||
|
|
||||||
|
* Fix encoding of embedded struct with tags ( #265 )
|
||||||
|
* Fix encoding of embedded struct that isn't first field ( #272 )
|
||||||
|
* Fix decoding of binary type with escaped char ( #273 )
|
||||||
|
|
||||||
|
# v0.7.4 - 2021/07/06
|
||||||
|
|
||||||
|
* Fix encoding of indirect layout structure ( #264 )
|
||||||
|
|
||||||
|
# v0.7.3 - 2021/06/29
|
||||||
|
|
||||||
|
* Fix encoding of pointer type in empty interface ( #262 )
|
||||||
|
|
||||||
|
# v0.7.2 - 2021/06/26
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Add decoder for func type to fix decoding of nil function value ( #257 )
|
||||||
|
* Fix stream decoding of []byte type ( #258 )
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
* Improve decoding performance of map[string]interface{} type ( use `mapassign_faststr` ) ( #256 )
|
||||||
|
* Improve encoding performance of empty interface type ( remove recursive calling of `vm.Run` ) ( #259 )
|
||||||
|
|
||||||
|
### Benchmark
|
||||||
|
|
||||||
|
* Add bytedance/sonic as benchmark target ( #254 )
|
||||||
|
|
||||||
|
# v0.7.1 - 2021/06/18
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix error when unmarshal empty array ( #253 )
|
||||||
|
|
||||||
|
# v0.7.0 - 2021/06/12
|
||||||
|
|
||||||
|
### Support context for MarshalJSON and UnmarshalJSON ( #248 )
|
||||||
|
|
||||||
|
* json.MarshalContext(context.Context, interface{}, ...json.EncodeOption) ([]byte, error)
|
||||||
|
* json.NewEncoder(io.Writer).EncodeContext(context.Context, interface{}, ...json.EncodeOption) error
|
||||||
|
* json.UnmarshalContext(context.Context, []byte, interface{}, ...json.DecodeOption) error
|
||||||
|
* json.NewDecoder(io.Reader).DecodeContext(context.Context, interface{}) error
|
||||||
|
|
||||||
|
```go
|
||||||
|
type MarshalerContext interface {
|
||||||
|
MarshalJSON(context.Context) ([]byte, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
type UnmarshalerContext interface {
|
||||||
|
UnmarshalJSON(context.Context, []byte) error
|
||||||
|
}
|
||||||
|
```
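A sketch of how these context-aware hooks can be used; the `User` type is hypothetical and only for illustration:

```go
package main

import (
	"context"
	"fmt"

	"github.com/goccy/go-json"
)

// User satisfies MarshalerContext, so MarshalContext hands it the caller's context.
type User struct {
	Name string
}

func (u User) MarshalJSON(ctx context.Context) ([]byte, error) {
	// A real implementation might read request-scoped values from ctx here.
	return []byte(fmt.Sprintf(`{"name":%q}`, u.Name)), nil
}

func main() {
	b, err := json.MarshalContext(context.Background(), User{Name: "alice"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"name":"alice"}
}
```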
|
||||||
|
|
||||||
|
### Add DecodeFieldPriorityFirstWin option ( #242 )
|
||||||
|
|
||||||
|
In the default behavior, go-json, like encoding/json, will reflect the result of the last evaluation when a field with the same name exists. I've added new options to allow you to change this behavior. `json.DecodeFieldPriorityFirstWin` option reflects the result of the first evaluation if a field with the same name exists. This behavior has a performance advantage as it allows the subsequent strings to be skipped if all fields have been evaluated.
|
||||||
|
|
||||||
|
### Fix encoder
|
||||||
|
|
||||||
|
* Fix indent number contains recursive type ( #249 )
|
||||||
|
* Fix encoding of using empty interface as map key ( #244 )
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix decoding fields containing escaped characters ( #237 )
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
* Move some tests to subdirectory ( #243 )
|
||||||
|
* Refactor package layout for decoder ( #238 )
|
||||||
|
|
||||||
|
# v0.6.1 - 2021/06/02
|
||||||
|
|
||||||
|
### Fix encoder
|
||||||
|
|
||||||
|
* Fix value of totalLength for encoding ( #236 )
|
||||||
|
|
||||||
|
# v0.6.0 - 2021/06/01
|
||||||
|
|
||||||
|
### Support Colorize option for encoding (#233)
|
||||||
|
|
||||||
|
```go
|
||||||
|
b, err := json.MarshalWithOption(v, json.Colorize(json.DefaultColorScheme))
|
||||||
|
if err != nil {
|
||||||
|
...
|
||||||
|
}
|
||||||
|
fmt.Println(string(b)) // print colored json
|
||||||
|
```
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
* Fix opcode layout - Adjust memory layout of the opcode to 128 bytes in a 64-bit environment ( #230 )
|
||||||
|
* Refactor encode option ( #231 )
|
||||||
|
* Refactor escape string ( #232 )
|
||||||
|
|
||||||
|
# v0.5.1 - 2021/5/20
|
||||||
|
|
||||||
|
### Optimization
|
||||||
|
|
||||||
|
* Add type addrShift to enable bigger encoder/decoder cache ( #213 )
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Keep original reference of slice element ( #229 )
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
* Refactor Debug mode for encoding ( #226 )
|
||||||
|
* Generate VM sources for encoding ( #227 )
|
||||||
|
* Refactor validator for null/true/false for decoding ( #221 )
|
||||||
|
|
||||||
|
# v0.5.0 - 2021/5/9
|
||||||
|
|
||||||
|
### Supports using omitempty and string tags at the same time ( #216 )
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix stream decoder for unicode char ( #215 )
|
||||||
|
* Fix decoding of slice element ( #219 )
|
||||||
|
* Fix calculating of buffer length for stream decoder ( #220 )
|
||||||
|
|
||||||
|
### Refactor
|
||||||
|
|
||||||
|
* replace skipWhiteSpace goto by loop ( #212 )
|
||||||
|
|
||||||
|
# v0.4.14 - 2021/5/4
|
||||||
|
|
||||||
|
### Benchmark
|
||||||
|
|
||||||
|
* Add valyala/fastjson to benchmark ( #193 )
|
||||||
|
* Add benchmark task for CI ( #211 )
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix decoding of slice with unmarshal json type ( #198 )
|
||||||
|
* Fix decoding of null value for interface type that does not implement Unmarshaler ( #205 )
|
||||||
|
* Fix decoding of null value to []byte by json.Unmarshal ( #206 )
|
||||||
|
* Fix decoding of backslash char at the end of string ( #207 )
|
||||||
|
* Fix stream decoder for null/true/false value ( #208 )
|
||||||
|
* Fix stream decoder for slow reader ( #211 )
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
|
||||||
|
* If cap of slice is enough, reuse slice data for compatibility with encoding/json ( #200 )
|
||||||
|
|
||||||
|
# v0.4.13 - 2021/4/20
|
||||||
|
|
||||||
|
### Fix json.Compact and json.Indent
|
||||||
|
|
||||||
|
* Support validation the input buffer for json.Compact and json.Indent ( #189 )
|
||||||
|
* Optimize json.Compact and json.Indent ( improve memory footprint ) ( #190 )
|
||||||
|
|
||||||
|
# v0.4.12 - 2021/4/15
|
||||||
|
|
||||||
|
### Fix encoder
|
||||||
|
|
||||||
|
* Fix unnecessary indent for empty slice type ( #181 )
|
||||||
|
* Fix encoding of omitempty feature for the slice or interface type ( #183 )
|
||||||
|
* Fix encoding custom types zero values with omitempty when marshaller exists ( #187 )
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix decoder for invalid top level value ( #184 )
|
||||||
|
* Fix decoder for invalid number value ( #185 )
|
||||||
|
|
||||||
|
# v0.4.11 - 2021/4/3
|
||||||
|
|
||||||
|
* Improve decoder performance for interface type
|
||||||
|
|
||||||
|
# v0.4.10 - 2021/4/2
|
||||||
|
|
||||||
|
### Fix encoder
|
||||||
|
|
||||||
|
* Fixed a bug when encoding slice and map containing recursive structures
|
||||||
|
* Fixed a logic to determine if indirect reference
|
||||||
|
|
||||||
|
# v0.4.9 - 2021/3/29
|
||||||
|
|
||||||
|
### Add debug mode
|
||||||
|
|
||||||
|
If you use `json.MarshalWithOption(v, json.Debug())` and `panic` occurred in `go-json`, produces debug information to console.
|
||||||
|
|
||||||
|
### Support a new feature to compatible with encoding/json
|
||||||
|
|
||||||
|
- invalid UTF-8 is coerced to valid UTF-8 ( without performance down )
|
||||||
|
|
||||||
|
### Fix encoder
|
||||||
|
|
||||||
|
- Fixed handling of MarshalJSON of function type
|
||||||
|
|
||||||
|
### Fix decoding of slice of pointer type
|
||||||
|
|
||||||
|
If there is a pointer value, go-json will use it. (This behavior is necessary to achieve the ability to prioritize pre-filled values). However, since slices are reused internally, there was a bug that referred to the previous pointer value. Therefore, it is not necessary to refer to the pointer value in advance for the slice element, so we explicitly initialize slice element by `nil`.
|
||||||
|
|
||||||
|
# v0.4.8 - 2021/3/21
|
||||||
|
|
||||||
|
### Reduce memory usage at compile time
|
||||||
|
|
||||||
|
* go-json have used about 2GB of memory at compile time, but now it can compile with about less than 550MB.
|
||||||
|
|
||||||
|
### Fix any encoder's bug
|
||||||
|
|
||||||
|
* Add many test cases for encoder
|
||||||
|
* Fix composite type ( slice/array/map )
|
||||||
|
* Fix pointer types
|
||||||
|
* Fix encoding of MarshalJSON or MarshalText or json.Number type
|
||||||
|
|
||||||
|
### Refactor encoder
|
||||||
|
|
||||||
|
* Change package layout for reducing memory usage at compile
|
||||||
|
* Remove anonymous and only operation
|
||||||
|
* Remove root property from encodeCompileContext and opcode
|
||||||
|
|
||||||
|
### Fix CI
|
||||||
|
|
||||||
|
* Add Go 1.16
|
||||||
|
* Remove Go 1.13
|
||||||
|
* Fix `make cover` task
|
||||||
|
|
||||||
|
### Number/Delim/Token/RawMessage use the types defined in encoding/json by type alias
|
||||||
|
|
||||||
|
# v0.4.7 - 2021/02/22
|
||||||
|
|
||||||
|
### Fix decoder
|
||||||
|
|
||||||
|
* Fix decoding of deep recursive structure
|
||||||
|
* Fix decoding of embedded unexported pointer field
|
||||||
|
* Fix invalid test case
|
||||||
|
* Fix decoding of invalid value
|
||||||
|
* Fix decoding of prefilled value
|
||||||
|
* Fix not being able to return UnmarshalTypeError when it should be returned
|
||||||
|
* Fix decoding of null value
|
||||||
|
* Fix decoding of type of null string
|
||||||
|
* Use pre allocated pointer if exists it at decoding
|
||||||
|
|
||||||
|
### Reduce memory usage at compile
|
||||||
|
|
||||||
|
* Integrate int/int8/int16/int32/int64 and uint/uint8/uint16/uint32/uint64 operation to reduce memory usage at compile
|
||||||
|
|
||||||
|
### Remove unnecessary optype
|
vendor/github.com/goccy/go-json/LICENSE (21 lines, generated, vendored, normal file)
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2020 Masaaki Goshima
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
vendor/github.com/goccy/go-json/Makefile (39 lines, generated, vendored, normal file)
|
@ -0,0 +1,39 @@
|
||||||
|
PKG := github.com/goccy/go-json
|
||||||
|
|
||||||
|
BIN_DIR := $(CURDIR)/bin
|
||||||
|
PKGS := $(shell go list ./... | grep -v internal/cmd|grep -v test)
|
||||||
|
COVER_PKGS := $(foreach pkg,$(PKGS),$(subst $(PKG),.,$(pkg)))
|
||||||
|
|
||||||
|
COMMA := ,
|
||||||
|
EMPTY :=
|
||||||
|
SPACE := $(EMPTY) $(EMPTY)
|
||||||
|
COVERPKG_OPT := $(subst $(SPACE),$(COMMA),$(COVER_PKGS))
|
||||||
|
|
||||||
|
$(BIN_DIR):
|
||||||
|
@mkdir -p $(BIN_DIR)
|
||||||
|
|
||||||
|
.PHONY: cover
|
||||||
|
cover:
|
||||||
|
go test -coverpkg=$(COVERPKG_OPT) -coverprofile=cover.out ./...
|
||||||
|
|
||||||
|
.PHONY: cover-html
|
||||||
|
cover-html: cover
|
||||||
|
go tool cover -html=cover.out
|
||||||
|
|
||||||
|
.PHONY: lint
|
||||||
|
lint: golangci-lint
|
||||||
|
golangci-lint run
|
||||||
|
|
||||||
|
golangci-lint: | $(BIN_DIR)
|
||||||
|
@{ \
|
||||||
|
set -e; \
|
||||||
|
GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \
|
||||||
|
cd $$GOLANGCI_LINT_TMP_DIR; \
|
||||||
|
go mod init tmp; \
|
||||||
|
GOBIN=$(BIN_DIR) go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.36.0; \
|
||||||
|
rm -rf $$GOLANGCI_LINT_TMP_DIR; \
|
||||||
|
}
|
||||||
|
|
||||||
|
.PHONY: generate
|
||||||
|
generate:
|
||||||
|
go generate ./internal/...
|
vendor/github.com/goccy/go-json/README.md (529 lines, generated, vendored, normal file)
|
@ -0,0 +1,529 @@
|
||||||
|
# go-json
|
||||||
|
|
||||||
|
![Go](https://github.com/goccy/go-json/workflows/Go/badge.svg)
|
||||||
|
[![GoDoc](https://godoc.org/github.com/goccy/go-json?status.svg)](https://pkg.go.dev/github.com/goccy/go-json?tab=doc)
|
||||||
|
[![codecov](https://codecov.io/gh/goccy/go-json/branch/master/graph/badge.svg)](https://codecov.io/gh/goccy/go-json)
|
||||||
|
|
||||||
|
Fast JSON encoder/decoder compatible with encoding/json for Go
|
||||||
|
|
||||||
|
<img width="400px" src="https://user-images.githubusercontent.com/209884/92572337-42b42900-f2bf-11ea-973a-c74a359553a5.png"></img>
|
||||||
|
|
||||||
|
# Roadmap
|
||||||
|
|
||||||
|
```
|
||||||
|
* version ( expected release date )
|
||||||
|
|
||||||
|
* v0.7.0
|
||||||
|
|
|
||||||
|
| while maintaining compatibility with encoding/json, we will add convenient APIs
|
||||||
|
|
|
||||||
|
v
|
||||||
|
* v1.0.0
|
||||||
|
```
|
||||||
|
|
||||||
|
We are accepting requests for features to be implemented between v0.7.0 and v1.0.0.
|
||||||
|
If you have the API you need, please submit your issue [here](https://github.com/goccy/go-json/issues).
|
||||||
|
For example, I'm thinking of supporting `context.Context` of `json.Marshaler` and decoding using JSON Path.
|
||||||
|
|
||||||
|
# Features
|
||||||
|
|
||||||
|
- Drop-in replacement of `encoding/json`
|
||||||
|
- Fast ( See [Benchmark section](https://github.com/goccy/go-json#benchmarks) )
|
||||||
|
- Flexible customization with options
|
||||||
|
- Coloring the encoded string
|
||||||
|
- Can propagate context.Context to `MarshalJSON` or `UnmarshalJSON`
|
||||||
|
|
||||||
|
# Installation
|
||||||
|
|
||||||
|
```
|
||||||
|
go get github.com/goccy/go-json
|
||||||
|
```
|
||||||
|
|
||||||
|
# How to use
|
||||||
|
|
||||||
|
Replace import statement from `encoding/json` to `github.com/goccy/go-json`
|
||||||
|
|
||||||
|
```
|
||||||
|
-import "encoding/json"
|
||||||
|
+import "github.com/goccy/go-json"
|
||||||
|
```
|
||||||
|
|
||||||
|
# JSON library comparison
|
||||||
|
|
||||||
|
| name | encoder | decoder | compatible with `encoding/json` |
|
||||||
|
| :----: | :------: | :-----: | :-----------------------------: |
|
||||||
|
| encoding/json | yes | yes | N/A |
|
||||||
|
| [json-iterator/go](https://github.com/json-iterator/go) | yes | yes | partial |
|
||||||
|
| [easyjson](https://github.com/mailru/easyjson) | yes | yes | no |
|
||||||
|
| [gojay](https://github.com/francoispqt/gojay) | yes | yes | no |
|
||||||
|
| [segmentio/encoding/json](https://github.com/segmentio/encoding/tree/master/json) | yes | yes | partial |
|
||||||
|
| [jettison](https://github.com/wI2L/jettison) | yes | no | no |
|
||||||
|
| [simdjson-go](https://github.com/minio/simdjson-go) | no | yes | no |
|
||||||
|
| goccy/go-json | yes | yes | yes |
|
||||||
|
|
||||||
|
- `json-iterator/go` isn't compatible with `encoding/json` in many ways (e.g. https://github.com/json-iterator/go/issues/229 ), and those issues have not been addressed for a long time.
|
||||||
|
- `segmentio/encoding/json` has good encoder support, but some decoder APIs such as `Token` ( streaming decode ) are not supported
|
||||||
|
|
||||||
|
## Other libraries
|
||||||
|
|
||||||
|
- [jingo](https://github.com/bet365/jingo)
|
||||||
|
|
||||||
|
I tried the benchmark but it didn't work.
|
||||||
|
Also, it seems to panic when it receives an unexpected value because there is no error handling...
|
||||||
|
|
||||||
|
- [ffjson](https://github.com/pquerna/ffjson)
|
||||||
|
|
||||||
|
Benchmarking gave very slow results.
|
||||||
|
It seems that it is assumed that the user will use the buffer pool properly.
|
||||||
|
Also, development seems to have already stopped.
|
||||||
|
|
||||||
|
# Benchmarks
|
||||||
|
|
||||||
|
```
|
||||||
|
$ cd benchmarks
|
||||||
|
$ go test -bench .
|
||||||
|
```
|
||||||
|
|
||||||
|
## Encode
|
||||||
|
|
||||||
|
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126758-0845cb00-68f5-11eb-8db7-086fcf9bcfaa.png"></img>
|
||||||
|
<img width="700px" src="https://user-images.githubusercontent.com/209884/107126757-07ad3480-68f5-11eb-87aa-858cc5eacfcb.png"></img>
|
||||||
|
|
||||||
|
## Decode
|
||||||
|
|
||||||
|
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979944-bd1d6d80-7002-11eb-944b-9d17b6674e3f.png">
|
||||||
|
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979931-b989e680-7002-11eb-87a0-66fc22d90dd4.png">
|
||||||
|
<img width="700" alt="" src="https://user-images.githubusercontent.com/209884/107979940-bc84d700-7002-11eb-9647-869bbc25c9d9.png">
|
||||||
|
|
||||||
|
|
||||||
|
# Fuzzing
|
||||||
|
|
||||||
|
[go-json-fuzz](https://github.com/goccy/go-json-fuzz) is the repository for fuzzing tests.
|
||||||
|
If you run the test in this repository and find a bug, please commit to corpus to go-json-fuzz and report the issue to [go-json](https://github.com/goccy/go-json/issues).
|
||||||
|
|
||||||
|
# How it works
|
||||||
|
|
||||||
|
`go-json` is very fast in both encoding and decoding compared to other libraries.
|
||||||
|
It would be easier to achieve performance by using automatic code generation or a dedicated interface, but `go-json` deliberately sticks to compatibility with `encoding/json` and its simple interface. Despite this, we are developing with the aim of being the fastest library.
|
||||||
|
|
||||||
|
Here, we explain the various speed-up techniques implemented by `go-json`.
|
||||||
|
|
||||||
|
## Basic technique
|
||||||
|
|
||||||
|
The techniques listed here are the ones used by most of the libraries listed above.
|
||||||
|
|
||||||
|
### Buffer reuse
|
||||||
|
|
||||||
|
Since the only value required for the result of `json.Marshal(interface{}) ([]byte, error)` is `[]byte`, the only value that must be allocated during encoding is the return value `[]byte`.
|
||||||
|
|
||||||
|
Also, as the number of allocations increases, the performance will be affected, so the number of allocations should be kept as low as possible when creating `[]byte`.
|
||||||
|
|
||||||
|
Therefore, there is a technique to reduce the number of times a new buffer must be allocated by reusing the buffer used for the previous encoding by using `sync.Pool`.
|
||||||
|
|
||||||
|
Finally, by allocating a buffer exactly as long as the result and copying the contents into it, in theory only that single allocation is needed.
|
||||||
|
|
||||||
|
```go
|
||||||
|
type buffer struct {
|
||||||
|
data []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
var bufPool = sync.Pool{
|
||||||
|
New: func() interface{} {
|
||||||
|
return &buffer{data: make([]byte, 0, 1024)}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := bufPool.Get().(*buffer)
|
||||||
|
data := encode(buf.data) // reuse buf.data
|
||||||
|
|
||||||
|
newBuf := make([]byte, len(data))
|
||||||
|
copy(newBuf, data)
|
||||||
|
|
||||||
|
buf.data = data
|
||||||
|
bufPool.Put(buf)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Elimination of reflection
|
||||||
|
|
||||||
|
As you know, the reflection operation is very slow.
|
||||||
|
|
||||||
|
Therefore, using the fact that the address position where the type information is stored is fixed for each binary ( we call this `typeptr` ),
|
||||||
|
we can use the address in the type information to call a pre-built optimized process.
|
||||||
|
|
||||||
|
For example, you can get the address of the type information from an `interface{}` as follows, and use that address to call a process that does not use reflection.
|
||||||
|
|
||||||
|
To process without reflection, pass a pointer (`unsafe.Pointer`) to where the value is stored.
|
||||||
|
|
||||||
|
```go
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ unsafe.Pointer
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
var typeToEncoder = map[uintptr]func(unsafe.Pointer)([]byte, error){}
|
||||||
|
|
||||||
|
func Marshal(v interface{}) ([]byte, error) {
|
||||||
|
iface := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
typeptr := uintptr(iface.typ)
|
||||||
|
if enc, exists := typeToEncoder[typeptr]; exists {
|
||||||
|
return enc(iface.ptr)
|
||||||
|
}
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
※ In reality, `typeToEncoder` can be referenced by multiple goroutines, so exclusive control is required.
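A minimal sketch of that exclusive control, guarding the `typeToEncoder` map from the snippet above with a `sync.RWMutex` (the helper names are made up, and `sync`/`unsafe` are assumed to be imported; the real implementation takes a different route, see the dispatch section below):

```go
var (
	encMu         sync.RWMutex
	typeToEncoder = map[uintptr]func(unsafe.Pointer) ([]byte, error){}
)

// lookupEncoder is the fast path: many goroutines may read concurrently.
func lookupEncoder(typeptr uintptr) (func(unsafe.Pointer) ([]byte, error), bool) {
	encMu.RLock()
	enc, exists := typeToEncoder[typeptr]
	encMu.RUnlock()
	return enc, exists
}

// storeEncoder is the slow path, taken only the first time a type is seen.
func storeEncoder(typeptr uintptr, enc func(unsafe.Pointer) ([]byte, error)) {
	encMu.Lock()
	typeToEncoder[typeptr] = enc
	encMu.Unlock()
}
```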
|
||||||
|
|
||||||
|
## Unique speed-up technique
|
||||||
|
|
||||||
|
## Encoder
|
||||||
|
|
||||||
|
### Do not escape arguments of `Marshal`
|
||||||
|
|
||||||
|
`json.Marshal` and `json.Unmarshal` receive an `interface{}` value and determine the type dynamically in order to process it.
|
||||||
|
Normally, you need the `reflect` library to determine the type dynamically, but since `reflect.Type` is defined as an `interface`, calling a method of `reflect.Type` causes the value passed to `reflect` to escape.
|
||||||
|
|
||||||
|
Therefore, the arguments of `Marshal` and `Unmarshal` always escape to the heap.
|
||||||
|
However, `go-json` can use the feature of `reflect.Type` while avoiding escaping.
|
||||||
|
|
||||||
|
`reflect.Type` is defined as `interface`, but in reality `reflect.Type` is implemented only by the structure `rtype` defined in the `reflect` package.
|
||||||
|
For this reason, in practice `reflect.Type` is always a `*reflect.rtype`.
|
||||||
|
|
||||||
|
Therefore, by handling `*reflect.rtype` (the implementation of `reflect.Type`) directly, escaping can be avoided because the call goes through a concrete `struct` rather than an `interface`.
|
||||||
|
|
||||||
|
The technique for working with `*reflect.rtype` directly from `go-json` is implemented at [rtype.go](https://github.com/goccy/go-json/blob/master/internal/runtime/rtype.go)
|
||||||
|
|
||||||
|
Also, the same technique is cut out as a library ( https://github.com/goccy/go-reflect )
|
||||||
|
|
||||||
|
Initially this feature was the default behavior of `go-json`.
|
||||||
|
But after careful testing, I found that when a large value is passed to `json.Marshal()` and the argument cannot be placed on the stack, it is not properly escaped to the heap (a bug in the Go compiler).
|
||||||
|
|
||||||
|
Therefore, this feature is provided as an **optional** one until this issue is resolved.
|
||||||
|
|
||||||
|
To use it, add the `NoEscape` suffix, as in `MarshalNoEscape()`.
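For example, a minimal usage sketch (the struct here is made up; the output matches `Marshal` as long as the argument does not hit the compiler issue described above):

```go
package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

type user struct {
	ID   int    `json:"id"`
	Name string `json:"name"`
}

func main() {
	// MarshalNoEscape keeps the argument on the stack instead of
	// letting it escape to the heap.
	b, err := json.MarshalNoEscape(user{ID: 1, Name: "alice"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"id":1,"name":"alice"}
}
```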
|
||||||
|
|
||||||
|
### Encoding using opcode sequence
|
||||||
|
|
||||||
|
I explained that you can use `typeptr` to call a pre-built process from type information.
|
||||||
|
|
||||||
|
In other libraries, this dedicated process is dispatched through a function call (for example an anonymous function), but function calls are inherently slow and should be avoided as much as possible.
|
||||||
|
|
||||||
|
Therefore, `go-json` adopted an instruction-based execution model, which is also used to implement virtual machines for programming languages.
|
||||||
|
|
||||||
|
The first time a type is encoded, the opcode ( instruction ) sequence required for encoding is created.
|
||||||
|
From the second time onward, `typeptr` is used to fetch the cached, pre-built opcode sequence and drive the encoding. An example of an opcode sequence is shown below.
|
||||||
|
|
||||||
|
```go
|
||||||
|
json.Marshal(struct{
|
||||||
|
X int `json:"x"`
|
||||||
|
Y string `json:"y"`
|
||||||
|
}{X: 1, Y: "hello"})
|
||||||
|
```
|
||||||
|
|
||||||
|
When encoding a structure like the one above, create a sequence of opcodes like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
- opStructFieldHead ( `{` )
|
||||||
|
- opStructFieldInt ( `"x": 1,` )
|
||||||
|
- opStructFieldString ( `"y": "hello"` )
|
||||||
|
- opStructEnd ( `}` )
|
||||||
|
- opEnd
|
||||||
|
```
|
||||||
|
|
||||||
|
※ As each operation is processed, the characters shown on the right are written to the output.
|
||||||
|
|
||||||
|
In addition, each opcode is managed by the following structure ( pseudo code ).
|
||||||
|
|
||||||
|
```go
|
||||||
|
type opType int
|
||||||
|
const (
|
||||||
|
opStructFieldHead opType = iota
|
||||||
|
opStructFieldInt
|
||||||
|
opStructFieldString
|
||||||
|
opStructEnd
|
||||||
|
opEnd
|
||||||
|
)
|
||||||
|
type opcode struct {
|
||||||
|
op opType
|
||||||
|
key []byte
offset uintptr // field offset within the struct, used by the encode loop below
|
||||||
|
next *opcode
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The process of encoding using the opcode sequence is roughly implemented as follows.
|
||||||
|
|
||||||
|
```go
|
||||||
|
func encode(code *opcode, b []byte, p unsafe.Pointer) ([]byte, error) {
|
||||||
|
for {
|
||||||
|
switch code.op {
|
||||||
|
case opStructFieldHead:
|
||||||
|
b = append(b, '{')
|
||||||
|
code = code.next
|
||||||
|
case opStructFieldInt:
|
||||||
|
b = append(b, code.key...)
|
||||||
|
b = appendInt(b, (*int)(unsafe.Pointer(uintptr(p)+code.offset)))
|
||||||
|
code = code.next
|
||||||
|
case opStructFieldString:
|
||||||
|
b = append(b, code.key...)
|
||||||
|
b = appendString(b, (*string)(unsafe.Pointer(uintptr(p)+code.offset)))
|
||||||
|
code = code.next
|
||||||
|
case opStructEnd:
|
||||||
|
b = append(b, '}')
|
||||||
|
code = code.next
|
||||||
|
case opEnd:
|
||||||
|
goto END
|
||||||
|
}
|
||||||
|
}
|
||||||
|
END:
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
In this way, one large `switch-case` walks the linked list of opcodes to encode, avoiding unnecessary function calls.
|
||||||
|
|
||||||
|
### Opcode sequence optimization
|
||||||
|
|
||||||
|
One of the advantages of encoding using the opcode sequence is the ease of optimization.
|
||||||
|
The opcode sequence mentioned above is actually converted into the following optimized operations and used.
|
||||||
|
|
||||||
|
```
|
||||||
|
- opStructFieldHeadInt ( `{"x": 1,` )
|
||||||
|
- opStructEndString ( `"y": "hello"}` )
|
||||||
|
- opEnd
|
||||||
|
```
|
||||||
|
|
||||||
|
It has been reduced from 5 opcodes to 3 opcodes!
|
||||||
|
Reducing the number of opcodes means reducing the number of branches with `switch-case`.
|
||||||
|
In other words, the closer the number of operations is to 1, the faster the processing can be performed.
|
||||||
|
|
||||||
|
In `go-json`, the number of opcodes is reduced by optimizations like the one above, and encoding is further sped up by preparing opcodes with optimized paths.
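As a sketch, the two merged opcodes would slot into the `encode` loop shown earlier as single `switch` cases (this is pseudo code in the same spirit as above; `appendInt` and `appendString` are the hypothetical helpers used there):

```go
case opStructFieldHeadInt: // writes `{"x": 1,` in one step
	b = append(b, '{')
	b = append(b, code.key...)
	b = appendInt(b, (*int)(unsafe.Pointer(uintptr(p)+code.offset)))
	code = code.next
case opStructEndString: // writes `"y": "hello"}` and closes the object
	b = append(b, code.key...)
	b = appendString(b, (*string)(unsafe.Pointer(uintptr(p)+code.offset)))
	b = append(b, '}')
	code = code.next
```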
|
||||||
|
|
||||||
|
### Change recursive call from CALL to JMP
|
||||||
|
|
||||||
|
Recursive processing is required during encoding if the type is defined recursively as follows:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type T struct {
|
||||||
|
X int
|
||||||
|
U *U
|
||||||
|
}
|
||||||
|
|
||||||
|
type U struct {
|
||||||
|
T *T
|
||||||
|
}
|
||||||
|
|
||||||
|
b, err := json.Marshal(&T{
|
||||||
|
X: 1,
|
||||||
|
U: &U{
|
||||||
|
T: &T{
|
||||||
|
X: 2,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
fmt.Println(string(b)) // {"X":1,"U":{"T":{"X":2,"U":null}}}
|
||||||
|
```
|
||||||
|
|
||||||
|
In `go-json`, recursive processing is handled by the `opStructFieldRecursive` operation type.
|
||||||
|
|
||||||
|
In this operation, after acquiring the opcode sequence used for recursive processing, the function is **not** called recursively; instead, the values needed to resume are saved on a stack managed by the encoder itself, and execution simply moves on to the next operation.
|
||||||
|
|
||||||
|
Implementing recursion with a `JMP` operation while avoiding `CALL` is a well-known technique for building fast virtual machines.
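A toy illustration of the idea, not go-json's actual implementation: the little interpreter below enters a nested opcode sequence with a jump and comes back by popping an explicit stack, so no Go function is ever called recursively.

```go
package main

import "fmt"

type opType int

const (
	opInt          opType = iota // emit an integer
	opRecursive                  // "JMP" into a nested opcode sequence
	opRecursiveEnd               // "RET" back to the saved opcode
	opEnd
)

type opcode struct {
	op    opType
	val   int
	jmpTo *opcode // head of the nested sequence for opRecursive
	next  *opcode
}

// run walks the opcode list without ever calling itself recursively:
// nested sequences are entered with a jump and left by popping a stack.
func run(code *opcode) []int {
	var out []int
	var stack []*opcode // saved "return addresses"
	for {
		switch code.op {
		case opInt:
			out = append(out, code.val)
			code = code.next
		case opRecursive:
			stack = append(stack, code.next) // save where to resume
			code = code.jmpTo                // JMP instead of CALL
		case opRecursiveEnd:
			code = stack[len(stack)-1] // resume at the saved opcode
			stack = stack[:len(stack)-1]
		case opEnd:
			return out
		}
	}
}

func main() {
	inner := &opcode{op: opInt, val: 2, next: &opcode{op: opRecursiveEnd}}
	jmp := &opcode{op: opRecursive, jmpTo: inner, next: &opcode{op: opInt, val: 3, next: &opcode{op: opEnd}}}
	outer := &opcode{op: opInt, val: 1, next: jmp}
	fmt.Println(run(outer)) // [1 2 3]
}
```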
|
||||||
|
|
||||||
|
For more details, please refer to [the article](https://engineering.mercari.com/blog/entry/1599563768-081104c850) ( but Japanese only ).
|
||||||
|
|
||||||
|
### Dispatch by typeptr from map to slice
|
||||||
|
|
||||||
|
When retrieving the data cached from the type information by `typeptr`, we would usually use a map.
|
||||||
|
A map requires exclusive control, so a naive implementation would use `sync.Map`.
|
||||||
|
|
||||||
|
However, this is slow, so it's a good idea to use the `atomic` package for exclusive control as implemented by `segmentio/encoding/json` ( https://github.com/segmentio/encoding/blob/master/json/codec.go#L41-L55 ).
|
||||||
|
|
||||||
|
This implementation makes the set slower in exchange for a faster get, but it works well because of the nature of the library: the same type is encoded far more often than a new type is registered.
|
||||||
|
|
||||||
|
However, profiling showed that `runtime.mapaccess2` accounts for a significant percentage of the execution time, so I looked into changing the lookup from a map to a slice.
|
||||||
|
|
||||||
|
There is an API named `typelinks` defined in the `runtime` package that the `reflect` package uses internally.
|
||||||
|
This allows you to get all the type information defined in the binary at runtime.
|
||||||
|
|
||||||
|
Since all type information can be acquired, a slice large enough for every type can be constructed in advance, and lookups can then be done with the `typeptr` value without worrying about out-of-range access.
|
||||||
|
|
||||||
|
However, if there is too much type information, this uses a lot of memory, so by default the optimization is only applied if the slice fits within **2 MiB**.
|
||||||
|
|
||||||
|
If this approach is not available, it will fall back to the `atomic` based process described above.
|
||||||
|
|
||||||
|
If you want to know more, please refer to the implementation [here](https://github.com/goccy/go-json/blob/master/internal/runtime/type.go#L36-L100)
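A rough sketch of the slice-based dispatch under the assumptions above (all names here are made up; the linked file shows the real thing): the smallest type address becomes the base, type addresses are aligned so the low bits can be shifted away, and lookup becomes plain slice indexing.

```go
package typecache

type opcodeSet struct{ /* pre-built opcode sequence for one type */ }

// Filled in once at startup from the type addresses reported by the runtime.
var (
	baseTypeAddr  uintptr      // smallest type address in the binary
	typeAddrShift uintptr      // alignment of type addresses, as a shift amount
	codeSets      []*opcodeSet // one slot per possible (shifted) type address
)

// codeSetForType turns a typeptr into a plain slice index:
// no hashing and no locking on the read path.
func codeSetForType(typeptr uintptr) *opcodeSet {
	return codeSets[(typeptr-baseTypeAddr)>>typeAddrShift]
}

// storeCodeSet records a freshly compiled opcode set for its type.
func storeCodeSet(typeptr uintptr, set *opcodeSet) {
	codeSets[(typeptr-baseTypeAddr)>>typeAddrShift] = set
}
```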
|
||||||
|
|
||||||
|
## Decoder
|
||||||
|
|
||||||
|
### Dispatch by typeptr from map to slice
|
||||||
|
|
||||||
|
Like the encoder, the decoder also uses typeptr to call the dedicated process.
|
||||||
|
|
||||||
|
### Faster termination character inspection using NUL character
|
||||||
|
|
||||||
|
In order to decode, you have to traverse the input buffer character by character.
|
||||||
|
Checking on every step whether the buffer has reached its end is very slow.
|
||||||
|
|
||||||
|
`buf` : `[]byte` variable that holds the input passed to the decoder
|
||||||
|
`cursor` : `int64` variable that holds the current read position
|
||||||
|
|
||||||
|
```go
|
||||||
|
buflen := len(buf)
|
||||||
|
for ; cursor < buflen; cursor++ { // comparing cursor with buflen on every iteration is slow
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\r', '\t':
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Therefore, by adding the `NUL` (`\000`) character to the end of the read buffer as shown below, it is possible to check the termination character at the same time as other characters.
|
||||||
|
|
||||||
|
```go
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\r', '\t':
|
||||||
|
case '\000':
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
```
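Preparing such a buffer is just a copy into a slice that is one byte longer; the extra byte keeps its zero value, which is exactly `\000` (the decoder entry points in the vendored `decode.go` below do the same thing):

```go
// withNUL returns a copy of data with a trailing NUL byte so the scanning
// loop can detect the end of input without a length comparison.
func withNUL(data []byte) []byte {
	src := make([]byte, len(data)+1) // the extra byte stays 0x00
	copy(src, data)
	return src
}
```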
|
||||||
|
|
||||||
|
### Use Boundary Check Elimination
|
||||||
|
|
||||||
|
Due to the `NUL` character optimization, the Go compiler does a boundary check every time, even though `buf[cursor]` does not cause out-of-range access.
|
||||||
|
|
||||||
|
Therefore, `go-json` eliminates the bounds check in hot spots by fetching characters through pointer operations, as in the following code.
|
||||||
|
|
||||||
|
```go
|
||||||
|
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||||
|
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||||
|
}
|
||||||
|
|
||||||
|
p := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case ' ', '\n', '\r', '\t':
|
||||||
|
case '\000':
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Checking the existence of fields of struct using Bitmaps
|
||||||
|
|
||||||
|
Profiling showed that, during struct decoding, the field lookup process was taking a long time.
|
||||||
|
|
||||||
|
For example, consider decoding a string like `{"a":1,"b":2,"c":3}` into the following structure:
|
||||||
|
|
||||||
|
```go
|
||||||
|
type T struct {
|
||||||
|
A int `json:"a"`
|
||||||
|
B int `json:"b"`
|
||||||
|
C int `json:"c"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
It turned out that, during decoding, a lot of time is spent looking up the decoder for a field from its field name, as shown below.
|
||||||
|
|
||||||
|
```go
|
||||||
|
fieldName := decodeKey(buf, cursor) // "a" or "b" or "c"
|
||||||
|
decoder, exists := fieldToDecoderMap[fieldName] // so slow
|
||||||
|
if exists {
|
||||||
|
decoder(buf, cursor)
|
||||||
|
} else {
|
||||||
|
skipValue(buf, cursor)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
To improve this, `json-iterator/go` is optimized to branch with a `switch-case` when the structure has 10 or fewer fields (`switch-case` is faster than a map). However, because the value hashed by the FNV algorithm is used for the branching, there is a risk of hash collisions. `gojay` handles this part quickly by having library users write the `switch-case` themselves.
|
||||||
|
|
||||||
|
|
||||||
|
`go-json` implements a new approach that is different from these. I call this **bitmap field optimization**.
|
||||||
|
|
||||||
|
The range of values a single character can take can be represented by a `[256]byte` table. Also, if the structure has 8 or fewer fields, an `int8` can represent the state of every field.
|
||||||
|
In other words, it has the following structure.
|
||||||
|
|
||||||
|
- Base ( 8bit ): `00000000`
|
||||||
|
- Key "a": `00000001` ( assign key "a" to the first bit )
|
||||||
|
- Key "b": `00000010` ( assign key "b" to the second bit )
|
||||||
|
- Key "c": `00000100` ( assign key "c" to the third bit )
|
||||||
|
|
||||||
|
The bitmap structure is as follows:
|
||||||
|
|
||||||
|
```
|
||||||
|
| key index(0) |
|
||||||
|
------------------------
|
||||||
|
0 | 00000000 |
|
||||||
|
1 | 00000000 |
|
||||||
|
~~ | |
|
||||||
|
97 (a) | 00000001 |
|
||||||
|
98 (b) | 00000010 |
|
||||||
|
99 (c) | 00000100 |
|
||||||
|
~~ | |
|
||||||
|
255 | 00000000 |
|
||||||
|
```
|
||||||
|
|
||||||
|
You can think of this as a bitmap with a height of `256` and a width equal to the maximum field-name length.
|
||||||
|
In other words, it can be represented by the following type.
|
||||||
|
|
||||||
|
```go
|
||||||
|
[maxFieldKeyLength][256]int8
|
||||||
|
```
|
||||||
|
|
||||||
|
When decoding a field name character by character, the pre-built bitmap is consulted as follows to check whether the character can still match a field.
|
||||||
|
|
||||||
|
```go
|
||||||
|
var curBit int8 = math.MaxInt8 // 11111111
|
||||||
|
|
||||||
|
c := char(buf, cursor)
|
||||||
|
bit := bitmap[keyIdx][c]
|
||||||
|
curBit &= bit
|
||||||
|
if curBit == 0 {
|
||||||
|
// not found field
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If `curBit` is still not `0` when the end of the field string is reached, the string may have matched one of the fields.
However, if the decoded string is shorter than the field name, this can produce a false hit.
|
||||||
|
|
||||||
|
- input: `{"a":1}`
|
||||||
|
```go
|
||||||
|
type T struct {
|
||||||
|
X int `json:"abc"`
|
||||||
|
}
|
||||||
|
```
|
||||||
|
※ Since `a` is shorter than `abc`, decoding can reach the end of the field string without `curBit` ever becoming 0.
|
||||||
|
|
||||||
|
Rest assured: in this case it doesn't matter, because you can tell whether it was a real hit by comparing the string length of `a` with the string length of `abc`.
|
||||||
|
|
||||||
|
Finally, calculate the position of the bit where `1` is set and get the corresponding value, and you're done.
|
||||||
|
|
||||||
|
Using this technique, field lookups are possible with only bitwise operations and access to slices.
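A minimal sketch of building such a bitmap for the `a`/`b`/`c` example above (a slice is used in place of the fixed `[maxFieldKeyLength][256]int8` array; everything else is made up for illustration, and it assumes at most 8 fields):

```go
// buildBitmap assigns bit i to field i and marks, for every key position,
// which characters can still belong to which fields.
func buildBitmap(fieldNames []string) [][256]int8 {
	maxLen := 0
	for _, name := range fieldNames {
		if len(name) > maxLen {
			maxLen = len(name)
		}
	}
	bitmap := make([][256]int8, maxLen)
	for i, name := range fieldNames {
		bit := int8(1) << uint(i) // field i owns bit i (i < 8)
		for keyIdx := 0; keyIdx < len(name); keyIdx++ {
			bitmap[keyIdx][name[keyIdx]] |= bit
		}
	}
	return bitmap
}
```

For `[]string{"a", "b", "c"}` this produces exactly the table above: at key index 0, row `97 (a)` holds `00000001`, row `98 (b)` holds `00000010`, and row `99 (c)` holds `00000100`.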
|
||||||
|
|
||||||
|
`go-json` uses a similar technique for structs with 9 to 16 fields. In that case, the bitmap is built with the type `[maxKeyLen][256]int16`.
|
||||||
|
|
||||||
|
Currently, in addition to the limit on the number of fields, this optimization is skipped when the longest field name is too long (specifically, 64 bytes or more), in order to keep memory usage down.
|
||||||
|
|
||||||
|
### Others
|
||||||
|
|
||||||
|
I have done a lot of other optimizations. I will find time to write about them. If you have any questions about what's written here or other optimizations, please visit the `#go-json` channel on `gophers.slack.com`.
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
The following articles tell the story of go-json (Japanese only).
|
||||||
|
|
||||||
|
- https://speakerdeck.com/goccy/zui-su-falsejsonraiburariwoqiu-mete
|
||||||
|
- https://engineering.mercari.com/blog/entry/1599563768-081104c850/
|
||||||
|
|
||||||
|
# Looking for Sponsors
|
||||||
|
|
||||||
|
I'm looking for sponsors for this library. This library is being developed as a personal project in my spare time. If you want a quick response or problem resolution when using this library in your project, please register as a [sponsor](https://github.com/sponsors/goccy). I will cooperate as much as possible. Of course, this library is released under the MIT license, so you can use it freely and for free.
|
||||||
|
|
||||||
|
# License
|
||||||
|
|
||||||
|
MIT
|
68
vendor/github.com/goccy/go-json/color.go
generated
vendored
Normal file
68
vendor/github.com/goccy/go-json/color.go
generated
vendored
Normal file
|
@ -0,0 +1,68 @@
|
||||||
|
package json
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
)
|
||||||
|
|
||||||
|
type (
|
||||||
|
ColorFormat = encoder.ColorFormat
|
||||||
|
ColorScheme = encoder.ColorScheme
|
||||||
|
)
|
||||||
|
|
||||||
|
const escape = "\x1b"
|
||||||
|
|
||||||
|
type colorAttr int
|
||||||
|
|
||||||
|
//nolint:deadcode,varcheck
|
||||||
|
const (
|
||||||
|
fgBlackColor colorAttr = iota + 30
|
||||||
|
fgRedColor
|
||||||
|
fgGreenColor
|
||||||
|
fgYellowColor
|
||||||
|
fgBlueColor
|
||||||
|
fgMagentaColor
|
||||||
|
fgCyanColor
|
||||||
|
fgWhiteColor
|
||||||
|
)
|
||||||
|
|
||||||
|
//nolint:deadcode,varcheck
|
||||||
|
const (
|
||||||
|
fgHiBlackColor colorAttr = iota + 90
|
||||||
|
fgHiRedColor
|
||||||
|
fgHiGreenColor
|
||||||
|
fgHiYellowColor
|
||||||
|
fgHiBlueColor
|
||||||
|
fgHiMagentaColor
|
||||||
|
fgHiCyanColor
|
||||||
|
fgHiWhiteColor
|
||||||
|
)
|
||||||
|
|
||||||
|
func createColorFormat(attr colorAttr) ColorFormat {
|
||||||
|
return ColorFormat{
|
||||||
|
Header: wrapColor(attr),
|
||||||
|
Footer: resetColor(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func wrapColor(attr colorAttr) string {
|
||||||
|
return fmt.Sprintf("%s[%dm", escape, attr)
|
||||||
|
}
|
||||||
|
|
||||||
|
func resetColor() string {
|
||||||
|
return wrapColor(colorAttr(0))
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
DefaultColorScheme = &ColorScheme{
|
||||||
|
Int: createColorFormat(fgHiMagentaColor),
|
||||||
|
Uint: createColorFormat(fgHiMagentaColor),
|
||||||
|
Float: createColorFormat(fgHiMagentaColor),
|
||||||
|
Bool: createColorFormat(fgHiYellowColor),
|
||||||
|
String: createColorFormat(fgHiGreenColor),
|
||||||
|
Binary: createColorFormat(fgHiRedColor),
|
||||||
|
ObjectKey: createColorFormat(fgHiCyanColor),
|
||||||
|
Null: createColorFormat(fgBlueColor),
|
||||||
|
}
|
||||||
|
)
|
232
vendor/github.com/goccy/go-json/decode.go
generated
vendored
Normal file
232
vendor/github.com/goccy/go-json/decode.go
generated
vendored
Normal file
|
@ -0,0 +1,232 @@
|
||||||
|
package json
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"reflect"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/decoder"
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Decoder struct {
|
||||||
|
s *decoder.Stream
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
nul = '\000'
|
||||||
|
)
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func unmarshal(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||||
|
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||||
|
copy(src, data)
|
||||||
|
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
|
||||||
|
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
ctx := decoder.TakeRuntimeContext()
|
||||||
|
ctx.Buf = src
|
||||||
|
ctx.Option.Flags = 0
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(ctx.Option)
|
||||||
|
}
|
||||||
|
cursor, err := dec.Decode(ctx, 0, 0, header.ptr)
|
||||||
|
if err != nil {
|
||||||
|
decoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
decoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return validateEndBuf(src, cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
func unmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||||
|
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||||
|
copy(src, data)
|
||||||
|
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
|
||||||
|
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
rctx := decoder.TakeRuntimeContext()
|
||||||
|
rctx.Buf = src
|
||||||
|
rctx.Option.Flags = 0
|
||||||
|
rctx.Option.Flags |= decoder.ContextOption
|
||||||
|
rctx.Option.Context = ctx
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(rctx.Option)
|
||||||
|
}
|
||||||
|
cursor, err := dec.Decode(rctx, 0, 0, header.ptr)
|
||||||
|
if err != nil {
|
||||||
|
decoder.ReleaseRuntimeContext(rctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
decoder.ReleaseRuntimeContext(rctx)
|
||||||
|
return validateEndBuf(src, cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
func unmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||||
|
src := make([]byte, len(data)+1) // append nul byte to the end
|
||||||
|
copy(src, data)
|
||||||
|
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
|
||||||
|
if err := validateType(header.typ, uintptr(header.ptr)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dec, err := decoder.CompileToGetDecoder(header.typ)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := decoder.TakeRuntimeContext()
|
||||||
|
ctx.Buf = src
|
||||||
|
ctx.Option.Flags = 0
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(ctx.Option)
|
||||||
|
}
|
||||||
|
cursor, err := dec.Decode(ctx, 0, 0, noescape(header.ptr))
|
||||||
|
if err != nil {
|
||||||
|
decoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
decoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return validateEndBuf(src, cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateEndBuf(src []byte, cursor int64) error {
|
||||||
|
for {
|
||||||
|
switch src[cursor] {
|
||||||
|
case ' ', '\t', '\n', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case nul:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.ErrSyntax(
|
||||||
|
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||||
|
cursor+1,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
//nolint:staticcheck
|
||||||
|
//go:nosplit
|
||||||
|
func noescape(p unsafe.Pointer) unsafe.Pointer {
|
||||||
|
x := uintptr(p)
|
||||||
|
return unsafe.Pointer(x ^ 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateType(typ *runtime.Type, p uintptr) error {
|
||||||
|
if typ == nil || typ.Kind() != reflect.Ptr || p == 0 {
|
||||||
|
return &InvalidUnmarshalError{Type: runtime.RType2Type(typ)}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewDecoder returns a new decoder that reads from r.
|
||||||
|
//
|
||||||
|
// The decoder introduces its own buffering and may
|
||||||
|
// read data from r beyond the JSON values requested.
|
||||||
|
func NewDecoder(r io.Reader) *Decoder {
|
||||||
|
s := decoder.NewStream(r)
|
||||||
|
return &Decoder{
|
||||||
|
s: s,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Buffered returns a reader of the data remaining in the Decoder's
|
||||||
|
// buffer. The reader is valid until the next call to Decode.
|
||||||
|
func (d *Decoder) Buffered() io.Reader {
|
||||||
|
return d.s.Buffered()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Decode reads the next JSON-encoded value from its
|
||||||
|
// input and stores it in the value pointed to by v.
|
||||||
|
//
|
||||||
|
// See the documentation for Unmarshal for details about
|
||||||
|
// the conversion of JSON into a Go value.
|
||||||
|
func (d *Decoder) Decode(v interface{}) error {
|
||||||
|
return d.DecodeWithOption(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// DecodeContext reads the next JSON-encoded value from its
|
||||||
|
// input and stores it in the value pointed to by v with context.Context.
|
||||||
|
func (d *Decoder) DecodeContext(ctx context.Context, v interface{}) error {
|
||||||
|
d.s.Option.Flags |= decoder.ContextOption
|
||||||
|
d.s.Option.Context = ctx
|
||||||
|
return d.DecodeWithOption(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Decoder) DecodeWithOption(v interface{}, optFuncs ...DecodeOptionFunc) error {
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
typ := header.typ
|
||||||
|
ptr := uintptr(header.ptr)
|
||||||
|
typeptr := uintptr(unsafe.Pointer(typ))
|
||||||
|
// noescape trick for header.typ ( reflect.*rtype )
|
||||||
|
copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))
|
||||||
|
|
||||||
|
if err := validateType(copiedType, ptr); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
dec, err := decoder.CompileToGetDecoder(typ)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err := d.s.PrepareForDecode(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
s := d.s
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(s.Option)
|
||||||
|
}
|
||||||
|
if err := dec.DecodeStream(s, 0, header.ptr); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
s.Reset()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Decoder) More() bool {
|
||||||
|
return d.s.More()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Decoder) Token() (Token, error) {
|
||||||
|
return d.s.Token()
|
||||||
|
}
|
||||||
|
|
||||||
|
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||||
|
// is a struct and the input contains object keys which do not match any
|
||||||
|
// non-ignored, exported fields in the destination.
|
||||||
|
func (d *Decoder) DisallowUnknownFields() {
|
||||||
|
d.s.DisallowUnknownFields = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Decoder) InputOffset() int64 {
|
||||||
|
return d.s.TotalOffset()
|
||||||
|
}
|
||||||
|
|
||||||
|
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||||
|
// Number instead of as a float64.
|
||||||
|
func (d *Decoder) UseNumber() {
|
||||||
|
d.s.UseNumber = true
|
||||||
|
}
|
13
vendor/github.com/goccy/go-json/docker-compose.yml
generated
vendored
Normal file
13
vendor/github.com/goccy/go-json/docker-compose.yml
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
version: '2'
|
||||||
|
services:
|
||||||
|
go-json:
|
||||||
|
image: golang:1.16
|
||||||
|
volumes:
|
||||||
|
- '.:/go/src/go-json'
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
memory: 620M
|
||||||
|
working_dir: /go/src/go-json
|
||||||
|
command: |
|
||||||
|
sh -c "go test -c . && ls go-json.test"
|
323
vendor/github.com/goccy/go-json/encode.go
generated
vendored
Normal file
323
vendor/github.com/goccy/go-json/encode.go
generated
vendored
Normal file
|
@ -0,0 +1,323 @@
|
||||||
|
package json
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"io"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
"github.com/goccy/go-json/internal/encoder/vm"
|
||||||
|
"github.com/goccy/go-json/internal/encoder/vm_color"
|
||||||
|
"github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||||
|
"github.com/goccy/go-json/internal/encoder/vm_indent"
|
||||||
|
)
|
||||||
|
|
||||||
|
// An Encoder writes JSON values to an output stream.
|
||||||
|
type Encoder struct {
|
||||||
|
w io.Writer
|
||||||
|
enabledIndent bool
|
||||||
|
enabledHTMLEscape bool
|
||||||
|
prefix string
|
||||||
|
indentStr string
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewEncoder returns a new encoder that writes to w.
|
||||||
|
func NewEncoder(w io.Writer) *Encoder {
|
||||||
|
return &Encoder{w: w, enabledHTMLEscape: true}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Encode writes the JSON encoding of v to the stream, followed by a newline character.
|
||||||
|
//
|
||||||
|
// See the documentation for Marshal for details about the conversion of Go values to JSON.
|
||||||
|
func (e *Encoder) Encode(v interface{}) error {
|
||||||
|
return e.EncodeWithOption(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// EncodeWithOption call Encode with EncodeOption.
|
||||||
|
func (e *Encoder) EncodeWithOption(v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||||
|
ctx := encoder.TakeRuntimeContext()
|
||||||
|
ctx.Option.Flag = 0
|
||||||
|
|
||||||
|
err := e.encodeWithOption(ctx, v, optFuncs...)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// EncodeContext call Encode with context.Context and EncodeOption.
|
||||||
|
func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||||
|
rctx := encoder.TakeRuntimeContext()
|
||||||
|
rctx.Option.Flag = 0
|
||||||
|
rctx.Option.Flag |= encoder.ContextOption
|
||||||
|
rctx.Option.Context = ctx
|
||||||
|
|
||||||
|
err := e.encodeWithOption(rctx, v, optFuncs...)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(rctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (e *Encoder) encodeWithOption(ctx *encoder.RuntimeContext, v interface{}, optFuncs ...EncodeOptionFunc) error {
|
||||||
|
if e.enabledHTMLEscape {
|
||||||
|
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||||
|
}
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(ctx.Option)
|
||||||
|
}
|
||||||
|
var (
|
||||||
|
buf []byte
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
if e.enabledIndent {
|
||||||
|
buf, err = encodeIndent(ctx, v, e.prefix, e.indentStr)
|
||||||
|
} else {
|
||||||
|
buf, err = encode(ctx, v)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if e.enabledIndent {
|
||||||
|
buf = buf[:len(buf)-2]
|
||||||
|
} else {
|
||||||
|
buf = buf[:len(buf)-1]
|
||||||
|
}
|
||||||
|
buf = append(buf, '\n')
|
||||||
|
if _, err := e.w.Write(buf); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetEscapeHTML specifies whether problematic HTML characters should be escaped inside JSON quoted strings.
|
||||||
|
// The default behavior is to escape &, <, and > to \u0026, \u003c, and \u003e to avoid certain safety problems that can arise when embedding JSON in HTML.
|
||||||
|
//
|
||||||
|
// In non-HTML settings where the escaping interferes with the readability of the output, SetEscapeHTML(false) disables this behavior.
|
||||||
|
func (e *Encoder) SetEscapeHTML(on bool) {
|
||||||
|
e.enabledHTMLEscape = on
|
||||||
|
}
|
||||||
|
|
||||||
|
// SetIndent instructs the encoder to format each subsequent encoded value as if indented by the package-level function Indent(dst, src, prefix, indent).
|
||||||
|
// Calling SetIndent("", "") disables indentation.
|
||||||
|
func (e *Encoder) SetIndent(prefix, indent string) {
|
||||||
|
if prefix == "" && indent == "" {
|
||||||
|
e.enabledIndent = false
|
||||||
|
return
|
||||||
|
}
|
||||||
|
e.prefix = prefix
|
||||||
|
e.indentStr = indent
|
||||||
|
e.enabledIndent = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||||
|
rctx := encoder.TakeRuntimeContext()
|
||||||
|
rctx.Option.Flag = 0
|
||||||
|
rctx.Option.Flag = encoder.HTMLEscapeOption | encoder.ContextOption
|
||||||
|
rctx.Option.Context = ctx
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(rctx.Option)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf, err := encode(rctx, v)
|
||||||
|
if err != nil {
|
||||||
|
encoder.ReleaseRuntimeContext(rctx)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||||
|
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||||
|
// dst buffer size and src buffer size are different.
|
||||||
|
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||||
|
buf = buf[:len(buf)-1]
|
||||||
|
copied := make([]byte, len(buf))
|
||||||
|
copy(copied, buf)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(rctx)
|
||||||
|
return copied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshal(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||||
|
ctx := encoder.TakeRuntimeContext()
|
||||||
|
|
||||||
|
ctx.Option.Flag = 0
|
||||||
|
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(ctx.Option)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf, err := encode(ctx, v)
|
||||||
|
if err != nil {
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||||
|
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||||
|
// dst buffer size and src buffer size are different.
|
||||||
|
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||||
|
buf = buf[:len(buf)-1]
|
||||||
|
copied := make([]byte, len(buf))
|
||||||
|
copy(copied, buf)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return copied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalNoEscape(v interface{}) ([]byte, error) {
|
||||||
|
ctx := encoder.TakeRuntimeContext()
|
||||||
|
|
||||||
|
ctx.Option.Flag = 0
|
||||||
|
ctx.Option.Flag |= encoder.HTMLEscapeOption
|
||||||
|
|
||||||
|
buf, err := encodeNoEscape(ctx, v)
|
||||||
|
if err != nil {
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// this line exists to escape call of `runtime.makeslicecopy` .
|
||||||
|
// if use `make([]byte, len(buf)-1)` and `copy(copied, buf)`,
|
||||||
|
// dst buffer size and src buffer size are differrent.
|
||||||
|
// in this case, compiler uses `runtime.makeslicecopy`, but it is slow.
|
||||||
|
buf = buf[:len(buf)-1]
|
||||||
|
copied := make([]byte, len(buf))
|
||||||
|
copy(copied, buf)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return copied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalIndent(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) {
|
||||||
|
ctx := encoder.TakeRuntimeContext()
|
||||||
|
|
||||||
|
ctx.Option.Flag = 0
|
||||||
|
ctx.Option.Flag |= (encoder.HTMLEscapeOption | encoder.IndentOption)
|
||||||
|
for _, optFunc := range optFuncs {
|
||||||
|
optFunc(ctx.Option)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf, err := encodeIndent(ctx, v, prefix, indent)
|
||||||
|
if err != nil {
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
buf = buf[:len(buf)-2]
|
||||||
|
copied := make([]byte, len(buf))
|
||||||
|
copy(copied, buf)
|
||||||
|
|
||||||
|
encoder.ReleaseRuntimeContext(ctx)
|
||||||
|
return copied, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func encode(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||||
|
b := ctx.Buf[:0]
|
||||||
|
if v == nil {
|
||||||
|
b = encoder.AppendNull(ctx, b)
|
||||||
|
b = encoder.AppendComma(ctx, b)
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
typ := header.typ
|
||||||
|
|
||||||
|
typeptr := uintptr(unsafe.Pointer(typ))
|
||||||
|
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p := uintptr(header.ptr)
|
||||||
|
ctx.Init(p, codeSet.CodeLength)
|
||||||
|
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||||
|
|
||||||
|
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
ctx.Buf = buf
|
||||||
|
return buf, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func encodeNoEscape(ctx *encoder.RuntimeContext, v interface{}) ([]byte, error) {
|
||||||
|
b := ctx.Buf[:0]
|
||||||
|
if v == nil {
|
||||||
|
b = encoder.AppendNull(ctx, b)
|
||||||
|
b = encoder.AppendComma(ctx, b)
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
typ := header.typ
|
||||||
|
|
||||||
|
typeptr := uintptr(unsafe.Pointer(typ))
|
||||||
|
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p := uintptr(header.ptr)
|
||||||
|
ctx.Init(p, codeSet.CodeLength)
|
||||||
|
buf, err := encodeRunCode(ctx, b, codeSet)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.Buf = buf
|
||||||
|
return buf, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func encodeIndent(ctx *encoder.RuntimeContext, v interface{}, prefix, indent string) ([]byte, error) {
|
||||||
|
b := ctx.Buf[:0]
|
||||||
|
if v == nil {
|
||||||
|
b = encoder.AppendNull(ctx, b)
|
||||||
|
b = encoder.AppendCommaIndent(ctx, b)
|
||||||
|
return b, nil
|
||||||
|
}
|
||||||
|
header := (*emptyInterface)(unsafe.Pointer(&v))
|
||||||
|
typ := header.typ
|
||||||
|
|
||||||
|
typeptr := uintptr(unsafe.Pointer(typ))
|
||||||
|
codeSet, err := encoder.CompileToGetCodeSet(typeptr)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
p := uintptr(header.ptr)
|
||||||
|
ctx.Init(p, codeSet.CodeLength)
|
||||||
|
buf, err := encodeRunIndentCode(ctx, b, codeSet, prefix, indent)
|
||||||
|
|
||||||
|
ctx.KeepRefs = append(ctx.KeepRefs, header.ptr)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.Buf = buf
|
||||||
|
return buf, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func encodeRunCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||||
|
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||||
|
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||||
|
return vm_color.DebugRun(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
return vm.DebugRun(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||||
|
return vm_color.Run(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
return vm.Run(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
|
||||||
|
func encodeRunIndentCode(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet, prefix, indent string) ([]byte, error) {
|
||||||
|
ctx.Prefix = []byte(prefix)
|
||||||
|
ctx.IndentStr = []byte(indent)
|
||||||
|
if (ctx.Option.Flag & encoder.DebugOption) != 0 {
|
||||||
|
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||||
|
return vm_color_indent.DebugRun(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
return vm_indent.DebugRun(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
if (ctx.Option.Flag & encoder.ColorizeOption) != 0 {
|
||||||
|
return vm_color_indent.Run(ctx, b, codeSet)
|
||||||
|
}
|
||||||
|
return vm_indent.Run(ctx, b, codeSet)
|
||||||
|
}
|
39
vendor/github.com/goccy/go-json/error.go
generated
vendored
Normal file
39
vendor/github.com/goccy/go-json/error.go
generated
vendored
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
package json
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Before Go 1.2, an InvalidUTF8Error was returned by Marshal when
|
||||||
|
// attempting to encode a string value with invalid UTF-8 sequences.
|
||||||
|
// As of Go 1.2, Marshal instead coerces the string to valid UTF-8 by
|
||||||
|
// replacing invalid bytes with the Unicode replacement rune U+FFFD.
|
||||||
|
//
|
||||||
|
// Deprecated: No longer used; kept for compatibility.
|
||||||
|
type InvalidUTF8Error = errors.InvalidUTF8Error
|
||||||
|
|
||||||
|
// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
|
||||||
|
// (The argument to Unmarshal must be a non-nil pointer.)
|
||||||
|
type InvalidUnmarshalError = errors.InvalidUnmarshalError
|
||||||
|
|
||||||
|
// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
|
||||||
|
type MarshalerError = errors.MarshalerError
|
||||||
|
|
||||||
|
// A SyntaxError is a description of a JSON syntax error.
|
||||||
|
type SyntaxError = errors.SyntaxError
|
||||||
|
|
||||||
|
// An UnmarshalFieldError describes a JSON object key that
|
||||||
|
// led to an unexported (and therefore unwritable) struct field.
|
||||||
|
//
|
||||||
|
// Deprecated: No longer used; kept for compatibility.
|
||||||
|
type UnmarshalFieldError = errors.UnmarshalFieldError
|
||||||
|
|
||||||
|
// An UnmarshalTypeError describes a JSON value that was
|
||||||
|
// not appropriate for a value of a specific Go type.
|
||||||
|
type UnmarshalTypeError = errors.UnmarshalTypeError
|
||||||
|
|
||||||
|
// An UnsupportedTypeError is returned by Marshal when attempting
|
||||||
|
// to encode an unsupported value type.
|
||||||
|
type UnsupportedTypeError = errors.UnsupportedTypeError
|
||||||
|
|
||||||
|
type UnsupportedValueError = errors.UnsupportedValueError
|
37
vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go
generated
vendored
Normal file
37
vendor/github.com/goccy/go-json/internal/decoder/anonymous_field.go
generated
vendored
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type anonymousFieldDecoder struct {
|
||||||
|
structType *runtime.Type
|
||||||
|
offset uintptr
|
||||||
|
dec Decoder
|
||||||
|
}
|
||||||
|
|
||||||
|
func newAnonymousFieldDecoder(structType *runtime.Type, offset uintptr, dec Decoder) *anonymousFieldDecoder {
|
||||||
|
return &anonymousFieldDecoder{
|
||||||
|
structType: structType,
|
||||||
|
offset: offset,
|
||||||
|
dec: dec,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *anonymousFieldDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
if *(*unsafe.Pointer)(p) == nil {
|
||||||
|
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||||
|
}
|
||||||
|
p = *(*unsafe.Pointer)(p)
|
||||||
|
return d.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *anonymousFieldDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
if *(*unsafe.Pointer)(p) == nil {
|
||||||
|
*(*unsafe.Pointer)(p) = unsafe_New(d.structType)
|
||||||
|
}
|
||||||
|
p = *(*unsafe.Pointer)(p)
|
||||||
|
return d.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+d.offset))
|
||||||
|
}
|
169
vendor/github.com/goccy/go-json/internal/decoder/array.go
generated
vendored
Normal file
169
vendor/github.com/goccy/go-json/internal/decoder/array.go
generated
vendored
Normal file
|
@ -0,0 +1,169 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type arrayDecoder struct {
|
||||||
|
elemType *runtime.Type
|
||||||
|
size uintptr
|
||||||
|
valueDecoder Decoder
|
||||||
|
alen int
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
zeroValue unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func newArrayDecoder(dec Decoder, elemType *runtime.Type, alen int, structName, fieldName string) *arrayDecoder {
|
||||||
|
zeroValue := *(*unsafe.Pointer)(unsafe_New(elemType))
|
||||||
|
return &arrayDecoder{
|
||||||
|
valueDecoder: dec,
|
||||||
|
elemType: elemType,
|
||||||
|
size: elemType.Size(),
|
||||||
|
alen: alen,
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
zeroValue: zeroValue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *arrayDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case '[':
|
||||||
|
idx := 0
|
||||||
|
s.cursor++
|
||||||
|
if s.skipWhiteSpace() == ']' {
|
||||||
|
for idx < d.alen {
|
||||||
|
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
if idx < d.alen {
|
||||||
|
if err := d.valueDecoder.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
idx++
|
||||||
|
switch s.skipWhiteSpace() {
|
||||||
|
case ']':
|
||||||
|
for idx < d.alen {
|
||||||
|
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
case ',':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto ERROR
|
||||||
|
default:
|
||||||
|
goto ERROR
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto ERROR
|
||||||
|
default:
|
||||||
|
goto ERROR
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
}
|
||||||
|
ERROR:
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("array", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *arrayDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return cursor, nil
|
||||||
|
case '[':
|
||||||
|
idx := 0
|
||||||
|
cursor++
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
if buf[cursor] == ']' {
|
||||||
|
for idx < d.alen {
|
||||||
|
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
return cursor, nil
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
if idx < d.alen {
|
||||||
|
c, err := d.valueDecoder.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+uintptr(idx)*d.size))
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
} else {
|
||||||
|
c, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
}
|
||||||
|
idx++
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ']':
|
||||||
|
for idx < d.alen {
|
||||||
|
*(*unsafe.Pointer)(unsafe.Pointer(uintptr(p) + uintptr(idx)*d.size)) = d.zeroValue
|
||||||
|
idx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
return cursor, nil
|
||||||
|
case ',':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
default:
|
||||||
|
return 0, errors.ErrInvalidCharacter(buf[cursor], "array", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("array", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
78
vendor/github.com/goccy/go-json/internal/decoder/bool.go
generated
vendored
Normal file
78
vendor/github.com/goccy/go-json/internal/decoder/bool.go
generated
vendored
Normal file
|
@ -0,0 +1,78 @@
package decoder

import (
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
)

type boolDecoder struct {
	structName string
	fieldName  string
}

func newBoolDecoder(structName, fieldName string) *boolDecoder {
	return &boolDecoder{structName: structName, fieldName: fieldName}
}

func (d *boolDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	c := s.skipWhiteSpace()
	for {
		switch c {
		case 't':
			if err := trueBytes(s); err != nil {
				return err
			}
			**(**bool)(unsafe.Pointer(&p)) = true
			return nil
		case 'f':
			if err := falseBytes(s); err != nil {
				return err
			}
			**(**bool)(unsafe.Pointer(&p)) = false
			return nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return err
			}
			return nil
		case nul:
			if s.read() {
				c = s.char()
				continue
			}
			goto ERROR
		}
		break
	}
ERROR:
	return errors.ErrUnexpectedEndOfJSON("bool", s.totalOffset())
}

func (d *boolDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	switch buf[cursor] {
	case 't':
		if err := validateTrue(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**bool)(unsafe.Pointer(&p)) = true
		return cursor, nil
	case 'f':
		if err := validateFalse(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 5
		**(**bool)(unsafe.Pointer(&p)) = false
		return cursor, nil
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		return cursor, nil
	}
	return 0, errors.ErrUnexpectedEndOfJSON("bool", cursor)
}
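As the 'n' case in both methods shows, a JSON null is consumed without writing to the destination, so an existing bool value is left untouched. A short hedged sketch through the public Unmarshal entry point (illustration only, not part of this commit):

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	v := true
	// null is accepted for a bool target but does not overwrite it.
	if err := json.Unmarshal([]byte(`null`), &v); err != nil {
		panic(err)
	}
	fmt.Println(v) // true

	if err := json.Unmarshal([]byte(`false`), &v); err != nil {
		panic(err)
	}
	fmt.Println(v) // false
}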
114
vendor/github.com/goccy/go-json/internal/decoder/bytes.go
generated
vendored
Normal file
@@ -0,0 +1,114 @@
package decoder

import (
	"encoding/base64"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type bytesDecoder struct {
	typ           *runtime.Type
	sliceDecoder  Decoder
	stringDecoder *stringDecoder
	structName    string
	fieldName     string
}

func byteUnmarshalerSliceDecoder(typ *runtime.Type, structName string, fieldName string) Decoder {
	var unmarshalDecoder Decoder
	switch {
	case runtime.PtrTo(typ).Implements(unmarshalJSONType):
		unmarshalDecoder = newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName)
	case runtime.PtrTo(typ).Implements(unmarshalTextType):
		unmarshalDecoder = newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName)
	}
	if unmarshalDecoder == nil {
		return nil
	}
	return newSliceDecoder(unmarshalDecoder, typ, 1, structName, fieldName)
}

func newBytesDecoder(typ *runtime.Type, structName string, fieldName string) *bytesDecoder {
	return &bytesDecoder{
		typ:           typ,
		sliceDecoder:  byteUnmarshalerSliceDecoder(typ, structName, fieldName),
		stringDecoder: newStringDecoder(structName, fieldName),
		structName:    structName,
		fieldName:     fieldName,
	}
}

func (d *bytesDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.decodeStreamBinary(s, depth, p)
	if err != nil {
		return err
	}
	if bytes == nil {
		s.reset()
		return nil
	}
	decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
	buf := make([]byte, decodedLen)
	n, err := base64.StdEncoding.Decode(buf, bytes)
	if err != nil {
		return err
	}
	*(*[]byte)(p) = buf[:n]
	s.reset()
	return nil
}

func (d *bytesDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.decodeBinary(ctx, cursor, depth, p)
	if err != nil {
		return 0, err
	}
	if bytes == nil {
		return c, nil
	}
	cursor = c
	decodedLen := base64.StdEncoding.DecodedLen(len(bytes))
	b := make([]byte, decodedLen)
	n, err := base64.StdEncoding.Decode(b, bytes)
	if err != nil {
		return 0, err
	}
	*(*[]byte)(p) = b[:n]
	return cursor, nil
}

func (d *bytesDecoder) decodeStreamBinary(s *Stream, depth int64, p unsafe.Pointer) ([]byte, error) {
	c := s.skipWhiteSpace()
	if c == '[' {
		if d.sliceDecoder == nil {
			return nil, &errors.UnmarshalTypeError{
				Type:   runtime.RType2Type(d.typ),
				Offset: s.totalOffset(),
			}
		}
		err := d.sliceDecoder.DecodeStream(s, depth, p)
		return nil, err
	}
	return d.stringDecoder.decodeStreamByte(s)
}

func (d *bytesDecoder) decodeBinary(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) ([]byte, int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == '[' {
		if d.sliceDecoder == nil {
			return nil, 0, &errors.UnmarshalTypeError{
				Type:   runtime.RType2Type(d.typ),
				Offset: cursor,
			}
		}
		c, err := d.sliceDecoder.Decode(ctx, cursor, depth, p)
		if err != nil {
			return nil, 0, err
		}
		return nil, c, nil
	}
	return d.stringDecoder.decodeByte(buf, cursor)
}
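The bytes decoder treats a JSON string as standard base64 and only falls back to element-wise slice decoding for a JSON array when the byte element type implements an unmarshaler interface. A short sketch of the base64 path through the public API (hypothetical illustration, not part of this commit):

package main

import (
	"encoding/base64"
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	payload := []byte("hello")
	encoded := base64.StdEncoding.EncodeToString(payload) // "aGVsbG8="

	var out []byte
	if err := json.Unmarshal([]byte(`"`+encoded+`"`), &out); err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // hello
}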
510
vendor/github.com/goccy/go-json/internal/decoder/compile.go
generated
vendored
Normal file
@@ -0,0 +1,510 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"sync/atomic"
|
||||||
|
"unicode"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
jsonNumberType = reflect.TypeOf(json.Number(""))
|
||||||
|
typeAddr *runtime.TypeAddr
|
||||||
|
cachedDecoderMap unsafe.Pointer // map[uintptr]decoder
|
||||||
|
cachedDecoder []Decoder
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
typeAddr = runtime.AnalyzeTypeAddr()
|
||||||
|
if typeAddr == nil {
|
||||||
|
typeAddr = &runtime.TypeAddr{}
|
||||||
|
}
|
||||||
|
cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift)
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadDecoderMap() map[uintptr]Decoder {
|
||||||
|
p := atomic.LoadPointer(&cachedDecoderMap)
|
||||||
|
return *(*map[uintptr]Decoder)(unsafe.Pointer(&p))
|
||||||
|
}
|
||||||
|
|
||||||
|
func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) {
|
||||||
|
newDecoderMap := make(map[uintptr]Decoder, len(m)+1)
|
||||||
|
newDecoderMap[typ] = dec
|
||||||
|
|
||||||
|
for k, v := range m {
|
||||||
|
newDecoderMap[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
atomic.StorePointer(&cachedDecoderMap, *(*unsafe.Pointer)(unsafe.Pointer(&newDecoderMap)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileToGetDecoderSlowPath(typeptr uintptr, typ *runtime.Type) (Decoder, error) {
|
||||||
|
decoderMap := loadDecoderMap()
|
||||||
|
if dec, exists := decoderMap[typeptr]; exists {
|
||||||
|
return dec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
dec, err := compileHead(typ, map[uintptr]Decoder{})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
storeDecoder(typeptr, dec, decoderMap)
|
||||||
|
return dec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileHead(typ *runtime.Type, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
switch {
|
||||||
|
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||||
|
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||||
|
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||||
|
return newUnmarshalTextDecoder(runtime.PtrTo(typ), "", ""), nil
|
||||||
|
}
|
||||||
|
return compile(typ.Elem(), "", "", structTypeToDecoder)
|
||||||
|
}
|
||||||
|
|
||||||
|
func compile(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
switch {
|
||||||
|
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||||
|
return newUnmarshalJSONDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||||
|
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||||
|
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
switch typ.Kind() {
|
||||||
|
case reflect.Ptr:
|
||||||
|
return compilePtr(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
case reflect.Struct:
|
||||||
|
return compileStruct(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
case reflect.Slice:
|
||||||
|
elem := typ.Elem()
|
||||||
|
if elem.Kind() == reflect.Uint8 {
|
||||||
|
return compileBytes(elem, structName, fieldName)
|
||||||
|
}
|
||||||
|
return compileSlice(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
case reflect.Array:
|
||||||
|
return compileArray(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
case reflect.Map:
|
||||||
|
return compileMap(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
case reflect.Interface:
|
||||||
|
return compileInterface(typ, structName, fieldName)
|
||||||
|
case reflect.Uintptr:
|
||||||
|
return compileUint(typ, structName, fieldName)
|
||||||
|
case reflect.Int:
|
||||||
|
return compileInt(typ, structName, fieldName)
|
||||||
|
case reflect.Int8:
|
||||||
|
return compileInt8(typ, structName, fieldName)
|
||||||
|
case reflect.Int16:
|
||||||
|
return compileInt16(typ, structName, fieldName)
|
||||||
|
case reflect.Int32:
|
||||||
|
return compileInt32(typ, structName, fieldName)
|
||||||
|
case reflect.Int64:
|
||||||
|
return compileInt64(typ, structName, fieldName)
|
||||||
|
case reflect.Uint:
|
||||||
|
return compileUint(typ, structName, fieldName)
|
||||||
|
case reflect.Uint8:
|
||||||
|
return compileUint8(typ, structName, fieldName)
|
||||||
|
case reflect.Uint16:
|
||||||
|
return compileUint16(typ, structName, fieldName)
|
||||||
|
case reflect.Uint32:
|
||||||
|
return compileUint32(typ, structName, fieldName)
|
||||||
|
case reflect.Uint64:
|
||||||
|
return compileUint64(typ, structName, fieldName)
|
||||||
|
case reflect.String:
|
||||||
|
return compileString(typ, structName, fieldName)
|
||||||
|
case reflect.Bool:
|
||||||
|
return compileBool(structName, fieldName)
|
||||||
|
case reflect.Float32:
|
||||||
|
return compileFloat32(structName, fieldName)
|
||||||
|
case reflect.Float64:
|
||||||
|
return compileFloat64(structName, fieldName)
|
||||||
|
case reflect.Func:
|
||||||
|
return compileFunc(typ, structName, fieldName)
|
||||||
|
}
|
||||||
|
return nil, &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(typ),
|
||||||
|
Offset: 0,
|
||||||
|
Struct: structName,
|
||||||
|
Field: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func isStringTagSupportedType(typ *runtime.Type) bool {
|
||||||
|
switch {
|
||||||
|
case implementsUnmarshalJSONType(runtime.PtrTo(typ)):
|
||||||
|
return false
|
||||||
|
case runtime.PtrTo(typ).Implements(unmarshalTextType):
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
switch typ.Kind() {
|
||||||
|
case reflect.Map:
|
||||||
|
return false
|
||||||
|
case reflect.Slice:
|
||||||
|
return false
|
||||||
|
case reflect.Array:
|
||||||
|
return false
|
||||||
|
case reflect.Struct:
|
||||||
|
return false
|
||||||
|
case reflect.Interface:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileMapKey(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
if runtime.PtrTo(typ).Implements(unmarshalTextType) {
|
||||||
|
return newUnmarshalTextDecoder(runtime.PtrTo(typ), structName, fieldName), nil
|
||||||
|
}
|
||||||
|
dec, err := compile(typ, structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
switch t := dec.(type) {
|
||||||
|
case *stringDecoder, *interfaceDecoder:
|
||||||
|
return dec, nil
|
||||||
|
case *boolDecoder, *intDecoder, *uintDecoder, *numberDecoder:
|
||||||
|
return newWrappedStringDecoder(typ, dec, structName, fieldName), nil
|
||||||
|
case *ptrDecoder:
|
||||||
|
dec = t.dec
|
||||||
|
default:
|
||||||
|
goto ERROR
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ERROR:
|
||||||
|
return nil, &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(typ),
|
||||||
|
Offset: 0,
|
||||||
|
Struct: structName,
|
||||||
|
Field: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compilePtr(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
dec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return newPtrDecoder(dec, typ.Elem(), structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInt(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||||
|
*(*int)(p) = int(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInt8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||||
|
*(*int8)(p) = int8(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInt16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||||
|
*(*int16)(p) = int16(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInt32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||||
|
*(*int32)(p) = int32(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInt64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newIntDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v int64) {
|
||||||
|
*(*int64)(p) = v
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileUint(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||||
|
*(*uint)(p) = uint(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileUint8(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||||
|
*(*uint8)(p) = uint8(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileUint16(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||||
|
*(*uint16)(p) = uint16(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileUint32(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||||
|
*(*uint32)(p) = uint32(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileUint64(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newUintDecoder(typ, structName, fieldName, func(p unsafe.Pointer, v uint64) {
|
||||||
|
*(*uint64)(p) = v
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileFloat32(structName, fieldName string) (Decoder, error) {
|
||||||
|
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||||
|
*(*float32)(p) = float32(v)
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileFloat64(structName, fieldName string) (Decoder, error) {
|
||||||
|
return newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
|
||||||
|
*(*float64)(p) = v
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileString(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
if typ == runtime.Type2RType(jsonNumberType) {
|
||||||
|
return newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
|
||||||
|
*(*json.Number)(p) = v
|
||||||
|
}), nil
|
||||||
|
}
|
||||||
|
return newStringDecoder(structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileBool(structName, fieldName string) (Decoder, error) {
|
||||||
|
return newBoolDecoder(structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileBytes(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newBytesDecoder(typ, structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileSlice(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
elem := typ.Elem()
|
||||||
|
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return newSliceDecoder(decoder, elem, elem.Size(), structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileArray(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
elem := typ.Elem()
|
||||||
|
decoder, err := compile(elem, structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return newArrayDecoder(decoder, elem, typ.Len(), structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileMap(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
keyDec, err := compileMapKey(typ.Key(), structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
valueDec, err := compile(typ.Elem(), structName, fieldName, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return newMapDecoder(typ, typ.Key(), keyDec, typ.Elem(), valueDec, structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileInterface(typ *runtime.Type, structName, fieldName string) (Decoder, error) {
|
||||||
|
return newInterfaceDecoder(typ, structName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileFunc(typ *runtime.Type, strutName, fieldName string) (Decoder, error) {
|
||||||
|
return newFuncDecoder(typ, strutName, fieldName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func removeConflictFields(fieldMap map[string]*structFieldSet, conflictedMap map[string]struct{}, dec *structDecoder, field reflect.StructField) {
|
||||||
|
for k, v := range dec.fieldMap {
|
||||||
|
if _, exists := conflictedMap[k]; exists {
|
||||||
|
// already conflicted key
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
set, exists := fieldMap[k]
|
||||||
|
if !exists {
|
||||||
|
fieldSet := &structFieldSet{
|
||||||
|
dec: v.dec,
|
||||||
|
offset: field.Offset + v.offset,
|
||||||
|
isTaggedKey: v.isTaggedKey,
|
||||||
|
key: k,
|
||||||
|
keyLen: int64(len(k)),
|
||||||
|
}
|
||||||
|
fieldMap[k] = fieldSet
|
||||||
|
lower := strings.ToLower(k)
|
||||||
|
if _, exists := fieldMap[lower]; !exists {
|
||||||
|
fieldMap[lower] = fieldSet
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if set.isTaggedKey {
|
||||||
|
if v.isTaggedKey {
|
||||||
|
// conflict tag key
|
||||||
|
delete(fieldMap, k)
|
||||||
|
delete(fieldMap, strings.ToLower(k))
|
||||||
|
conflictedMap[k] = struct{}{}
|
||||||
|
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if v.isTaggedKey {
|
||||||
|
fieldSet := &structFieldSet{
|
||||||
|
dec: v.dec,
|
||||||
|
offset: field.Offset + v.offset,
|
||||||
|
isTaggedKey: v.isTaggedKey,
|
||||||
|
key: k,
|
||||||
|
keyLen: int64(len(k)),
|
||||||
|
}
|
||||||
|
fieldMap[k] = fieldSet
|
||||||
|
lower := strings.ToLower(k)
|
||||||
|
if _, exists := fieldMap[lower]; !exists {
|
||||||
|
fieldMap[lower] = fieldSet
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// conflict tag key
|
||||||
|
delete(fieldMap, k)
|
||||||
|
delete(fieldMap, strings.ToLower(k))
|
||||||
|
conflictedMap[k] = struct{}{}
|
||||||
|
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compileStruct(typ *runtime.Type, structName, fieldName string, structTypeToDecoder map[uintptr]Decoder) (Decoder, error) {
|
||||||
|
fieldNum := typ.NumField()
|
||||||
|
conflictedMap := map[string]struct{}{}
|
||||||
|
fieldMap := map[string]*structFieldSet{}
|
||||||
|
typeptr := uintptr(unsafe.Pointer(typ))
|
||||||
|
if dec, exists := structTypeToDecoder[typeptr]; exists {
|
||||||
|
return dec, nil
|
||||||
|
}
|
||||||
|
structDec := newStructDecoder(structName, fieldName, fieldMap)
|
||||||
|
structTypeToDecoder[typeptr] = structDec
|
||||||
|
structName = typ.Name()
|
||||||
|
for i := 0; i < fieldNum; i++ {
|
||||||
|
field := typ.Field(i)
|
||||||
|
if runtime.IsIgnoredStructField(field) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
isUnexportedField := unicode.IsLower([]rune(field.Name)[0])
|
||||||
|
tag := runtime.StructTagFromField(field)
|
||||||
|
dec, err := compile(runtime.Type2RType(field.Type), structName, field.Name, structTypeToDecoder)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if field.Anonymous && !tag.IsTaggedKey {
|
||||||
|
if stDec, ok := dec.(*structDecoder); ok {
|
||||||
|
if runtime.Type2RType(field.Type) == typ {
|
||||||
|
// recursive definition
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
removeConflictFields(fieldMap, conflictedMap, stDec, field)
|
||||||
|
} else if pdec, ok := dec.(*ptrDecoder); ok {
|
||||||
|
contentDec := pdec.contentDecoder()
|
||||||
|
if pdec.typ == typ {
|
||||||
|
// recursive definition
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var fieldSetErr error
|
||||||
|
if isUnexportedField {
|
||||||
|
fieldSetErr = fmt.Errorf(
|
||||||
|
"json: cannot set embedded pointer to unexported struct: %v",
|
||||||
|
field.Type.Elem(),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
if dec, ok := contentDec.(*structDecoder); ok {
|
||||||
|
for k, v := range dec.fieldMap {
|
||||||
|
if _, exists := conflictedMap[k]; exists {
|
||||||
|
// already conflicted key
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
set, exists := fieldMap[k]
|
||||||
|
if !exists {
|
||||||
|
fieldSet := &structFieldSet{
|
||||||
|
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||||
|
offset: field.Offset,
|
||||||
|
isTaggedKey: v.isTaggedKey,
|
||||||
|
key: k,
|
||||||
|
keyLen: int64(len(k)),
|
||||||
|
err: fieldSetErr,
|
||||||
|
}
|
||||||
|
fieldMap[k] = fieldSet
|
||||||
|
lower := strings.ToLower(k)
|
||||||
|
if _, exists := fieldMap[lower]; !exists {
|
||||||
|
fieldMap[lower] = fieldSet
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if set.isTaggedKey {
|
||||||
|
if v.isTaggedKey {
|
||||||
|
// conflict tag key
|
||||||
|
delete(fieldMap, k)
|
||||||
|
delete(fieldMap, strings.ToLower(k))
|
||||||
|
conflictedMap[k] = struct{}{}
|
||||||
|
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if v.isTaggedKey {
|
||||||
|
fieldSet := &structFieldSet{
|
||||||
|
dec: newAnonymousFieldDecoder(pdec.typ, v.offset, v.dec),
|
||||||
|
offset: field.Offset,
|
||||||
|
isTaggedKey: v.isTaggedKey,
|
||||||
|
key: k,
|
||||||
|
keyLen: int64(len(k)),
|
||||||
|
err: fieldSetErr,
|
||||||
|
}
|
||||||
|
fieldMap[k] = fieldSet
|
||||||
|
lower := strings.ToLower(k)
|
||||||
|
if _, exists := fieldMap[lower]; !exists {
|
||||||
|
fieldMap[lower] = fieldSet
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// conflict tag key
|
||||||
|
delete(fieldMap, k)
|
||||||
|
delete(fieldMap, strings.ToLower(k))
|
||||||
|
conflictedMap[k] = struct{}{}
|
||||||
|
conflictedMap[strings.ToLower(k)] = struct{}{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if tag.IsString && isStringTagSupportedType(runtime.Type2RType(field.Type)) {
|
||||||
|
dec = newWrappedStringDecoder(runtime.Type2RType(field.Type), dec, structName, field.Name)
|
||||||
|
}
|
||||||
|
var key string
|
||||||
|
if tag.Key != "" {
|
||||||
|
key = tag.Key
|
||||||
|
} else {
|
||||||
|
key = field.Name
|
||||||
|
}
|
||||||
|
fieldSet := &structFieldSet{
|
||||||
|
dec: dec,
|
||||||
|
offset: field.Offset,
|
||||||
|
isTaggedKey: tag.IsTaggedKey,
|
||||||
|
key: key,
|
||||||
|
keyLen: int64(len(key)),
|
||||||
|
}
|
||||||
|
fieldMap[key] = fieldSet
|
||||||
|
lower := strings.ToLower(key)
|
||||||
|
if _, exists := fieldMap[lower]; !exists {
|
||||||
|
fieldMap[lower] = fieldSet
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
delete(structTypeToDecoder, typeptr)
|
||||||
|
structDec.tryOptimize()
|
||||||
|
return structDec, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func implementsUnmarshalJSONType(typ *runtime.Type) bool {
|
||||||
|
return typ.Implements(unmarshalJSONType) || typ.Implements(unmarshalJSONContextType)
|
||||||
|
}
|
28
vendor/github.com/goccy/go-json/internal/decoder/compile_norace.go
generated
vendored
Normal file
@@ -0,0 +1,28 @@
// +build !race

package decoder

import (
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
	typeptr := uintptr(unsafe.Pointer(typ))
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetDecoderSlowPath(typeptr, typ)
	}

	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	if dec := cachedDecoder[index]; dec != nil {
		return dec, nil
	}

	dec, err := compileHead(typ, map[uintptr]Decoder{})
	if err != nil {
		return nil, err
	}
	cachedDecoder[index] = dec
	return dec, nil
}
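Both CompileToGetDecoder variants key their cache on the runtime type's address: when the address falls inside the analyzed range, the compiled decoder is stored in a slice indexed by (typeptr - BaseTypeAddr) >> AddrShift; otherwise the copy-on-write map in compile.go is used. A standalone sketch of that indexing idea, with hypothetical names and an assumed base address and alignment shift (not part of this commit):

package main

import "fmt"

// Hypothetical stand-ins for the runtime.TypeAddr fields.
const (
	baseTypeAddr uintptr = 0x1000
	addrShift    uintptr = 5 // types assumed 32-byte aligned
)

var cache = make([]string, 64)

// index maps a type address into a dense slice slot, mirroring
// (typeptr - BaseTypeAddr) >> AddrShift from the vendored code.
func index(typeptr uintptr) uintptr {
	return (typeptr - baseTypeAddr) >> addrShift
}

func main() {
	typeptr := uintptr(0x1040)
	i := index(typeptr)
	if cache[i] == "" {
		cache[i] = "compiled decoder for 0x1040" // stand-in for a compiled Decoder
	}
	fmt.Println(i, cache[i]) // 2 compiled decoder for 0x1040
}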
36
vendor/github.com/goccy/go-json/internal/decoder/compile_race.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
// +build race

package decoder

import (
	"sync"
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

var decMu sync.RWMutex

func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
	typeptr := uintptr(unsafe.Pointer(typ))
	if typeptr > typeAddr.MaxTypeAddr {
		return compileToGetDecoderSlowPath(typeptr, typ)
	}

	index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
	decMu.RLock()
	if dec := cachedDecoder[index]; dec != nil {
		decMu.RUnlock()
		return dec, nil
	}
	decMu.RUnlock()

	dec, err := compileHead(typ, map[uintptr]Decoder{})
	if err != nil {
		return nil, err
	}
	decMu.Lock()
	cachedDecoder[index] = dec
	decMu.Unlock()
	return dec, nil
}
254
vendor/github.com/goccy/go-json/internal/decoder/context.go
generated
vendored
Normal file
@@ -0,0 +1,254 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"sync"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type RuntimeContext struct {
|
||||||
|
Buf []byte
|
||||||
|
Option *Option
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
runtimeContextPool = sync.Pool{
|
||||||
|
New: func() interface{} {
|
||||||
|
return &RuntimeContext{
|
||||||
|
Option: &Option{},
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func TakeRuntimeContext() *RuntimeContext {
|
||||||
|
return runtimeContextPool.Get().(*RuntimeContext)
|
||||||
|
}
|
||||||
|
|
||||||
|
func ReleaseRuntimeContext(ctx *RuntimeContext) {
|
||||||
|
runtimeContextPool.Put(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
isWhiteSpace = [256]bool{}
|
||||||
|
)
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
isWhiteSpace[' '] = true
|
||||||
|
isWhiteSpace['\n'] = true
|
||||||
|
isWhiteSpace['\t'] = true
|
||||||
|
isWhiteSpace['\r'] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
func char(ptr unsafe.Pointer, offset int64) byte {
|
||||||
|
return *(*byte)(unsafe.Pointer(uintptr(ptr) + uintptr(offset)))
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||||
|
for isWhiteSpace[buf[cursor]] {
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
return cursor
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipObject(buf []byte, cursor, depth int64) (int64, error) {
|
||||||
|
braceCount := 1
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case '{':
|
||||||
|
braceCount++
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
case '}':
|
||||||
|
depth--
|
||||||
|
braceCount--
|
||||||
|
if braceCount == 0 {
|
||||||
|
return cursor + 1, nil
|
||||||
|
}
|
||||||
|
case '[':
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
case ']':
|
||||||
|
depth--
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch buf[cursor] {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if buf[cursor] == nul {
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
goto SWITCH_OUT
|
||||||
|
case nul:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||||
|
}
|
||||||
|
SWITCH_OUT:
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipArray(buf []byte, cursor, depth int64) (int64, error) {
|
||||||
|
bracketCount := 1
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case '[':
|
||||||
|
bracketCount++
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
case ']':
|
||||||
|
bracketCount--
|
||||||
|
depth--
|
||||||
|
if bracketCount == 0 {
|
||||||
|
return cursor + 1, nil
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
case '}':
|
||||||
|
depth--
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch buf[cursor] {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if buf[cursor] == nul {
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
goto SWITCH_OUT
|
||||||
|
case nul:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||||
|
}
|
||||||
|
SWITCH_OUT:
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipValue(buf []byte, cursor, depth int64) (int64, error) {
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\t', '\n', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case '{':
|
||||||
|
return skipObject(buf, cursor+1, depth+1)
|
||||||
|
case '[':
|
||||||
|
return skipArray(buf, cursor+1, depth+1)
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch buf[cursor] {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if buf[cursor] == nul {
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
return cursor + 1, nil
|
||||||
|
case nul:
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
if floatTable[buf[cursor]] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return cursor, nil
|
||||||
|
case 't':
|
||||||
|
if err := validateTrue(buf, cursor); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return cursor, nil
|
||||||
|
case 'f':
|
||||||
|
if err := validateFalse(buf, cursor); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor += 5
|
||||||
|
return cursor, nil
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return cursor, nil
|
||||||
|
default:
|
||||||
|
return cursor, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateTrue(buf []byte, cursor int64) error {
|
||||||
|
if cursor+3 >= int64(len(buf)) {
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+1] != 'r' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+1], "true", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+2] != 'u' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+2], "true", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+3] != 'e' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+3], "true", cursor)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateFalse(buf []byte, cursor int64) error {
|
||||||
|
if cursor+4 >= int64(len(buf)) {
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+1] != 'a' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+1], "false", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+2] != 'l' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+2], "false", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+3] != 's' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+3], "false", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+4] != 'e' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+4], "false", cursor)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateNull(buf []byte, cursor int64) error {
|
||||||
|
if cursor+3 >= int64(len(buf)) {
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+1] != 'u' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+1], "null", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+2] != 'l' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+2], "null", cursor)
|
||||||
|
}
|
||||||
|
if buf[cursor+3] != 'l' {
|
||||||
|
return errors.ErrInvalidCharacter(buf[cursor+3], "null", cursor)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
158
vendor/github.com/goccy/go-json/internal/decoder/float.go
generated
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strconv"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type floatDecoder struct {
|
||||||
|
op func(unsafe.Pointer, float64)
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newFloatDecoder(structName, fieldName string, op func(unsafe.Pointer, float64)) *floatDecoder {
|
||||||
|
return &floatDecoder{op: op, structName: structName, fieldName: fieldName}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
floatTable = [256]bool{
|
||||||
|
'0': true,
|
||||||
|
'1': true,
|
||||||
|
'2': true,
|
||||||
|
'3': true,
|
||||||
|
'4': true,
|
||||||
|
'5': true,
|
||||||
|
'6': true,
|
||||||
|
'7': true,
|
||||||
|
'8': true,
|
||||||
|
'9': true,
|
||||||
|
'.': true,
|
||||||
|
'e': true,
|
||||||
|
'E': true,
|
||||||
|
'+': true,
|
||||||
|
'-': true,
|
||||||
|
}
|
||||||
|
|
||||||
|
validEndNumberChar = [256]bool{
|
||||||
|
nul: true,
|
||||||
|
' ': true,
|
||||||
|
'\t': true,
|
||||||
|
'\r': true,
|
||||||
|
'\n': true,
|
||||||
|
',': true,
|
||||||
|
':': true,
|
||||||
|
'}': true,
|
||||||
|
']': true,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func floatBytes(s *Stream) []byte {
|
||||||
|
start := s.cursor
|
||||||
|
for {
|
||||||
|
s.cursor++
|
||||||
|
if floatTable[s.char()] {
|
||||||
|
continue
|
||||||
|
} else if s.char() == nul {
|
||||||
|
if s.read() {
|
||||||
|
s.cursor-- // for retry current character
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return s.buf[start:s.cursor]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *floatDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return floatBytes(s), nil
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto ERROR
|
||||||
|
default:
|
||||||
|
goto ERROR
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ERROR:
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("float", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *floatDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
start := cursor
|
||||||
|
cursor++
|
||||||
|
for floatTable[buf[cursor]] {
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
num := buf[start:cursor]
|
||||||
|
return num, cursor, nil
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return nil, cursor, nil
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *floatDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
bytes, err := d.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
str := *(*string)(unsafe.Pointer(&bytes))
|
||||||
|
f64, err := strconv.ParseFloat(str, 64)
|
||||||
|
if err != nil {
|
||||||
|
return errors.ErrSyntax(err.Error(), s.totalOffset())
|
||||||
|
}
|
||||||
|
d.op(p, f64)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *floatDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
bytes, c, err := d.decodeByte(buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
if !validEndNumberChar[buf[cursor]] {
|
||||||
|
return 0, errors.ErrUnexpectedEndOfJSON("float", cursor)
|
||||||
|
}
|
||||||
|
s := *(*string)(unsafe.Pointer(&bytes))
|
||||||
|
f64, err := strconv.ParseFloat(s, 64)
|
||||||
|
if err != nil {
|
||||||
|
return 0, errors.ErrSyntax(err.Error(), cursor)
|
||||||
|
}
|
||||||
|
d.op(p, f64)
|
||||||
|
return cursor, nil
|
||||||
|
}
|
141
vendor/github.com/goccy/go-json/internal/decoder/func.go
generated
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type funcDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newFuncDecoder(typ *runtime.Type, structName, fieldName string) *funcDecoder {
|
||||||
|
fnDecoder := &funcDecoder{typ, structName, fieldName}
|
||||||
|
return fnDecoder
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *funcDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
s.skipWhiteSpace()
|
||||||
|
start := s.cursor
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
src := s.buf[start:s.cursor]
|
||||||
|
if len(src) > 0 {
|
||||||
|
switch src[0] {
|
||||||
|
case '"':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "string",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case '[':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "array",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "number",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
return nil
|
||||||
|
case 't':
|
||||||
|
if err := trueBytes(s); err == nil {
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "boolean",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case 'f':
|
||||||
|
if err := falseBytes(s); err == nil {
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "boolean",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return errors.ErrInvalidBeginningOfValue(s.buf[s.cursor], s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *funcDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
start := cursor
|
||||||
|
end, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
src := buf[start:end]
|
||||||
|
if len(src) > 0 {
|
||||||
|
switch src[0] {
|
||||||
|
case '"':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "string",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case '[':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "array",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "number",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case 'n':
|
||||||
|
if bytes.Equal(src, nullbytes) {
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
return end, nil
|
||||||
|
}
|
||||||
|
case 't':
|
||||||
|
if err := validateTrue(buf, start); err == nil {
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "boolean",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case 'f':
|
||||||
|
if err := validateFalse(buf, start); err == nil {
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "boolean",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||||
|
}
|
242
vendor/github.com/goccy/go-json/internal/decoder/int.go
generated
vendored
Normal file
@@ -0,0 +1,242 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type intDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
kind reflect.Kind
|
||||||
|
op func(unsafe.Pointer, int64)
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newIntDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, int64)) *intDecoder {
|
||||||
|
return &intDecoder{
|
||||||
|
typ: typ,
|
||||||
|
kind: typ.Kind(),
|
||||||
|
op: op,
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *intDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: fmt.Sprintf("number %s", string(buf)),
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Struct: d.structName,
|
||||||
|
Field: d.fieldName,
|
||||||
|
Offset: offset,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
pow10i64 = [...]int64{
|
||||||
|
1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
|
||||||
|
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18,
|
||||||
|
}
|
||||||
|
pow10i64Len = len(pow10i64)
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *intDecoder) parseInt(b []byte) (int64, error) {
|
||||||
|
isNegative := false
|
||||||
|
if b[0] == '-' {
|
||||||
|
b = b[1:]
|
||||||
|
isNegative = true
|
||||||
|
}
|
||||||
|
maxDigit := len(b)
|
||||||
|
if maxDigit > pow10i64Len {
|
||||||
|
return 0, fmt.Errorf("invalid length of number")
|
||||||
|
}
|
||||||
|
sum := int64(0)
|
||||||
|
for i := 0; i < maxDigit; i++ {
|
||||||
|
c := int64(b[i]) - 48
|
||||||
|
digitValue := pow10i64[maxDigit-i-1]
|
||||||
|
sum += c * digitValue
|
||||||
|
}
|
||||||
|
if isNegative {
|
||||||
|
return -1 * sum, nil
|
||||||
|
}
|
||||||
|
return sum, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
numTable = [256]bool{
|
||||||
|
'0': true,
|
||||||
|
'1': true,
|
||||||
|
'2': true,
|
||||||
|
'3': true,
|
||||||
|
'4': true,
|
||||||
|
'5': true,
|
||||||
|
'6': true,
|
||||||
|
'7': true,
|
||||||
|
'8': true,
|
||||||
|
'9': true,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
numZeroBuf = []byte{'0'}
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *intDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case '-':
|
||||||
|
start := s.cursor
|
||||||
|
for {
|
||||||
|
s.cursor++
|
||||||
|
if numTable[s.char()] {
|
||||||
|
continue
|
||||||
|
} else if s.char() == nul {
|
||||||
|
if s.read() {
|
||||||
|
s.cursor-- // for retry current character
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
num := s.buf[start:s.cursor]
|
||||||
|
if len(num) < 2 {
|
||||||
|
goto ERROR
|
||||||
|
}
|
||||||
|
return num, nil
|
||||||
|
case '0':
|
||||||
|
s.cursor++
|
||||||
|
return numZeroBuf, nil
|
||||||
|
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
start := s.cursor
|
||||||
|
for {
|
||||||
|
s.cursor++
|
||||||
|
if numTable[s.char()] {
|
||||||
|
continue
|
||||||
|
} else if s.char() == nul {
|
||||||
|
if s.read() {
|
||||||
|
s.cursor-- // for retry current character
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
num := s.buf[start:s.cursor]
|
||||||
|
return num, nil
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto ERROR
|
||||||
|
default:
|
||||||
|
return nil, d.typeError([]byte{s.char()}, s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ERROR:
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("number(integer)", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *intDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
for {
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case '0':
|
||||||
|
cursor++
|
||||||
|
return numZeroBuf, cursor, nil
|
||||||
|
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
start := cursor
|
||||||
|
cursor++
|
||||||
|
for numTable[char(b, cursor)] {
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
num := buf[start:cursor]
|
||||||
|
return num, cursor, nil
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return nil, cursor, nil
|
||||||
|
default:
|
||||||
|
return nil, 0, d.typeError([]byte{char(b, cursor)}, cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *intDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
bytes, err := d.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
i64, err := d.parseInt(bytes)
|
||||||
|
if err != nil {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
switch d.kind {
|
||||||
|
case reflect.Int8:
|
||||||
|
if i64 <= -1*(1<<7) || (1<<7) <= i64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
case reflect.Int16:
|
||||||
|
if i64 <= -1*(1<<15) || (1<<15) <= i64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
case reflect.Int32:
|
||||||
|
if i64 <= -1*(1<<31) || (1<<31) <= i64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.op(p, i64)
|
||||||
|
s.reset()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *intDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
|
||||||
|
i64, err := d.parseInt(bytes)
|
||||||
|
if err != nil {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
switch d.kind {
|
||||||
|
case reflect.Int8:
|
||||||
|
if i64 <= -1*(1<<7) || (1<<7) <= i64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
case reflect.Int16:
|
||||||
|
if i64 <= -1*(1<<15) || (1<<15) <= i64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
case reflect.Int32:
|
||||||
|
if i64 <= -1*(1<<31) || (1<<31) <= i64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.op(p, i64)
|
||||||
|
return cursor, nil
|
||||||
|
}
|
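The range checks in both intDecoder paths reject values that would overflow the destination kind (Int8/Int16/Int32) and report an UnmarshalTypeError instead of silently truncating. A hedged usage sketch through the public API (illustration only, not part of this commit):

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	var small int8
	// 300 does not fit in an int8, so Unmarshal reports a type error.
	if err := json.Unmarshal([]byte(`300`), &small); err != nil {
		fmt.Println("error:", err)
	}

	var ok int8
	if err := json.Unmarshal([]byte(`42`), &ok); err != nil {
		panic(err)
	}
	fmt.Println(ok) // 42
}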
458
vendor/github.com/goccy/go-json/internal/decoder/interface.go
generated
vendored
Normal file
@@ -0,0 +1,458 @@
package decoder

import (
	"bytes"
	"encoding"
	"encoding/json"
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type interfaceDecoder struct {
	typ           *runtime.Type
	structName    string
	fieldName     string
	sliceDecoder  *sliceDecoder
	mapDecoder    *mapDecoder
	floatDecoder  *floatDecoder
	numberDecoder *numberDecoder
	stringDecoder *stringDecoder
}

func newEmptyInterfaceDecoder(structName, fieldName string) *interfaceDecoder {
	ifaceDecoder := &interfaceDecoder{
		typ:        emptyInterfaceType,
		structName: structName,
		fieldName:  fieldName,
		floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
			*(*interface{})(p) = v
		}),
		numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
			*(*interface{})(p) = v
		}),
		stringDecoder: newStringDecoder(structName, fieldName),
	}
	ifaceDecoder.sliceDecoder = newSliceDecoder(
		ifaceDecoder,
		emptyInterfaceType,
		emptyInterfaceType.Size(),
		structName, fieldName,
	)
	ifaceDecoder.mapDecoder = newMapDecoder(
		interfaceMapType,
		stringType,
		ifaceDecoder.stringDecoder,
		interfaceMapType.Elem(),
		ifaceDecoder,
		structName,
		fieldName,
	)
	return ifaceDecoder
}

func newInterfaceDecoder(typ *runtime.Type, structName, fieldName string) *interfaceDecoder {
	emptyIfaceDecoder := newEmptyInterfaceDecoder(structName, fieldName)
	stringDecoder := newStringDecoder(structName, fieldName)
	return &interfaceDecoder{
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
		sliceDecoder: newSliceDecoder(
			emptyIfaceDecoder,
			emptyInterfaceType,
			emptyInterfaceType.Size(),
			structName, fieldName,
		),
		mapDecoder: newMapDecoder(
			interfaceMapType,
			stringType,
			stringDecoder,
			interfaceMapType.Elem(),
			emptyIfaceDecoder,
			structName,
			fieldName,
		),
		floatDecoder: newFloatDecoder(structName, fieldName, func(p unsafe.Pointer, v float64) {
			*(*interface{})(p) = v
		}),
		numberDecoder: newNumberDecoder(structName, fieldName, func(p unsafe.Pointer, v json.Number) {
			*(*interface{})(p) = v
		}),
		stringDecoder: stringDecoder,
	}
}

func (d *interfaceDecoder) numDecoder(s *Stream) Decoder {
	if s.UseNumber {
		return d.numberDecoder
	}
	return d.floatDecoder
}

var (
	emptyInterfaceType = runtime.Type2RType(reflect.TypeOf((*interface{})(nil)).Elem())
	interfaceMapType   = runtime.Type2RType(
		reflect.TypeOf((*map[string]interface{})(nil)).Elem(),
	)
	stringType = runtime.Type2RType(
		reflect.TypeOf(""),
	)
)

func decodeStreamUnmarshaler(s *Stream, depth int64, unmarshaler json.Unmarshaler) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(dst); err != nil {
		return err
	}
	return nil
}

func decodeStreamUnmarshalerContext(s *Stream, depth int64, unmarshaler unmarshalerContext) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(s.Option.Context, dst); err != nil {
		return err
	}
	return nil
}

func decodeUnmarshaler(buf []byte, cursor, depth int64, unmarshaler json.Unmarshaler) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(dst); err != nil {
		return 0, err
	}
	return end, nil
}

func decodeUnmarshalerContext(ctx *RuntimeContext, buf []byte, cursor, depth int64, unmarshaler unmarshalerContext) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalJSON(ctx.Option.Context, dst); err != nil {
		return 0, err
	}
	return end, nil
}

func decodeStreamTextUnmarshaler(s *Stream, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) error {
	start := s.cursor
	if err := s.skipValue(depth); err != nil {
		return err
	}
	src := s.buf[start:s.cursor]
	if bytes.Equal(src, nullbytes) {
		*(*unsafe.Pointer)(p) = nil
		return nil
	}

	dst := make([]byte, len(src))
	copy(dst, src)

	if err := unmarshaler.UnmarshalText(dst); err != nil {
		return err
	}
	return nil
}

func decodeTextUnmarshaler(buf []byte, cursor, depth int64, unmarshaler encoding.TextUnmarshaler, p unsafe.Pointer) (int64, error) {
	cursor = skipWhiteSpace(buf, cursor)
	start := cursor
	end, err := skipValue(buf, cursor, depth)
	if err != nil {
		return 0, err
	}
	src := buf[start:end]
	if bytes.Equal(src, nullbytes) {
		*(*unsafe.Pointer)(p) = nil
		return end, nil
	}
	if s, ok := unquoteBytes(src); ok {
		src = s
	}
	if err := unmarshaler.UnmarshalText(src); err != nil {
		return 0, err
	}
	return end, nil
}

func (d *interfaceDecoder) decodeStreamEmptyInterface(s *Stream, depth int64, p unsafe.Pointer) error {
	c := s.skipWhiteSpace()
	for {
		switch c {
		case '{':
			var v map[string]interface{}
			ptr := unsafe.Pointer(&v)
			if err := d.mapDecoder.DecodeStream(s, depth, ptr); err != nil {
				return err
			}
			*(*interface{})(p) = v
			return nil
		case '[':
			var v []interface{}
			ptr := unsafe.Pointer(&v)
			if err := d.sliceDecoder.DecodeStream(s, depth, ptr); err != nil {
				return err
			}
			*(*interface{})(p) = v
			return nil
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return d.numDecoder(s).DecodeStream(s, depth, p)
		case '"':
			s.cursor++
			start := s.cursor
			for {
				switch s.char() {
				case '\\':
					if _, err := decodeEscapeString(s, nil); err != nil {
						return err
					}
				case '"':
					literal := s.buf[start:s.cursor]
					s.cursor++
					*(*interface{})(p) = string(literal)
					return nil
				case nul:
					if s.read() {
						continue
					}
					return errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
				}
				s.cursor++
			}
		case 't':
			if err := trueBytes(s); err != nil {
				return err
			}
			**(**interface{})(unsafe.Pointer(&p)) = true
			return nil
		case 'f':
			if err := falseBytes(s); err != nil {
				return err
			}
			**(**interface{})(unsafe.Pointer(&p)) = false
			return nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return err
			}
			*(*interface{})(p) = nil
			return nil
		case nul:
			if s.read() {
				c = s.char()
				continue
			}
		}
		break
	}
	return errors.ErrInvalidBeginningOfValue(c, s.totalOffset())
}

type emptyInterface struct {
	typ *runtime.Type
	ptr unsafe.Pointer
}

func (d *interfaceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	rv := reflect.ValueOf(runtimeInterfaceValue)
	if rv.NumMethod() > 0 && rv.CanInterface() {
		if u, ok := rv.Interface().(unmarshalerContext); ok {
			return decodeStreamUnmarshalerContext(s, depth, u)
		}
		if u, ok := rv.Interface().(json.Unmarshaler); ok {
			return decodeStreamUnmarshaler(s, depth, u)
		}
		if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
			return decodeStreamTextUnmarshaler(s, depth, u, p)
		}
		if s.skipWhiteSpace() == 'n' {
			if err := nullBytes(s); err != nil {
				return err
			}
			*(*interface{})(p) = nil
			return nil
		}
		return d.errUnmarshalType(rv.Type(), s.totalOffset())
	}
	iface := rv.Interface()
	ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
	typ := ifaceHeader.typ
	if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
		// concrete type is empty interface
		return d.decodeStreamEmptyInterface(s, depth, p)
	}
	if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
		return d.decodeStreamEmptyInterface(s, depth, p)
	}
	if s.skipWhiteSpace() == 'n' {
		if err := nullBytes(s); err != nil {
			return err
		}
		*(*interface{})(p) = nil
		return nil
	}
	decoder, err := CompileToGetDecoder(typ)
	if err != nil {
		return err
	}
	return decoder.DecodeStream(s, depth, ifaceHeader.ptr)
}

func (d *interfaceDecoder) errUnmarshalType(typ reflect.Type, offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  typ.String(),
		Type:   typ,
		Offset: offset,
		Struct: d.structName,
		Field:  d.fieldName,
	}
}

func (d *interfaceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	runtimeInterfaceValue := *(*interface{})(unsafe.Pointer(&emptyInterface{
		typ: d.typ,
		ptr: p,
	}))
	rv := reflect.ValueOf(runtimeInterfaceValue)
	if rv.NumMethod() > 0 && rv.CanInterface() {
		if u, ok := rv.Interface().(unmarshalerContext); ok {
			return decodeUnmarshalerContext(ctx, buf, cursor, depth, u)
		}
		if u, ok := rv.Interface().(json.Unmarshaler); ok {
			return decodeUnmarshaler(buf, cursor, depth, u)
		}
		if u, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
			return decodeTextUnmarshaler(buf, cursor, depth, u, p)
		}
		cursor = skipWhiteSpace(buf, cursor)
		if buf[cursor] == 'n' {
			if err := validateNull(buf, cursor); err != nil {
				return 0, err
			}
			cursor += 4
			**(**interface{})(unsafe.Pointer(&p)) = nil
			return cursor, nil
		}
		return 0, d.errUnmarshalType(rv.Type(), cursor)
	}

	iface := rv.Interface()
	ifaceHeader := (*emptyInterface)(unsafe.Pointer(&iface))
	typ := ifaceHeader.typ
	if ifaceHeader.ptr == nil || d.typ == typ || typ == nil {
		// concrete type is empty interface
		return d.decodeEmptyInterface(ctx, cursor, depth, p)
	}
	if typ.Kind() == reflect.Ptr && typ.Elem() == d.typ || typ.Kind() != reflect.Ptr {
		return d.decodeEmptyInterface(ctx, cursor, depth, p)
	}
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == 'n' {
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = nil
		return cursor, nil
	}
	decoder, err := CompileToGetDecoder(typ)
	if err != nil {
		return 0, err
	}
	return decoder.Decode(ctx, cursor, depth, ifaceHeader.ptr)
}

func (d *interfaceDecoder) decodeEmptyInterface(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	switch buf[cursor] {
	case '{':
		var v map[string]interface{}
		ptr := unsafe.Pointer(&v)
		cursor, err := d.mapDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case '[':
		var v []interface{}
		ptr := unsafe.Pointer(&v)
		cursor, err := d.sliceDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
		return d.floatDecoder.Decode(ctx, cursor, depth, p)
	case '"':
		var v string
		ptr := unsafe.Pointer(&v)
		cursor, err := d.stringDecoder.Decode(ctx, cursor, depth, ptr)
		if err != nil {
			return 0, err
		}
		**(**interface{})(unsafe.Pointer(&p)) = v
		return cursor, nil
	case 't':
		if err := validateTrue(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = true
		return cursor, nil
	case 'f':
		if err := validateFalse(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 5
		**(**interface{})(unsafe.Pointer(&p)) = false
		return cursor, nil
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**interface{})(unsafe.Pointer(&p)) = nil
		return cursor, nil
	}
	return cursor, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
}
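Note: the interfaceDecoder above is what backs decoding into a plain interface{}: objects become map[string]interface{}, arrays []interface{}, numbers float64 (or json.Number when UseNumber is set), strings string, booleans bool, and null nil. A minimal usage sketch through the public goccy/go-json API, assuming its drop-in encoding/json-compatible Unmarshal; this is illustrative only and not part of the vendored source:

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	// Each JSON kind maps to the concrete Go type chosen by decodeEmptyInterface:
	// object -> map[string]interface{}, array -> []interface{}, number -> float64,
	// string -> string, true/false -> bool, null -> nil.
	var v interface{}
	if err := json.Unmarshal([]byte(`{"id": 1, "tags": ["a", "b"], "ok": true}`), &v); err != nil {
		panic(err)
	}
	m := v.(map[string]interface{})
	fmt.Printf("%T %T %T\n", m["id"], m["tags"], m["ok"]) // float64 []interface {} bool
}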
173 vendor/github.com/goccy/go-json/internal/decoder/map.go (generated, vendored, new file)
@@ -0,0 +1,173 @@
package decoder

import (
	"reflect"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

type mapDecoder struct {
	mapType       *runtime.Type
	keyType       *runtime.Type
	valueType     *runtime.Type
	stringKeyType bool
	keyDecoder    Decoder
	valueDecoder  Decoder
	structName    string
	fieldName     string
}

func newMapDecoder(mapType *runtime.Type, keyType *runtime.Type, keyDec Decoder, valueType *runtime.Type, valueDec Decoder, structName, fieldName string) *mapDecoder {
	return &mapDecoder{
		mapType:       mapType,
		keyDecoder:    keyDec,
		keyType:       keyType,
		stringKeyType: keyType.Kind() == reflect.String,
		valueType:     valueType,
		valueDecoder:  valueDec,
		structName:    structName,
		fieldName:     fieldName,
	}
}

//go:linkname makemap reflect.makemap
func makemap(*runtime.Type, int) unsafe.Pointer

//nolint:golint
//go:linkname mapassign_faststr runtime.mapassign_faststr
//go:noescape
func mapassign_faststr(t *runtime.Type, m unsafe.Pointer, s string) unsafe.Pointer

//go:linkname mapassign reflect.mapassign
//go:noescape
func mapassign(t *runtime.Type, m unsafe.Pointer, k, v unsafe.Pointer)

func (d *mapDecoder) mapassign(t *runtime.Type, m, k, v unsafe.Pointer) {
	if d.stringKeyType {
		mapV := mapassign_faststr(d.mapType, m, *(*string)(k))
		typedmemmove(d.valueType, mapV, v)
	} else {
		mapassign(t, m, k, v)
	}
}

func (d *mapDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	depth++
	if depth > maxDecodeNestingDepth {
		return errors.ErrExceededMaxDepth(s.char(), s.cursor)
	}

	switch s.skipWhiteSpace() {
	case 'n':
		if err := nullBytes(s); err != nil {
			return err
		}
		**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
		return nil
	case '{':
	default:
		return errors.ErrExpected("{ character for map value", s.totalOffset())
	}
	mapValue := *(*unsafe.Pointer)(p)
	if mapValue == nil {
		mapValue = makemap(d.mapType, 0)
	}
	if s.buf[s.cursor+1] == '}' {
		*(*unsafe.Pointer)(p) = mapValue
		s.cursor += 2
		return nil
	}
	for {
		s.cursor++
		k := unsafe_New(d.keyType)
		if err := d.keyDecoder.DecodeStream(s, depth, k); err != nil {
			return err
		}
		s.skipWhiteSpace()
		if !s.equalChar(':') {
			return errors.ErrExpected("colon after object key", s.totalOffset())
		}
		s.cursor++
		v := unsafe_New(d.valueType)
		if err := d.valueDecoder.DecodeStream(s, depth, v); err != nil {
			return err
		}
		d.mapassign(d.mapType, mapValue, k, v)
		s.skipWhiteSpace()
		if s.equalChar('}') {
			**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
			s.cursor++
			return nil
		}
		if !s.equalChar(',') {
			return errors.ErrExpected("comma after object value", s.totalOffset())
		}
	}
}

func (d *mapDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	depth++
	if depth > maxDecodeNestingDepth {
		return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
	}

	cursor = skipWhiteSpace(buf, cursor)
	buflen := int64(len(buf))
	if buflen < 2 {
		return 0, errors.ErrExpected("{} for map", cursor)
	}
	switch buf[cursor] {
	case 'n':
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		cursor += 4
		**(**unsafe.Pointer)(unsafe.Pointer(&p)) = nil
		return cursor, nil
	case '{':
	default:
		return 0, errors.ErrExpected("{ character for map value", cursor)
	}
	cursor++
	cursor = skipWhiteSpace(buf, cursor)
	mapValue := *(*unsafe.Pointer)(p)
	if mapValue == nil {
		mapValue = makemap(d.mapType, 0)
	}
	if buf[cursor] == '}' {
		**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
		cursor++
		return cursor, nil
	}
	for {
		k := unsafe_New(d.keyType)
		keyCursor, err := d.keyDecoder.Decode(ctx, cursor, depth, k)
		if err != nil {
			return 0, err
		}
		cursor = skipWhiteSpace(buf, keyCursor)
		if buf[cursor] != ':' {
			return 0, errors.ErrExpected("colon after object key", cursor)
		}
		cursor++
		v := unsafe_New(d.valueType)
		valueCursor, err := d.valueDecoder.Decode(ctx, cursor, depth, v)
		if err != nil {
			return 0, err
		}
		d.mapassign(d.mapType, mapValue, k, v)
		cursor = skipWhiteSpace(buf, valueCursor)
		if buf[cursor] == '}' {
			**(**unsafe.Pointer)(unsafe.Pointer(&p)) = mapValue
			cursor++
			return cursor, nil
		}
		if buf[cursor] != ',' {
			return 0, errors.ErrExpected("comma after object value", cursor)
		}
		cursor++
	}
}
112 vendor/github.com/goccy/go-json/internal/decoder/number.go (generated, vendored, new file)
@@ -0,0 +1,112 @@
package decoder

import (
	"encoding/json"
	"strconv"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
)

type numberDecoder struct {
	stringDecoder *stringDecoder
	op            func(unsafe.Pointer, json.Number)
	structName    string
	fieldName     string
}

func newNumberDecoder(structName, fieldName string, op func(unsafe.Pointer, json.Number)) *numberDecoder {
	return &numberDecoder{
		stringDecoder: newStringDecoder(structName, fieldName),
		op:            op,
		structName:    structName,
		fieldName:     fieldName,
	}
}

func (d *numberDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	bytes, err := d.decodeStreamByte(s)
	if err != nil {
		return err
	}
	if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
		return errors.ErrSyntax(err.Error(), s.totalOffset())
	}
	d.op(p, json.Number(string(bytes)))
	s.reset()
	return nil
}

func (d *numberDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	bytes, c, err := d.decodeByte(ctx.Buf, cursor)
	if err != nil {
		return 0, err
	}
	if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&bytes)), 64); err != nil {
		return 0, errors.ErrSyntax(err.Error(), c)
	}
	cursor = c
	s := *(*string)(unsafe.Pointer(&bytes))
	d.op(p, json.Number(s))
	return cursor, nil
}

func (d *numberDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
	start := s.cursor
	for {
		switch s.char() {
		case ' ', '\n', '\t', '\r':
			s.cursor++
			continue
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return floatBytes(s), nil
		case 'n':
			if err := nullBytes(s); err != nil {
				return nil, err
			}
			return nil, nil
		case '"':
			return d.stringDecoder.decodeStreamByte(s)
		case nul:
			if s.read() {
				continue
			}
			goto ERROR
		default:
			goto ERROR
		}
	}
ERROR:
	if s.cursor == start {
		return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
	}
	return nil, errors.ErrUnexpectedEndOfJSON("json.Number", s.totalOffset())
}

func (d *numberDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
	for {
		switch buf[cursor] {
		case ' ', '\n', '\t', '\r':
			cursor++
			continue
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			start := cursor
			cursor++
			for floatTable[buf[cursor]] {
				cursor++
			}
			num := buf[start:cursor]
			return num, cursor, nil
		case 'n':
			if err := validateNull(buf, cursor); err != nil {
				return nil, 0, err
			}
			cursor += 4
			return nil, cursor, nil
		case '"':
			return d.stringDecoder.decodeByte(buf, cursor)
		default:
			return nil, 0, errors.ErrUnexpectedEndOfJSON("json.Number", cursor)
		}
	}
}
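Note: numberDecoder keeps the raw numeric literal and hands it over as a json.Number instead of eagerly parsing it to float64; it is only selected when the caller opts in. A small usage sketch, assuming the drop-in encoding/json-compatible Decoder, UseNumber, and Number names that goccy/go-json exposes; illustrative only, not part of the vendored source:

package main

import (
	"fmt"
	"strings"

	json "github.com/goccy/go-json"
)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"price": 19.99}`))
	dec.UseNumber() // select numberDecoder instead of floatDecoder for interface{} values
	var v map[string]interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	n := v["price"].(json.Number) // the literal "19.99" is preserved verbatim
	fmt.Println(n.String())
}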
15 vendor/github.com/goccy/go-json/internal/decoder/option.go (generated, vendored, new file)
@@ -0,0 +1,15 @@
package decoder

import "context"

type OptionFlags uint8

const (
	FirstWinOption OptionFlags = 1 << iota
	ContextOption
)

type Option struct {
	Flags   OptionFlags
	Context context.Context
}
87 vendor/github.com/goccy/go-json/internal/decoder/ptr.go (generated, vendored, new file)
@@ -0,0 +1,87 @@
package decoder

import (
	"unsafe"

	"github.com/goccy/go-json/internal/runtime"
)

type ptrDecoder struct {
	dec        Decoder
	typ        *runtime.Type
	structName string
	fieldName  string
}

func newPtrDecoder(dec Decoder, typ *runtime.Type, structName, fieldName string) *ptrDecoder {
	return &ptrDecoder{
		dec:        dec,
		typ:        typ,
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *ptrDecoder) contentDecoder() Decoder {
	dec, ok := d.dec.(*ptrDecoder)
	if !ok {
		return d.dec
	}
	return dec.contentDecoder()
}

//nolint:golint
//go:linkname unsafe_New reflect.unsafe_New
func unsafe_New(*runtime.Type) unsafe.Pointer

func (d *ptrDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	if s.skipWhiteSpace() == nul {
		s.read()
	}
	if s.char() == 'n' {
		if err := nullBytes(s); err != nil {
			return err
		}
		*(*unsafe.Pointer)(p) = nil
		return nil
	}
	var newptr unsafe.Pointer
	if *(*unsafe.Pointer)(p) == nil {
		newptr = unsafe_New(d.typ)
		*(*unsafe.Pointer)(p) = newptr
	} else {
		newptr = *(*unsafe.Pointer)(p)
	}
	if err := d.dec.DecodeStream(s, depth, newptr); err != nil {
		return err
	}
	return nil
}

func (d *ptrDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	cursor = skipWhiteSpace(buf, cursor)
	if buf[cursor] == 'n' {
		if err := validateNull(buf, cursor); err != nil {
			return 0, err
		}
		if p != nil {
			*(*unsafe.Pointer)(p) = nil
		}
		cursor += 4
		return cursor, nil
	}
	var newptr unsafe.Pointer
	if *(*unsafe.Pointer)(p) == nil {
		newptr = unsafe_New(d.typ)
		*(*unsafe.Pointer)(p) = newptr
	} else {
		newptr = *(*unsafe.Pointer)(p)
	}
	c, err := d.dec.Decode(ctx, cursor, depth, newptr)
	if err != nil {
		return 0, err
	}
	cursor = c
	return cursor, nil
}
301 vendor/github.com/goccy/go-json/internal/decoder/slice.go (generated, vendored, new file)
@@ -0,0 +1,301 @@
package decoder

import (
	"reflect"
	"sync"
	"unsafe"

	"github.com/goccy/go-json/internal/errors"
	"github.com/goccy/go-json/internal/runtime"
)

var (
	sliceType = runtime.Type2RType(
		reflect.TypeOf((*sliceHeader)(nil)).Elem(),
	)
	nilSlice = unsafe.Pointer(&sliceHeader{})
)

type sliceDecoder struct {
	elemType          *runtime.Type
	isElemPointerType bool
	valueDecoder      Decoder
	size              uintptr
	arrayPool         sync.Pool
	structName        string
	fieldName         string
}

// If use reflect.SliceHeader, data type is uintptr.
// In this case, Go compiler cannot trace reference created by newArray().
// So, define using unsafe.Pointer as data type
type sliceHeader struct {
	data unsafe.Pointer
	len  int
	cap  int
}

const (
	defaultSliceCapacity = 2
)

func newSliceDecoder(dec Decoder, elemType *runtime.Type, size uintptr, structName, fieldName string) *sliceDecoder {
	return &sliceDecoder{
		valueDecoder:      dec,
		elemType:          elemType,
		isElemPointerType: elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map,
		size:              size,
		arrayPool: sync.Pool{
			New: func() interface{} {
				return &sliceHeader{
					data: newArray(elemType, defaultSliceCapacity),
					len:  0,
					cap:  defaultSliceCapacity,
				}
			},
		},
		structName: structName,
		fieldName:  fieldName,
	}
}

func (d *sliceDecoder) newSlice(src *sliceHeader) *sliceHeader {
	slice := d.arrayPool.Get().(*sliceHeader)
	if src.len > 0 {
		// copy original elem
		if slice.cap < src.cap {
			data := newArray(d.elemType, src.cap)
			slice = &sliceHeader{data: data, len: src.len, cap: src.cap}
		} else {
			slice.len = src.len
		}
		copySlice(d.elemType, *slice, *src)
	} else {
		slice.len = 0
	}
	return slice
}

func (d *sliceDecoder) releaseSlice(p *sliceHeader) {
	d.arrayPool.Put(p)
}

//go:linkname copySlice reflect.typedslicecopy
func copySlice(elemType *runtime.Type, dst, src sliceHeader) int

//go:linkname newArray reflect.unsafe_NewArray
func newArray(*runtime.Type, int) unsafe.Pointer

//go:linkname typedmemmove reflect.typedmemmove
func typedmemmove(t *runtime.Type, dst, src unsafe.Pointer)

func (d *sliceDecoder) errNumber(offset int64) *errors.UnmarshalTypeError {
	return &errors.UnmarshalTypeError{
		Value:  "number",
		Type:   reflect.SliceOf(runtime.RType2Type(d.elemType)),
		Struct: d.structName,
		Field:  d.fieldName,
		Offset: offset,
	}
}

func (d *sliceDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
	depth++
	if depth > maxDecodeNestingDepth {
		return errors.ErrExceededMaxDepth(s.char(), s.cursor)
	}

	for {
		switch s.char() {
		case ' ', '\n', '\t', '\r':
			s.cursor++
			continue
		case 'n':
			if err := nullBytes(s); err != nil {
				return err
			}
			typedmemmove(sliceType, p, nilSlice)
			return nil
		case '[':
			s.cursor++
			if s.skipWhiteSpace() == ']' {
				dst := (*sliceHeader)(p)
				if dst.data == nil {
					dst.data = newArray(d.elemType, 0)
				} else {
					dst.len = 0
				}
				s.cursor++
				return nil
			}
			idx := 0
			slice := d.newSlice((*sliceHeader)(p))
			srcLen := slice.len
			capacity := slice.cap
			data := slice.data
			for {
				if capacity <= idx {
					src := sliceHeader{data: data, len: idx, cap: capacity}
					capacity *= 2
					data = newArray(d.elemType, capacity)
					dst := sliceHeader{data: data, len: idx, cap: capacity}
					copySlice(d.elemType, dst, src)
				}
				ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)

				// if srcLen is greater than idx, keep the original reference
				if srcLen <= idx {
					if d.isElemPointerType {
						**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
					} else {
						// assign new element to the slice
						typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
					}
				}

				if err := d.valueDecoder.DecodeStream(s, depth, ep); err != nil {
					return err
				}
				s.skipWhiteSpace()
			RETRY:
				switch s.char() {
				case ']':
					slice.cap = capacity
					slice.len = idx + 1
					slice.data = data
					dst := (*sliceHeader)(p)
					dst.len = idx + 1
					if dst.len > dst.cap {
						dst.data = newArray(d.elemType, dst.len)
						dst.cap = dst.len
					}
					copySlice(d.elemType, *dst, *slice)
					d.releaseSlice(slice)
					s.cursor++
					return nil
				case ',':
					idx++
				case nul:
					if s.read() {
						goto RETRY
					}
					slice.cap = capacity
					slice.data = data
					d.releaseSlice(slice)
					goto ERROR
				default:
					slice.cap = capacity
					slice.data = data
					d.releaseSlice(slice)
					goto ERROR
				}
				s.cursor++
			}
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return d.errNumber(s.totalOffset())
		case nul:
			if s.read() {
				continue
			}
			goto ERROR
		default:
			goto ERROR
		}
	}
ERROR:
	return errors.ErrUnexpectedEndOfJSON("slice", s.totalOffset())
}

func (d *sliceDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
	buf := ctx.Buf
	depth++
	if depth > maxDecodeNestingDepth {
		return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
	}

	for {
		switch buf[cursor] {
		case ' ', '\n', '\t', '\r':
			cursor++
			continue
		case 'n':
			if err := validateNull(buf, cursor); err != nil {
				return 0, err
			}
			cursor += 4
			typedmemmove(sliceType, p, nilSlice)
			return cursor, nil
		case '[':
			cursor++
			cursor = skipWhiteSpace(buf, cursor)
			if buf[cursor] == ']' {
				dst := (*sliceHeader)(p)
				if dst.data == nil {
					dst.data = newArray(d.elemType, 0)
				} else {
					dst.len = 0
				}
				cursor++
				return cursor, nil
			}
			idx := 0
			slice := d.newSlice((*sliceHeader)(p))
			srcLen := slice.len
			capacity := slice.cap
			data := slice.data
			for {
				if capacity <= idx {
					src := sliceHeader{data: data, len: idx, cap: capacity}
					capacity *= 2
					data = newArray(d.elemType, capacity)
					dst := sliceHeader{data: data, len: idx, cap: capacity}
					copySlice(d.elemType, dst, src)
				}
				ep := unsafe.Pointer(uintptr(data) + uintptr(idx)*d.size)
				// if srcLen is greater than idx, keep the original reference
				if srcLen <= idx {
					if d.isElemPointerType {
						**(**unsafe.Pointer)(unsafe.Pointer(&ep)) = nil // initialize elem pointer
					} else {
						// assign new element to the slice
						typedmemmove(d.elemType, ep, unsafe_New(d.elemType))
					}
				}
				c, err := d.valueDecoder.Decode(ctx, cursor, depth, ep)
				if err != nil {
					return 0, err
				}
				cursor = c
				cursor = skipWhiteSpace(buf, cursor)
				switch buf[cursor] {
				case ']':
					slice.cap = capacity
					slice.len = idx + 1
					slice.data = data
					dst := (*sliceHeader)(p)
					dst.len = idx + 1
					if dst.len > dst.cap {
						dst.data = newArray(d.elemType, dst.len)
						dst.cap = dst.len
					}
					copySlice(d.elemType, *dst, *slice)
					d.releaseSlice(slice)
					cursor++
					return cursor, nil
				case ',':
					idx++
				default:
					slice.cap = capacity
					slice.data = data
					d.releaseSlice(slice)
					return 0, errors.ErrInvalidCharacter(buf[cursor], "slice", cursor)
				}
				cursor++
			}
		case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			return 0, d.errNumber(cursor)
		default:
			return 0, errors.ErrUnexpectedEndOfJSON("slice", cursor)
		}
	}
}
554 vendor/github.com/goccy/go-json/internal/decoder/stream.go (generated, vendored, new file)
@@ -0,0 +1,554 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/json"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
initBufSize = 512
|
||||||
|
)
|
||||||
|
|
||||||
|
type Stream struct {
|
||||||
|
buf []byte
|
||||||
|
bufSize int64
|
||||||
|
length int64
|
||||||
|
r io.Reader
|
||||||
|
offset int64
|
||||||
|
cursor int64
|
||||||
|
filledBuffer bool
|
||||||
|
allRead bool
|
||||||
|
UseNumber bool
|
||||||
|
DisallowUnknownFields bool
|
||||||
|
Option *Option
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewStream(r io.Reader) *Stream {
|
||||||
|
return &Stream{
|
||||||
|
r: r,
|
||||||
|
bufSize: initBufSize,
|
||||||
|
buf: make([]byte, initBufSize),
|
||||||
|
Option: &Option{},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) TotalOffset() int64 {
|
||||||
|
return s.totalOffset()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) Buffered() io.Reader {
|
||||||
|
buflen := int64(len(s.buf))
|
||||||
|
for i := s.cursor; i < buflen; i++ {
|
||||||
|
if s.buf[i] == nul {
|
||||||
|
return bytes.NewReader(s.buf[s.cursor:i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return bytes.NewReader(s.buf[s.cursor:])
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) PrepareForDecode() error {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\t', '\r', '\n':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case ',', ':':
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return io.EOF
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) totalOffset() int64 {
|
||||||
|
return s.offset + s.cursor
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) char() byte {
|
||||||
|
return s.buf[s.cursor]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) equalChar(c byte) bool {
|
||||||
|
cur := s.buf[s.cursor]
|
||||||
|
if cur == nul {
|
||||||
|
s.read()
|
||||||
|
cur = s.buf[s.cursor]
|
||||||
|
}
|
||||||
|
return cur == c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) stat() ([]byte, int64, unsafe.Pointer) {
|
||||||
|
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) bufptr() unsafe.Pointer {
|
||||||
|
return (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) statForRetry() ([]byte, int64, unsafe.Pointer) {
|
||||||
|
s.cursor-- // for retry ( because caller progress cursor position in each loop )
|
||||||
|
return s.buf, s.cursor, (*sliceHeader)(unsafe.Pointer(&s.buf)).data
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) Reset() {
|
||||||
|
s.reset()
|
||||||
|
s.bufSize = initBufSize
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) More() bool {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\r', '\t':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case '}', ']':
|
||||||
|
return false
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) Token() (interface{}, error) {
|
||||||
|
for {
|
||||||
|
c := s.char()
|
||||||
|
switch c {
|
||||||
|
case ' ', '\n', '\r', '\t':
|
||||||
|
s.cursor++
|
||||||
|
case '{', '[', ']', '}':
|
||||||
|
s.cursor++
|
||||||
|
return json.Delim(c), nil
|
||||||
|
case ',', ':':
|
||||||
|
s.cursor++
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
bytes := floatBytes(s)
|
||||||
|
s := *(*string)(unsafe.Pointer(&bytes))
|
||||||
|
f64, err := strconv.ParseFloat(s, 64)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return f64, nil
|
||||||
|
case '"':
|
||||||
|
bytes, err := stringBytes(s)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return string(bytes), nil
|
||||||
|
case 't':
|
||||||
|
if err := trueBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return true, nil
|
||||||
|
case 'f':
|
||||||
|
if err := falseBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return false, nil
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto END
|
||||||
|
default:
|
||||||
|
return nil, errors.ErrInvalidCharacter(s.char(), "token", s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
END:
|
||||||
|
return nil, io.EOF
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) reset() {
|
||||||
|
s.offset += s.cursor
|
||||||
|
s.buf = s.buf[s.cursor:]
|
||||||
|
s.length -= s.cursor
|
||||||
|
s.cursor = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) readBuf() []byte {
|
||||||
|
if s.filledBuffer {
|
||||||
|
s.bufSize *= 2
|
||||||
|
remainBuf := s.buf
|
||||||
|
s.buf = make([]byte, s.bufSize)
|
||||||
|
copy(s.buf, remainBuf)
|
||||||
|
}
|
||||||
|
remainLen := s.length - s.cursor
|
||||||
|
remainNotNulCharNum := int64(0)
|
||||||
|
for i := int64(0); i < remainLen; i++ {
|
||||||
|
if s.buf[s.cursor+i] == nul {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
remainNotNulCharNum++
|
||||||
|
}
|
||||||
|
s.length = s.cursor + remainNotNulCharNum
|
||||||
|
return s.buf[s.cursor+remainNotNulCharNum:]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) read() bool {
|
||||||
|
if s.allRead {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
buf := s.readBuf()
|
||||||
|
last := len(buf) - 1
|
||||||
|
buf[last] = nul
|
||||||
|
n, err := s.r.Read(buf[:last])
|
||||||
|
s.length += int64(n)
|
||||||
|
if n == last {
|
||||||
|
s.filledBuffer = true
|
||||||
|
} else {
|
||||||
|
s.filledBuffer = false
|
||||||
|
}
|
||||||
|
if err == io.EOF {
|
||||||
|
s.allRead = true
|
||||||
|
} else if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) skipWhiteSpace() byte {
|
||||||
|
p := s.bufptr()
|
||||||
|
LOOP:
|
||||||
|
c := char(p, s.cursor)
|
||||||
|
switch c {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
s.cursor++
|
||||||
|
goto LOOP
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
p = s.bufptr()
|
||||||
|
goto LOOP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) skipObject(depth int64) error {
|
||||||
|
braceCount := 1
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '{':
|
||||||
|
braceCount++
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
case '}':
|
||||||
|
braceCount--
|
||||||
|
depth--
|
||||||
|
if braceCount == 0 {
|
||||||
|
s.cursor = cursor + 1
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case '[':
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
case ']':
|
||||||
|
depth--
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if char(p, cursor) == nul {
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
goto SWITCH_OUT
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("object of object", cursor)
|
||||||
|
}
|
||||||
|
SWITCH_OUT:
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) skipArray(depth int64) error {
|
||||||
|
bracketCount := 1
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '[':
|
||||||
|
bracketCount++
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
case ']':
|
||||||
|
bracketCount--
|
||||||
|
depth--
|
||||||
|
if bracketCount == 0 {
|
||||||
|
s.cursor = cursor + 1
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
case '}':
|
||||||
|
depth--
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if char(p, cursor) == nul {
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
goto SWITCH_OUT
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("string of object", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("array of object", cursor)
|
||||||
|
}
|
||||||
|
SWITCH_OUT:
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *Stream) skipValue(depth int64) error {
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("value of object", s.totalOffset())
|
||||||
|
case '{':
|
||||||
|
s.cursor = cursor + 1
|
||||||
|
return s.skipObject(depth + 1)
|
||||||
|
case '[':
|
||||||
|
s.cursor = cursor + 1
|
||||||
|
return s.skipArray(depth + 1)
|
||||||
|
case '"':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if char(p, cursor) == nul {
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
s.cursor = cursor + 1
|
||||||
|
return nil
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.statForRetry()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("value of string", s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
c := char(p, cursor)
|
||||||
|
if floatTable[c] {
|
||||||
|
continue
|
||||||
|
} else if c == nul {
|
||||||
|
if s.read() {
|
||||||
|
s.cursor-- // for retry current character
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor = cursor
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
case 't':
|
||||||
|
s.cursor = cursor
|
||||||
|
if err := trueBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case 'f':
|
||||||
|
s.cursor = cursor
|
||||||
|
if err := falseBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case 'n':
|
||||||
|
s.cursor = cursor
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func nullBytes(s *Stream) error {
|
||||||
|
// current cursor's character is 'n'
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'u' {
|
||||||
|
if err := retryReadNull(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'l' {
|
||||||
|
if err := retryReadNull(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'l' {
|
||||||
|
if err := retryReadNull(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func retryReadNull(s *Stream) error {
|
||||||
|
if s.char() == nul && s.read() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.ErrInvalidCharacter(s.char(), "null", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func trueBytes(s *Stream) error {
|
||||||
|
// current cursor's character is 't'
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'r' {
|
||||||
|
if err := retryReadTrue(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'u' {
|
||||||
|
if err := retryReadTrue(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'e' {
|
||||||
|
if err := retryReadTrue(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func retryReadTrue(s *Stream) error {
|
||||||
|
if s.char() == nul && s.read() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.ErrInvalidCharacter(s.char(), "bool(true)", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func falseBytes(s *Stream) error {
|
||||||
|
// current cursor's character is 'f'
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'a' {
|
||||||
|
if err := retryReadFalse(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'l' {
|
||||||
|
if err := retryReadFalse(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 's' {
|
||||||
|
if err := retryReadFalse(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.char() != 'e' {
|
||||||
|
if err := retryReadFalse(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func retryReadFalse(s *Stream) error {
|
||||||
|
if s.char() == nul && s.read() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.ErrInvalidCharacter(s.char(), "bool(false)", s.totalOffset())
|
||||||
|
}
|
362 vendor/github.com/goccy/go-json/internal/decoder/string.go (generated, vendored, new file)
@@ -0,0 +1,362 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"unicode"
|
||||||
|
"unicode/utf16"
|
||||||
|
"unicode/utf8"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type stringDecoder struct {
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newStringDecoder(structName, fieldName string) *stringDecoder {
|
||||||
|
return &stringDecoder{
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *stringDecoder) errUnmarshalType(typeName string, offset int64) *errors.UnmarshalTypeError {
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: typeName,
|
||||||
|
Type: reflect.TypeOf(""),
|
||||||
|
Offset: offset,
|
||||||
|
Struct: d.structName,
|
||||||
|
Field: d.fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *stringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
bytes, err := d.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||||
|
s.reset()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *stringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
**(**string)(unsafe.Pointer(&p)) = *(*string)(unsafe.Pointer(&bytes))
|
||||||
|
return cursor, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
hexToInt = [256]int{
|
||||||
|
'0': 0,
|
||||||
|
'1': 1,
|
||||||
|
'2': 2,
|
||||||
|
'3': 3,
|
||||||
|
'4': 4,
|
||||||
|
'5': 5,
|
||||||
|
'6': 6,
|
||||||
|
'7': 7,
|
||||||
|
'8': 8,
|
||||||
|
'9': 9,
|
||||||
|
'A': 10,
|
||||||
|
'B': 11,
|
||||||
|
'C': 12,
|
||||||
|
'D': 13,
|
||||||
|
'E': 14,
|
||||||
|
'F': 15,
|
||||||
|
'a': 10,
|
||||||
|
'b': 11,
|
||||||
|
'c': 12,
|
||||||
|
'd': 13,
|
||||||
|
'e': 14,
|
||||||
|
'f': 15,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
func unicodeToRune(code []byte) rune {
|
||||||
|
var r rune
|
||||||
|
for i := 0; i < len(code); i++ {
|
||||||
|
r = r*16 + rune(hexToInt[code[i]])
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeUnicodeRune(s *Stream, p unsafe.Pointer) (rune, int64, unsafe.Pointer, error) {
|
||||||
|
const defaultOffset = 5
|
||||||
|
const surrogateOffset = 11
|
||||||
|
|
||||||
|
if s.cursor+defaultOffset >= s.length {
|
||||||
|
if !s.read() {
|
||||||
|
return rune(0), 0, nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||||
|
}
|
||||||
|
p = s.bufptr()
|
||||||
|
}
|
||||||
|
|
||||||
|
r := unicodeToRune(s.buf[s.cursor+1 : s.cursor+defaultOffset])
|
||||||
|
if utf16.IsSurrogate(r) {
|
||||||
|
if s.cursor+surrogateOffset >= s.length {
|
||||||
|
s.read()
|
||||||
|
p = s.bufptr()
|
||||||
|
}
|
||||||
|
if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor+defaultOffset] != '\\' || s.buf[s.cursor+defaultOffset+1] != 'u' {
|
||||||
|
return unicode.ReplacementChar, defaultOffset, p, nil
|
||||||
|
}
|
||||||
|
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||||
|
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||||
|
return r, surrogateOffset, p, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return r, defaultOffset, p, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeUnicode(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||||
|
const backSlashAndULen = 2 // length of \u
|
||||||
|
|
||||||
|
r, offset, pp, err := decodeUnicodeRune(s, p)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
unicode := []byte(string(r))
|
||||||
|
unicodeLen := int64(len(unicode))
|
||||||
|
s.buf = append(append(s.buf[:s.cursor-1], unicode...), s.buf[s.cursor+offset:]...)
|
||||||
|
unicodeOrgLen := offset - 1
|
||||||
|
s.length = s.length - (backSlashAndULen + (unicodeOrgLen - unicodeLen))
|
||||||
|
s.cursor = s.cursor - backSlashAndULen + unicodeLen
|
||||||
|
return pp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeEscapeString(s *Stream, p unsafe.Pointer) (unsafe.Pointer, error) {
|
||||||
|
s.cursor++
|
||||||
|
RETRY:
|
||||||
|
switch s.buf[s.cursor] {
|
||||||
|
case '"':
|
||||||
|
s.buf[s.cursor] = '"'
|
||||||
|
case '\\':
|
||||||
|
s.buf[s.cursor] = '\\'
|
||||||
|
case '/':
|
||||||
|
s.buf[s.cursor] = '/'
|
||||||
|
case 'b':
|
||||||
|
s.buf[s.cursor] = '\b'
|
||||||
|
case 'f':
|
||||||
|
s.buf[s.cursor] = '\f'
|
||||||
|
case 'n':
|
||||||
|
s.buf[s.cursor] = '\n'
|
||||||
|
case 'r':
|
||||||
|
s.buf[s.cursor] = '\r'
|
||||||
|
case 't':
|
||||||
|
s.buf[s.cursor] = '\t'
|
||||||
|
case 'u':
|
||||||
|
return decodeUnicode(s, p)
|
||||||
|
case nul:
|
||||||
|
if !s.read() {
|
||||||
|
return nil, errors.ErrInvalidCharacter(s.char(), "escaped string", s.totalOffset())
|
||||||
|
}
|
||||||
|
goto RETRY
|
||||||
|
default:
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
s.buf = append(s.buf[:s.cursor-1], s.buf[s.cursor:]...)
|
||||||
|
s.length--
|
||||||
|
s.cursor--
|
||||||
|
p = s.bufptr()
|
||||||
|
return p, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
runeErrBytes = []byte(string(utf8.RuneError))
|
||||||
|
runeErrBytesLen = int64(len(runeErrBytes))
|
||||||
|
)
|
||||||
|
|
||||||
|
func stringBytes(s *Stream) ([]byte, error) {
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
cursor++ // skip double quote char
|
||||||
|
start := cursor
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '\\':
|
||||||
|
s.cursor = cursor
|
||||||
|
pp, err := decodeEscapeString(s, p)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
p = pp
|
||||||
|
cursor = s.cursor
|
||||||
|
case '"':
|
||||||
|
literal := s.buf[start:cursor]
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
return literal, nil
|
||||||
|
case
|
||||||
|
// 0x00 is nul, 0x5c is '\\', 0x22 is '"' .
|
||||||
|
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, // 0x00-0x0F
|
||||||
|
0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, // 0x10-0x1F
|
||||||
|
0x20, 0x21 /*0x22,*/, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, // 0x20-0x2F
|
||||||
|
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0x3E, 0x3F, // 0x30-0x3F
|
||||||
|
0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x4B, 0x4C, 0x4D, 0x4E, 0x4F, // 0x40-0x4F
|
||||||
|
0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x5B /*0x5C,*/, 0x5D, 0x5E, 0x5F, // 0x50-0x5F
|
||||||
|
0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x6B, 0x6C, 0x6D, 0x6E, 0x6F, // 0x60-0x6F
|
||||||
|
0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x7B, 0x7C, 0x7D, 0x7E, 0x7F: // 0x70-0x7F
|
||||||
|
// character is ASCII. skip to next char
|
||||||
|
case
|
||||||
|
0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x8B, 0x8C, 0x8D, 0x8E, 0x8F, // 0x80-0x8F
|
||||||
|
0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0x9B, 0x9C, 0x9D, 0x9E, 0x9F, // 0x90-0x9F
|
||||||
|
0xA0, 0xA1, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xAB, 0xAC, 0xAD, 0xAE, 0xAF, // 0xA0-0xAF
|
||||||
|
0xB0, 0xB1, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xBB, 0xBC, 0xBD, 0xBE, 0xBF, // 0xB0-0xBF
|
||||||
|
0xC0, 0xC1, // 0xC0-0xC1
|
||||||
|
0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA, 0xFB, 0xFC, 0xFD, 0xFE, 0xFF: // 0xF5-0xFF
|
||||||
|
// character is invalid
|
||||||
|
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), runeErrBytes...), s.buf[cursor+1:]...)
|
||||||
|
_, _, p = s.stat()
|
||||||
|
cursor += runeErrBytesLen
|
||||||
|
s.length += runeErrBytesLen
|
||||||
|
continue
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
goto ERROR
|
||||||
|
case 0xEF:
|
||||||
|
// RuneError is {0xEF, 0xBF, 0xBD}
|
||||||
|
if s.buf[cursor+1] == 0xBF && s.buf[cursor+2] == 0xBD {
|
||||||
|
// found RuneError: skip
|
||||||
|
cursor += 2
|
||||||
|
break
|
||||||
|
}
|
||||||
|
fallthrough
|
||||||
|
default:
|
||||||
|
// multi-byte character
|
||||||
|
r, _ := utf8.DecodeRune(s.buf[cursor:])
|
||||||
|
b := []byte(string(r))
|
||||||
|
if r == utf8.RuneError {
|
||||||
|
s.buf = append(append(append([]byte{}, s.buf[:cursor]...), b...), s.buf[cursor+1:]...)
|
||||||
|
_, _, p = s.stat()
|
||||||
|
}
|
||||||
|
cursor += int64(len(b))
|
||||||
|
s.length += int64(len(b))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
ERROR:
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *stringDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case '[':
|
||||||
|
return nil, d.errUnmarshalType("array", s.totalOffset())
|
||||||
|
case '{':
|
||||||
|
return nil, d.errUnmarshalType("object", s.totalOffset())
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return nil, d.errUnmarshalType("number", s.totalOffset())
|
||||||
|
case '"':
|
||||||
|
return stringBytes(s)
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return nil, errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *stringDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
case '[':
|
||||||
|
return nil, 0, d.errUnmarshalType("array", cursor)
|
||||||
|
case '{':
|
||||||
|
return nil, 0, d.errUnmarshalType("object", cursor)
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return nil, 0, d.errUnmarshalType("number", cursor)
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
start := cursor
|
||||||
|
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
for {
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case '"':
|
||||||
|
buf[cursor] = '"'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case '\\':
|
||||||
|
buf[cursor] = '\\'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case '/':
|
||||||
|
buf[cursor] = '/'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 'b':
|
||||||
|
buf[cursor] = '\b'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 'f':
|
||||||
|
buf[cursor] = '\f'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 'n':
|
||||||
|
buf[cursor] = '\n'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 'r':
|
||||||
|
buf[cursor] = '\r'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 't':
|
||||||
|
buf[cursor] = '\t'
|
||||||
|
buf = append(buf[:cursor-1], buf[cursor:]...)
|
||||||
|
case 'u':
|
||||||
|
buflen := int64(len(buf))
|
||||||
|
if cursor+5 >= buflen {
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||||
|
}
|
||||||
|
code := unicodeToRune(buf[cursor+1 : cursor+5])
|
||||||
|
unicode := []byte(string(code))
|
||||||
|
buf = append(append(buf[:cursor-1], unicode...), buf[cursor+5:]...)
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("escaped string", cursor)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
case '"':
|
||||||
|
literal := buf[start:cursor]
|
||||||
|
cursor++
|
||||||
|
return literal, cursor, nil
|
||||||
|
case nul:
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return nil, cursor, nil
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrInvalidBeginningOfValue(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
819
vendor/github.com/goccy/go-json/internal/decoder/struct.go
generated
vendored
Normal file
|
@@ -0,0 +1,819 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"math/bits"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
"unicode/utf16"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
type structFieldSet struct {
|
||||||
|
dec Decoder
|
||||||
|
offset uintptr
|
||||||
|
isTaggedKey bool
|
||||||
|
fieldIdx int
|
||||||
|
key string
|
||||||
|
keyLen int64
|
||||||
|
err error
|
||||||
|
}
|
||||||
|
|
||||||
|
type structDecoder struct {
|
||||||
|
fieldMap map[string]*structFieldSet
|
||||||
|
fieldUniqueNameNum int
|
||||||
|
stringDecoder *stringDecoder
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
isTriedOptimize bool
|
||||||
|
keyBitmapUint8 [][256]uint8
|
||||||
|
keyBitmapUint16 [][256]uint16
|
||||||
|
sortedFieldSets []*structFieldSet
|
||||||
|
keyDecoder func(*structDecoder, []byte, int64) (int64, *structFieldSet, error)
|
||||||
|
keyStreamDecoder func(*structDecoder, *Stream) (*structFieldSet, string, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
largeToSmallTable [256]byte
|
||||||
|
)
|
||||||
|
|
||||||
|
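// init builds largeToSmallTable, an ASCII lower-casing table used for case-insensitive struct key matching.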
func init() {
|
||||||
|
for i := 0; i < 256; i++ {
|
||||||
|
c := i
|
||||||
|
if 'A' <= c && c <= 'Z' {
|
||||||
|
c += 'a' - 'A'
|
||||||
|
}
|
||||||
|
largeToSmallTable[i] = byte(c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newStructDecoder(structName, fieldName string, fieldMap map[string]*structFieldSet) *structDecoder {
|
||||||
|
return &structDecoder{
|
||||||
|
fieldMap: fieldMap,
|
||||||
|
stringDecoder: newStringDecoder(structName, fieldName),
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
keyDecoder: decodeKey,
|
||||||
|
keyStreamDecoder: decodeKeyStream,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
allowOptimizeMaxKeyLen = 64
|
||||||
|
allowOptimizeMaxFieldLen = 16
|
||||||
|
)
|
||||||
|
|
||||||
|
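// tryOptimize enables the bitmap-based key decoders when every field name is at most 64 bytes, there are at most 16 case-insensitively distinct names, and no two names collide after lower-casing; up to 8 names use the uint8 bitmaps, otherwise the uint16 ones.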
func (d *structDecoder) tryOptimize() {
|
||||||
|
fieldUniqueNameMap := map[string]int{}
|
||||||
|
fieldIdx := -1
|
||||||
|
for k, v := range d.fieldMap {
|
||||||
|
lower := strings.ToLower(k)
|
||||||
|
idx, exists := fieldUniqueNameMap[lower]
|
||||||
|
if exists {
|
||||||
|
v.fieldIdx = idx
|
||||||
|
} else {
|
||||||
|
fieldIdx++
|
||||||
|
v.fieldIdx = fieldIdx
|
||||||
|
}
|
||||||
|
fieldUniqueNameMap[lower] = fieldIdx
|
||||||
|
}
|
||||||
|
d.fieldUniqueNameNum = len(fieldUniqueNameMap)
|
||||||
|
|
||||||
|
if d.isTriedOptimize {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fieldMap := map[string]*structFieldSet{}
|
||||||
|
conflicted := map[string]struct{}{}
|
||||||
|
for k, v := range d.fieldMap {
|
||||||
|
key := strings.ToLower(k)
|
||||||
|
if key != k {
|
||||||
|
// a key with the same lower-cased form already exists (e.g. Hello and HELLO share the same lower-case key)
|
||||||
|
if _, exists := conflicted[key]; exists {
|
||||||
|
d.isTriedOptimize = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
conflicted[key] = struct{}{}
|
||||||
|
}
|
||||||
|
if field, exists := fieldMap[key]; exists {
|
||||||
|
if field != v {
|
||||||
|
d.isTriedOptimize = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fieldMap[key] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fieldMap) > allowOptimizeMaxFieldLen {
|
||||||
|
d.isTriedOptimize = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var maxKeyLen int
|
||||||
|
sortedKeys := []string{}
|
||||||
|
for key := range fieldMap {
|
||||||
|
keyLen := len(key)
|
||||||
|
if keyLen > allowOptimizeMaxKeyLen {
|
||||||
|
d.isTriedOptimize = true
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if maxKeyLen < keyLen {
|
||||||
|
maxKeyLen = keyLen
|
||||||
|
}
|
||||||
|
sortedKeys = append(sortedKeys, key)
|
||||||
|
}
|
||||||
|
sort.Strings(sortedKeys)
|
||||||
|
|
||||||
|
// By allocating one more slot than `maxKeyLen`,
|
||||||
|
// the key index never needs to be compared against the bitmap length on each byte.
|
||||||
|
bitmapLen := maxKeyLen + 1
|
||||||
|
if len(sortedKeys) <= 8 {
|
||||||
|
keyBitmap := make([][256]uint8, bitmapLen)
|
||||||
|
for i, key := range sortedKeys {
|
||||||
|
for j := 0; j < len(key); j++ {
|
||||||
|
c := key[j]
|
||||||
|
keyBitmap[j][c] |= (1 << uint(i))
|
||||||
|
}
|
||||||
|
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||||
|
}
|
||||||
|
d.keyBitmapUint8 = keyBitmap
|
||||||
|
d.keyDecoder = decodeKeyByBitmapUint8
|
||||||
|
d.keyStreamDecoder = decodeKeyByBitmapUint8Stream
|
||||||
|
} else {
|
||||||
|
keyBitmap := make([][256]uint16, bitmapLen)
|
||||||
|
for i, key := range sortedKeys {
|
||||||
|
for j := 0; j < len(key); j++ {
|
||||||
|
c := key[j]
|
||||||
|
keyBitmap[j][c] |= (1 << uint(i))
|
||||||
|
}
|
||||||
|
d.sortedFieldSets = append(d.sortedFieldSets, fieldMap[key])
|
||||||
|
}
|
||||||
|
d.keyBitmapUint16 = keyBitmap
|
||||||
|
d.keyDecoder = decodeKeyByBitmapUint16
|
||||||
|
d.keyStreamDecoder = decodeKeyByBitmapUint16Stream
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// decode from '\uXXXX'
|
||||||
|
func decodeKeyCharByUnicodeRune(buf []byte, cursor int64) ([]byte, int64) {
|
||||||
|
const defaultOffset = 4
|
||||||
|
const surrogateOffset = 6
|
||||||
|
|
||||||
|
r := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||||
|
if utf16.IsSurrogate(r) {
|
||||||
|
cursor += defaultOffset
|
||||||
|
if cursor+surrogateOffset >= int64(len(buf)) || buf[cursor] != '\\' || buf[cursor+1] != 'u' {
|
||||||
|
return []byte(string(unicode.ReplacementChar)), cursor + defaultOffset - 1
|
||||||
|
}
|
||||||
|
cursor += 2
|
||||||
|
r2 := unicodeToRune(buf[cursor : cursor+defaultOffset])
|
||||||
|
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||||
|
return []byte(string(r)), cursor + defaultOffset - 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return []byte(string(r)), cursor + defaultOffset - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyCharByEscapedChar(buf []byte, cursor int64) ([]byte, int64) {
|
||||||
|
c := buf[cursor]
|
||||||
|
cursor++
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
return []byte{'"'}, cursor
|
||||||
|
case '\\':
|
||||||
|
return []byte{'\\'}, cursor
|
||||||
|
case '/':
|
||||||
|
return []byte{'/'}, cursor
|
||||||
|
case 'b':
|
||||||
|
return []byte{'\b'}, cursor
|
||||||
|
case 'f':
|
||||||
|
return []byte{'\f'}, cursor
|
||||||
|
case 'n':
|
||||||
|
return []byte{'\n'}, cursor
|
||||||
|
case 'r':
|
||||||
|
return []byte{'\r'}, cursor
|
||||||
|
case 't':
|
||||||
|
return []byte{'\t'}, cursor
|
||||||
|
case 'u':
|
||||||
|
return decodeKeyCharByUnicodeRune(buf, cursor)
|
||||||
|
}
|
||||||
|
return nil, cursor
|
||||||
|
}
|
||||||
|
|
||||||
|
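// decodeKeyByBitmapUint8 matches an object key against up to 8 candidate field names: every key byte ANDs the per-position bitmap into curBit, and at the closing quote the lowest remaining bit selects the field (or none).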
func decodeKeyByBitmapUint8(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||||
|
var (
|
||||||
|
curBit uint8 = math.MaxUint8
|
||||||
|
)
|
||||||
|
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
for {
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
c := char(b, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
return cursor, nil, nil
|
||||||
|
case nul:
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
}
|
||||||
|
keyIdx := 0
|
||||||
|
bitmap := d.keyBitmapUint8
|
||||||
|
start := cursor
|
||||||
|
for {
|
||||||
|
c := char(b, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||||
|
field := d.sortedFieldSets[fieldSetIndex]
|
||||||
|
keyLen := cursor - start
|
||||||
|
cursor++
|
||||||
|
if keyLen < field.keyLen {
|
||||||
|
// key is shorter than the matched candidate's name, so it only matched a prefix; treat as not found
|
||||||
|
return cursor, nil, nil
|
||||||
|
}
|
||||||
|
return cursor, field, nil
|
||||||
|
case nul:
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor)
|
||||||
|
for _, c := range chars {
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
return decodeKeyNotFound(b, cursor)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor = nextCursor
|
||||||
|
default:
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
return decodeKeyNotFound(b, cursor)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyByBitmapUint16(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||||
|
var (
|
||||||
|
curBit uint16 = math.MaxUint16
|
||||||
|
)
|
||||||
|
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
for {
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
c := char(b, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
return cursor, nil, nil
|
||||||
|
case nul:
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
}
|
||||||
|
keyIdx := 0
|
||||||
|
bitmap := d.keyBitmapUint16
|
||||||
|
start := cursor
|
||||||
|
for {
|
||||||
|
c := char(b, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||||
|
field := d.sortedFieldSets[fieldSetIndex]
|
||||||
|
keyLen := cursor - start
|
||||||
|
cursor++
|
||||||
|
if keyLen < field.keyLen {
|
||||||
|
// key is shorter than the matched candidate's name, so it only matched a prefix; treat as not found
|
||||||
|
return cursor, nil, nil
|
||||||
|
}
|
||||||
|
return cursor, field, nil
|
||||||
|
case nul:
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
chars, nextCursor := decodeKeyCharByEscapedChar(buf, cursor)
|
||||||
|
for _, c := range chars {
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
return decodeKeyNotFound(b, cursor)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor = nextCursor
|
||||||
|
default:
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
return decodeKeyNotFound(b, cursor)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return cursor, nil, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
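// decodeKeyNotFound skips the rest of an unrecognized object key up to and past its closing quote.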
func decodeKeyNotFound(b unsafe.Pointer, cursor int64) (int64, *structFieldSet, error) {
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
return cursor, nil, nil
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if char(b, cursor) == nul {
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
return 0, nil, errors.ErrUnexpectedEndOfJSON("string", cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKey(d *structDecoder, buf []byte, cursor int64) (int64, *structFieldSet, error) {
|
||||||
|
key, c, err := d.stringDecoder.decodeByte(buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, nil, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
k := *(*string)(unsafe.Pointer(&key))
|
||||||
|
field, exists := d.fieldMap[k]
|
||||||
|
if !exists {
|
||||||
|
return cursor, nil, nil
|
||||||
|
}
|
||||||
|
return cursor, field, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyByBitmapUint8Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||||
|
var (
|
||||||
|
curBit uint8 = math.MaxUint8
|
||||||
|
)
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
FIRST_CHAR:
|
||||||
|
start := cursor
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
return nil, "", nil
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
goto FIRST_CHAR
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
keyIdx := 0
|
||||||
|
bitmap := d.keyBitmapUint8
|
||||||
|
for {
|
||||||
|
c := char(p, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
fieldSetIndex := bits.TrailingZeros8(curBit)
|
||||||
|
field := d.sortedFieldSets[fieldSetIndex]
|
||||||
|
keyLen := cursor - start
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
if keyLen < field.keyLen {
|
||||||
|
// key is shorter than the matched candidate's name, so it only matched a prefix; treat as not found
|
||||||
|
return nil, field.key, nil
|
||||||
|
}
|
||||||
|
return field, field.key, nil
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
case '\\':
|
||||||
|
s.cursor = cursor + 1 // skip '\' char
|
||||||
|
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
cursor = s.cursor
|
||||||
|
for _, c := range chars {
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
s.cursor = cursor
|
||||||
|
return decodeKeyNotFoundStream(s, start)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
s.cursor = cursor
|
||||||
|
return decodeKeyNotFoundStream(s, start)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyByBitmapUint16Stream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||||
|
var (
|
||||||
|
curBit uint16 = math.MaxUint16
|
||||||
|
)
|
||||||
|
_, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
FIRST_CHAR:
|
||||||
|
start := cursor
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
return nil, "", nil
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
goto FIRST_CHAR
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
keyIdx := 0
|
||||||
|
bitmap := d.keyBitmapUint16
|
||||||
|
for {
|
||||||
|
c := char(p, cursor)
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
fieldSetIndex := bits.TrailingZeros16(curBit)
|
||||||
|
field := d.sortedFieldSets[fieldSetIndex]
|
||||||
|
keyLen := cursor - start
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
if keyLen < field.keyLen {
|
||||||
|
// key is shorter than the matched candidate's name, so it only matched a prefix; treat as not found
|
||||||
|
return nil, field.key, nil
|
||||||
|
}
|
||||||
|
return field, field.key, nil
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if s.read() {
|
||||||
|
_, cursor, p = s.stat()
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
case '\\':
|
||||||
|
s.cursor = cursor + 1 // skip '\' char
|
||||||
|
chars, err := decodeKeyCharByEscapeCharStream(s)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
cursor = s.cursor
|
||||||
|
for _, c := range chars {
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
s.cursor = cursor
|
||||||
|
return decodeKeyNotFoundStream(s, start)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
curBit &= bitmap[keyIdx][largeToSmallTable[c]]
|
||||||
|
if curBit == 0 {
|
||||||
|
s.cursor = cursor
|
||||||
|
return decodeKeyNotFoundStream(s, start)
|
||||||
|
}
|
||||||
|
keyIdx++
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return nil, "", errors.ErrInvalidBeginningOfValue(char(p, cursor), s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// decode from '\uXXXX'
|
||||||
|
func decodeKeyCharByUnicodeRuneStream(s *Stream) ([]byte, error) {
|
||||||
|
const defaultOffset = 4
|
||||||
|
const surrogateOffset = 6
|
||||||
|
|
||||||
|
if s.cursor+defaultOffset >= s.length {
|
||||||
|
if !s.read() {
|
||||||
|
return nil, errors.ErrInvalidCharacter(s.char(), "escaped unicode char", s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
r := unicodeToRune(s.buf[s.cursor : s.cursor+defaultOffset])
|
||||||
|
if utf16.IsSurrogate(r) {
|
||||||
|
s.cursor += defaultOffset
|
||||||
|
if s.cursor+surrogateOffset >= s.length {
|
||||||
|
s.read()
|
||||||
|
}
|
||||||
|
if s.cursor+surrogateOffset >= s.length || s.buf[s.cursor] != '\\' || s.buf[s.cursor+1] != 'u' {
|
||||||
|
s.cursor += defaultOffset - 1
|
||||||
|
return []byte(string(unicode.ReplacementChar)), nil
|
||||||
|
}
|
||||||
|
r2 := unicodeToRune(s.buf[s.cursor+defaultOffset+2 : s.cursor+surrogateOffset])
|
||||||
|
if r := utf16.DecodeRune(r, r2); r != unicode.ReplacementChar {
|
||||||
|
s.cursor += defaultOffset - 1
|
||||||
|
return []byte(string(r)), nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor += defaultOffset - 1
|
||||||
|
return []byte(string(r)), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyCharByEscapeCharStream(s *Stream) ([]byte, error) {
|
||||||
|
c := s.buf[s.cursor]
|
||||||
|
s.cursor++
|
||||||
|
RETRY:
|
||||||
|
switch c {
|
||||||
|
case '"':
|
||||||
|
return []byte{'"'}, nil
|
||||||
|
case '\\':
|
||||||
|
return []byte{'\\'}, nil
|
||||||
|
case '/':
|
||||||
|
return []byte{'/'}, nil
|
||||||
|
case 'b':
|
||||||
|
return []byte{'\b'}, nil
|
||||||
|
case 'f':
|
||||||
|
return []byte{'\f'}, nil
|
||||||
|
case 'n':
|
||||||
|
return []byte{'\n'}, nil
|
||||||
|
case 'r':
|
||||||
|
return []byte{'\r'}, nil
|
||||||
|
case 't':
|
||||||
|
return []byte{'\t'}, nil
|
||||||
|
case 'u':
|
||||||
|
return decodeKeyCharByUnicodeRuneStream(s)
|
||||||
|
case nul:
|
||||||
|
if !s.read() {
|
||||||
|
return nil, errors.ErrInvalidCharacter(s.char(), "escaped char", s.totalOffset())
|
||||||
|
}
|
||||||
|
goto RETRY
|
||||||
|
default:
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("struct field", s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyNotFoundStream(s *Stream, start int64) (*structFieldSet, string, error) {
|
||||||
|
buf, cursor, p := s.stat()
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
switch char(p, cursor) {
|
||||||
|
case '"':
|
||||||
|
b := buf[start:cursor]
|
||||||
|
key := *(*string)(unsafe.Pointer(&b))
|
||||||
|
cursor++
|
||||||
|
s.cursor = cursor
|
||||||
|
return nil, key, nil
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if char(p, cursor) == nul {
|
||||||
|
s.cursor = cursor
|
||||||
|
if !s.read() {
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
buf, cursor, p = s.statForRetry()
|
||||||
|
}
|
||||||
|
case nul:
|
||||||
|
s.cursor = cursor
|
||||||
|
if !s.read() {
|
||||||
|
return nil, "", errors.ErrUnexpectedEndOfJSON("string", s.totalOffset())
|
||||||
|
}
|
||||||
|
buf, cursor, p = s.statForRetry()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func decodeKeyStream(d *structDecoder, s *Stream) (*structFieldSet, string, error) {
|
||||||
|
key, err := d.stringDecoder.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return nil, "", err
|
||||||
|
}
|
||||||
|
k := *(*string)(unsafe.Pointer(&key))
|
||||||
|
return d.fieldMap[k], k, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
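// DecodeStream decodes one JSON object from the stream into the struct at p, routing each key through keyStreamDecoder and skipping values for unknown keys (unless DisallowUnknownFields is set).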
func (d *structDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return errors.ErrExceededMaxDepth(s.char(), s.cursor)
|
||||||
|
}
|
||||||
|
|
||||||
|
c := s.skipWhiteSpace()
|
||||||
|
switch c {
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
if s.char() != '{' {
|
||||||
|
return errors.ErrInvalidBeginningOfValue(s.char(), s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if s.skipWhiteSpace() == '}' {
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
var (
|
||||||
|
seenFields map[int]struct{}
|
||||||
|
seenFieldNum int
|
||||||
|
)
|
||||||
|
firstWin := (s.Option.Flags & FirstWinOption) != 0
|
||||||
|
if firstWin {
|
||||||
|
seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
s.reset()
|
||||||
|
field, key, err := d.keyStreamDecoder(d, s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if s.skipWhiteSpace() != ':' {
|
||||||
|
return errors.ErrExpected("colon after object key", s.totalOffset())
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
if field != nil {
|
||||||
|
if field.err != nil {
|
||||||
|
return field.err
|
||||||
|
}
|
||||||
|
if firstWin {
|
||||||
|
if _, exists := seenFields[field.fieldIdx]; exists {
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
seenFieldNum++
|
||||||
|
if d.fieldUniqueNameNum <= seenFieldNum {
|
||||||
|
return s.skipObject(depth)
|
||||||
|
}
|
||||||
|
seenFields[field.fieldIdx] = struct{}{}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := field.dec.DecodeStream(s, depth, unsafe.Pointer(uintptr(p)+field.offset)); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if s.DisallowUnknownFields {
|
||||||
|
return fmt.Errorf("json: unknown field %q", key)
|
||||||
|
} else {
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
c := s.skipWhiteSpace()
|
||||||
|
if c == '}' {
|
||||||
|
s.cursor++
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if c != ',' {
|
||||||
|
return errors.ErrExpected("comma after object element", s.totalOffset())
|
||||||
|
}
|
||||||
|
s.cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
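// Decode is the buffered counterpart of DecodeStream: it walks the object in ctx.Buf from cursor and returns the position just past the closing '}'.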
func (d *structDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
depth++
|
||||||
|
if depth > maxDecodeNestingDepth {
|
||||||
|
return 0, errors.ErrExceededMaxDepth(buf[cursor], cursor)
|
||||||
|
}
|
||||||
|
buflen := int64(len(buf))
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
b := (*sliceHeader)(unsafe.Pointer(&buf)).data
|
||||||
|
switch char(b, cursor) {
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return cursor, nil
|
||||||
|
case '{':
|
||||||
|
default:
|
||||||
|
return 0, errors.ErrInvalidBeginningOfValue(char(b, cursor), cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
if buf[cursor] == '}' {
|
||||||
|
cursor++
|
||||||
|
return cursor, nil
|
||||||
|
}
|
||||||
|
var (
|
||||||
|
seenFields map[int]struct{}
|
||||||
|
seenFieldNum int
|
||||||
|
)
|
||||||
|
firstWin := (ctx.Option.Flags & FirstWinOption) != 0
|
||||||
|
if firstWin {
|
||||||
|
seenFields = make(map[int]struct{}, d.fieldUniqueNameNum)
|
||||||
|
}
|
||||||
|
for {
|
||||||
|
c, field, err := d.keyDecoder(d, buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(buf, c)
|
||||||
|
if char(b, cursor) != ':' {
|
||||||
|
return 0, errors.ErrExpected("colon after object key", cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
if cursor >= buflen {
|
||||||
|
return 0, errors.ErrExpected("object value after colon", cursor)
|
||||||
|
}
|
||||||
|
if field != nil {
|
||||||
|
if field.err != nil {
|
||||||
|
return 0, field.err
|
||||||
|
}
|
||||||
|
if firstWin {
|
||||||
|
if _, exists := seenFields[field.fieldIdx]; exists {
|
||||||
|
c, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
} else {
|
||||||
|
c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
seenFieldNum++
|
||||||
|
if d.fieldUniqueNameNum <= seenFieldNum {
|
||||||
|
return skipObject(buf, cursor, depth)
|
||||||
|
}
|
||||||
|
seenFields[field.fieldIdx] = struct{}{}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
c, err := field.dec.Decode(ctx, cursor, depth, unsafe.Pointer(uintptr(p)+field.offset))
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
c, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
if char(b, cursor) == '}' {
|
||||||
|
cursor++
|
||||||
|
return cursor, nil
|
||||||
|
}
|
||||||
|
if char(b, cursor) != ',' {
|
||||||
|
return 0, errors.ErrExpected("comma after object element", cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
29
vendor/github.com/goccy/go-json/internal/decoder/type.go
generated
vendored
Normal file
|
@@ -0,0 +1,29 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"encoding"
|
||||||
|
"encoding/json"
|
||||||
|
"reflect"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Decoder interface {
|
||||||
|
Decode(*RuntimeContext, int64, int64, unsafe.Pointer) (int64, error)
|
||||||
|
DecodeStream(*Stream, int64, unsafe.Pointer) error
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
nul = '\000'
|
||||||
|
maxDecodeNestingDepth = 10000
|
||||||
|
)
|
||||||
|
|
||||||
|
type unmarshalerContext interface {
|
||||||
|
UnmarshalJSON(context.Context, []byte) error
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
unmarshalJSONType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
|
||||||
|
unmarshalJSONContextType = reflect.TypeOf((*unmarshalerContext)(nil)).Elem()
|
||||||
|
unmarshalTextType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||||
|
)
|
190
vendor/github.com/goccy/go-json/internal/decoder/uint.go
generated
vendored
Normal file
|
@@ -0,0 +1,190 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type uintDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
kind reflect.Kind
|
||||||
|
op func(unsafe.Pointer, uint64)
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUintDecoder(typ *runtime.Type, structName, fieldName string, op func(unsafe.Pointer, uint64)) *uintDecoder {
|
||||||
|
return &uintDecoder{
|
||||||
|
typ: typ,
|
||||||
|
kind: typ.Kind(),
|
||||||
|
op: op,
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *uintDecoder) typeError(buf []byte, offset int64) *errors.UnmarshalTypeError {
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: fmt.Sprintf("number %s", string(buf)),
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: offset,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
pow10u64 = [...]uint64{
|
||||||
|
1e00, 1e01, 1e02, 1e03, 1e04, 1e05, 1e06, 1e07, 1e08, 1e09,
|
||||||
|
1e10, 1e11, 1e12, 1e13, 1e14, 1e15, 1e16, 1e17, 1e18, 1e19,
|
||||||
|
}
|
||||||
|
pow10u64Len = len(pow10u64)
|
||||||
|
)
|
||||||
|
|
||||||
|
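// parseUint folds ASCII digits into a uint64 by weighting each digit with its positional power of ten from pow10u64; inputs longer than 20 digits are rejected.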
func (d *uintDecoder) parseUint(b []byte) (uint64, error) {
|
||||||
|
maxDigit := len(b)
|
||||||
|
if maxDigit > pow10u64Len {
|
||||||
|
return 0, fmt.Errorf("invalid length of number")
|
||||||
|
}
|
||||||
|
sum := uint64(0)
|
||||||
|
for i := 0; i < maxDigit; i++ {
|
||||||
|
c := uint64(b[i]) - 48
|
||||||
|
digitValue := pow10u64[maxDigit-i-1]
|
||||||
|
sum += c * digitValue
|
||||||
|
}
|
||||||
|
return sum, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *uintDecoder) decodeStreamByte(s *Stream) ([]byte, error) {
|
||||||
|
for {
|
||||||
|
switch s.char() {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
s.cursor++
|
||||||
|
continue
|
||||||
|
case '0':
|
||||||
|
s.cursor++
|
||||||
|
return numZeroBuf, nil
|
||||||
|
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
start := s.cursor
|
||||||
|
for {
|
||||||
|
s.cursor++
|
||||||
|
if numTable[s.char()] {
|
||||||
|
continue
|
||||||
|
} else if s.char() == nul {
|
||||||
|
if s.read() {
|
||||||
|
s.cursor-- // step back so the current character is re-read after the buffer refill
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
num := s.buf[start:s.cursor]
|
||||||
|
return num, nil
|
||||||
|
case 'n':
|
||||||
|
if err := nullBytes(s); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return nil, nil
|
||||||
|
case nul:
|
||||||
|
if s.read() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return nil, d.typeError([]byte{s.char()}, s.totalOffset())
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return nil, errors.ErrUnexpectedEndOfJSON("number(unsigned integer)", s.totalOffset())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *uintDecoder) decodeByte(buf []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
for {
|
||||||
|
switch buf[cursor] {
|
||||||
|
case ' ', '\n', '\t', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case '0':
|
||||||
|
cursor++
|
||||||
|
return numZeroBuf, cursor, nil
|
||||||
|
case '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
start := cursor
|
||||||
|
cursor++
|
||||||
|
for numTable[buf[cursor]] {
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
num := buf[start:cursor]
|
||||||
|
return num, cursor, nil
|
||||||
|
case 'n':
|
||||||
|
if err := validateNull(buf, cursor); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor += 4
|
||||||
|
return nil, cursor, nil
|
||||||
|
default:
|
||||||
|
return nil, 0, d.typeError([]byte{buf[cursor]}, cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *uintDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
bytes, err := d.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
u64, err := d.parseUint(bytes)
|
||||||
|
if err != nil {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
switch d.kind {
|
||||||
|
case reflect.Uint8:
|
||||||
|
if (1 << 8) <= u64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
case reflect.Uint16:
|
||||||
|
if (1 << 16) <= u64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
case reflect.Uint32:
|
||||||
|
if (1 << 32) <= u64 {
|
||||||
|
return d.typeError(bytes, s.totalOffset())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.op(p, u64)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *uintDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
bytes, c, err := d.decodeByte(ctx.Buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
cursor = c
|
||||||
|
u64, err := d.parseUint(bytes)
|
||||||
|
if err != nil {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
switch d.kind {
|
||||||
|
case reflect.Uint8:
|
||||||
|
if (1 << 8) <= u64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
case reflect.Uint16:
|
||||||
|
if (1 << 16) <= u64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
case reflect.Uint32:
|
||||||
|
if (1 << 32) <= u64 {
|
||||||
|
return 0, d.typeError(bytes, cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
d.op(p, u64)
|
||||||
|
return cursor, nil
|
||||||
|
}
|
91
vendor/github.com/goccy/go-json/internal/decoder/unmarshal_json.go
generated
vendored
Normal file
|
@@ -0,0 +1,91 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type unmarshalJSONDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUnmarshalJSONDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalJSONDecoder {
|
||||||
|
return &unmarshalJSONDecoder{
|
||||||
|
typ: typ,
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *unmarshalJSONDecoder) annotateError(cursor int64, err error) {
|
||||||
|
switch e := err.(type) {
|
||||||
|
case *errors.UnmarshalTypeError:
|
||||||
|
e.Struct = d.structName
|
||||||
|
e.Field = d.fieldName
|
||||||
|
case *errors.SyntaxError:
|
||||||
|
e.Offset = cursor
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *unmarshalJSONDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
s.skipWhiteSpace()
|
||||||
|
start := s.cursor
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
src := s.buf[start:s.cursor]
|
||||||
|
dst := make([]byte, len(src))
|
||||||
|
copy(dst, src)
|
||||||
|
|
||||||
|
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: d.typ,
|
||||||
|
ptr: p,
|
||||||
|
}))
|
||||||
|
if (s.Option.Flags & ContextOption) != 0 {
|
||||||
|
if err := v.(unmarshalerContext).UnmarshalJSON(s.Option.Context, dst); err != nil {
|
||||||
|
d.annotateError(s.cursor, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil {
|
||||||
|
d.annotateError(s.cursor, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *unmarshalJSONDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
start := cursor
|
||||||
|
end, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
src := buf[start:end]
|
||||||
|
dst := make([]byte, len(src))
|
||||||
|
copy(dst, src)
|
||||||
|
|
||||||
|
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: d.typ,
|
||||||
|
ptr: p,
|
||||||
|
}))
|
||||||
|
if (ctx.Option.Flags & ContextOption) != 0 {
|
||||||
|
if err := v.(unmarshalerContext).UnmarshalJSON(ctx.Option.Context, dst); err != nil {
|
||||||
|
d.annotateError(cursor, err)
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := v.(json.Unmarshaler).UnmarshalJSON(dst); err != nil {
|
||||||
|
d.annotateError(cursor, err)
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return end, nil
|
||||||
|
}
|
280
vendor/github.com/goccy/go-json/internal/decoder/unmarshal_text.go
generated
vendored
Normal file
|
@@ -0,0 +1,280 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding"
|
||||||
|
"unicode"
|
||||||
|
"unicode/utf16"
|
||||||
|
"unicode/utf8"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type unmarshalTextDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
}
|
||||||
|
|
||||||
|
func newUnmarshalTextDecoder(typ *runtime.Type, structName, fieldName string) *unmarshalTextDecoder {
|
||||||
|
return &unmarshalTextDecoder{
|
||||||
|
typ: typ,
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *unmarshalTextDecoder) annotateError(cursor int64, err error) {
|
||||||
|
switch e := err.(type) {
|
||||||
|
case *errors.UnmarshalTypeError:
|
||||||
|
e.Struct = d.structName
|
||||||
|
e.Field = d.fieldName
|
||||||
|
case *errors.SyntaxError:
|
||||||
|
e.Offset = cursor
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
nullbytes = []byte(`null`)
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *unmarshalTextDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
s.skipWhiteSpace()
|
||||||
|
start := s.cursor
|
||||||
|
if err := s.skipValue(depth); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
src := s.buf[start:s.cursor]
|
||||||
|
if len(src) > 0 {
|
||||||
|
switch src[0] {
|
||||||
|
case '[':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "array",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return &errors.UnmarshalTypeError{
|
||||||
|
Value: "number",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: s.totalOffset(),
|
||||||
|
}
|
||||||
|
case 'n':
|
||||||
|
if bytes.Equal(src, nullbytes) {
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dst := make([]byte, len(src))
|
||||||
|
copy(dst, src)
|
||||||
|
|
||||||
|
if b, ok := unquoteBytes(dst); ok {
|
||||||
|
dst = b
|
||||||
|
}
|
||||||
|
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: d.typ,
|
||||||
|
ptr: p,
|
||||||
|
}))
|
||||||
|
if err := v.(encoding.TextUnmarshaler).UnmarshalText(dst); err != nil {
|
||||||
|
d.annotateError(s.cursor, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *unmarshalTextDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
buf := ctx.Buf
|
||||||
|
cursor = skipWhiteSpace(buf, cursor)
|
||||||
|
start := cursor
|
||||||
|
end, err := skipValue(buf, cursor, depth)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
src := buf[start:end]
|
||||||
|
if len(src) > 0 {
|
||||||
|
switch src[0] {
|
||||||
|
case '[':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "array",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case '{':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "object",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return 0, &errors.UnmarshalTypeError{
|
||||||
|
Value: "number",
|
||||||
|
Type: runtime.RType2Type(d.typ),
|
||||||
|
Offset: start,
|
||||||
|
}
|
||||||
|
case 'n':
|
||||||
|
if bytes.Equal(src, nullbytes) {
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
return end, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if s, ok := unquoteBytes(src); ok {
|
||||||
|
src = s
|
||||||
|
}
|
||||||
|
v := *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: d.typ,
|
||||||
|
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||||
|
}))
|
||||||
|
if err := v.(encoding.TextUnmarshaler).UnmarshalText(src); err != nil {
|
||||||
|
d.annotateError(cursor, err)
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
return end, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
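// unquoteBytes strips the surrounding double quotes and resolves JSON escape sequences, using essentially the same algorithm as encoding/json's unquote.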
func unquoteBytes(s []byte) (t []byte, ok bool) {
|
||||||
|
length := len(s)
|
||||||
|
if length < 2 || s[0] != '"' || s[length-1] != '"' {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
s = s[1 : length-1]
|
||||||
|
length -= 2
|
||||||
|
|
||||||
|
// Check for unusual characters. If there are none,
|
||||||
|
// then no unquoting is needed, so return a slice of the
|
||||||
|
// original bytes.
|
||||||
|
r := 0
|
||||||
|
for r < length {
|
||||||
|
c := s[r]
|
||||||
|
if c == '\\' || c == '"' || c < ' ' {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if c < utf8.RuneSelf {
|
||||||
|
r++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
rr, size := utf8.DecodeRune(s[r:])
|
||||||
|
if rr == utf8.RuneError && size == 1 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
r += size
|
||||||
|
}
|
||||||
|
if r == length {
|
||||||
|
return s, true
|
||||||
|
}
|
||||||
|
|
||||||
|
b := make([]byte, length+2*utf8.UTFMax)
|
||||||
|
w := copy(b, s[0:r])
|
||||||
|
for r < length {
|
||||||
|
// Out of room? Can only happen if s is full of
|
||||||
|
// malformed UTF-8 and we're replacing each
|
||||||
|
// byte with RuneError.
|
||||||
|
if w >= len(b)-2*utf8.UTFMax {
|
||||||
|
nb := make([]byte, (len(b)+utf8.UTFMax)*2)
|
||||||
|
copy(nb, b[0:w])
|
||||||
|
b = nb
|
||||||
|
}
|
||||||
|
switch c := s[r]; {
|
||||||
|
case c == '\\':
|
||||||
|
r++
|
||||||
|
if r >= length {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
switch s[r] {
|
||||||
|
default:
|
||||||
|
return
|
||||||
|
case '"', '\\', '/', '\'':
|
||||||
|
b[w] = s[r]
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 'b':
|
||||||
|
b[w] = '\b'
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 'f':
|
||||||
|
b[w] = '\f'
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 'n':
|
||||||
|
b[w] = '\n'
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 'r':
|
||||||
|
b[w] = '\r'
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 't':
|
||||||
|
b[w] = '\t'
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
case 'u':
|
||||||
|
r--
|
||||||
|
rr := getu4(s[r:])
|
||||||
|
if rr < 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
r += 6
|
||||||
|
if utf16.IsSurrogate(rr) {
|
||||||
|
rr1 := getu4(s[r:])
|
||||||
|
if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
|
||||||
|
// A valid pair; consume.
|
||||||
|
r += 6
|
||||||
|
w += utf8.EncodeRune(b[w:], dec)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
// Invalid surrogate; fall back to replacement rune.
|
||||||
|
rr = unicode.ReplacementChar
|
||||||
|
}
|
||||||
|
w += utf8.EncodeRune(b[w:], rr)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Quote, control characters are invalid.
|
||||||
|
case c == '"', c < ' ':
|
||||||
|
return
|
||||||
|
|
||||||
|
// ASCII
|
||||||
|
case c < utf8.RuneSelf:
|
||||||
|
b[w] = c
|
||||||
|
r++
|
||||||
|
w++
|
||||||
|
|
||||||
|
// Coerce to well-formed UTF-8.
|
||||||
|
default:
|
||||||
|
rr, size := utf8.DecodeRune(s[r:])
|
||||||
|
r += size
|
||||||
|
w += utf8.EncodeRune(b[w:], rr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return b[0:w], true
|
||||||
|
}
|
||||||
|
|
||||||
|
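// getu4 decodes the four hex digits of a \uXXXX escape, returning -1 if the escape is malformed.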
func getu4(s []byte) rune {
|
||||||
|
if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
var r rune
|
||||||
|
for _, c := range s[2:6] {
|
||||||
|
switch {
|
||||||
|
case '0' <= c && c <= '9':
|
||||||
|
c = c - '0'
|
||||||
|
case 'a' <= c && c <= 'f':
|
||||||
|
c = c - 'a' + 10
|
||||||
|
case 'A' <= c && c <= 'F':
|
||||||
|
c = c - 'A' + 10
|
||||||
|
default:
|
||||||
|
return -1
|
||||||
|
}
|
||||||
|
r = r*16 + rune(c)
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}
|
68
vendor/github.com/goccy/go-json/internal/decoder/wrapped_string.go
generated
vendored
Normal file
|
@@ -0,0 +1,68 @@
|
||||||
|
package decoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"reflect"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
type wrappedStringDecoder struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
dec Decoder
|
||||||
|
stringDecoder *stringDecoder
|
||||||
|
structName string
|
||||||
|
fieldName string
|
||||||
|
isPtrType bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newWrappedStringDecoder(typ *runtime.Type, dec Decoder, structName, fieldName string) *wrappedStringDecoder {
|
||||||
|
return &wrappedStringDecoder{
|
||||||
|
typ: typ,
|
||||||
|
dec: dec,
|
||||||
|
stringDecoder: newStringDecoder(structName, fieldName),
|
||||||
|
structName: structName,
|
||||||
|
fieldName: fieldName,
|
||||||
|
isPtrType: typ.Kind() == reflect.Ptr,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *wrappedStringDecoder) DecodeStream(s *Stream, depth int64, p unsafe.Pointer) error {
|
||||||
|
bytes, err := d.stringDecoder.decodeStreamByte(s)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
if d.isPtrType {
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
b := make([]byte, len(bytes)+1)
|
||||||
|
copy(b, bytes)
|
||||||
|
if _, err := d.dec.Decode(&RuntimeContext{Buf: b}, 0, depth, p); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *wrappedStringDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.Pointer) (int64, error) {
|
||||||
|
bytes, c, err := d.stringDecoder.decodeByte(ctx.Buf, cursor)
|
||||||
|
if err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
if bytes == nil {
|
||||||
|
if d.isPtrType {
|
||||||
|
*(*unsafe.Pointer)(p) = nil
|
||||||
|
}
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
bytes = append(bytes, nul)
|
||||||
|
oldBuf := ctx.Buf
|
||||||
|
ctx.Buf = bytes
|
||||||
|
if _, err := d.dec.Decode(ctx, 0, depth, p); err != nil {
|
||||||
|
return 0, err
|
||||||
|
}
|
||||||
|
ctx.Buf = oldBuf
|
||||||
|
return c, nil
|
||||||
|
}
|
286
vendor/github.com/goccy/go-json/internal/encoder/compact.go
generated
vendored
Normal file
|
@@ -0,0 +1,286 @@
|
||||||
|
package encoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"strconv"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/errors"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
isWhiteSpace = [256]bool{
|
||||||
|
' ': true,
|
||||||
|
'\n': true,
|
||||||
|
'\t': true,
|
||||||
|
'\r': true,
|
||||||
|
}
|
||||||
|
isHTMLEscapeChar = [256]bool{
|
||||||
|
'<': true,
|
||||||
|
'>': true,
|
||||||
|
'&': true,
|
||||||
|
}
|
||||||
|
nul = byte('\000')
|
||||||
|
)
|
||||||
|
|
||||||
|
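// Compact writes src into buf with insignificant whitespace removed; when escape is true, '<', '>', and '&' inside strings are written as \u00XX escapes.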
func Compact(buf *bytes.Buffer, src []byte, escape bool) error {
|
||||||
|
if len(src) == 0 {
|
||||||
|
return errors.ErrUnexpectedEndOfJSON("", 0)
|
||||||
|
}
|
||||||
|
buf.Grow(len(src))
|
||||||
|
dst := buf.Bytes()
|
||||||
|
|
||||||
|
ctx := TakeRuntimeContext()
|
||||||
|
ctxBuf := ctx.Buf[:0]
|
||||||
|
ctxBuf = append(append(ctxBuf, src...), nul)
|
||||||
|
ctx.Buf = ctxBuf
|
||||||
|
|
||||||
|
if err := compactAndWrite(buf, dst, ctxBuf, escape); err != nil {
|
||||||
|
ReleaseRuntimeContext(ctx)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
ReleaseRuntimeContext(ctx)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactAndWrite(buf *bytes.Buffer, dst []byte, src []byte, escape bool) error {
|
||||||
|
dst, err := compact(dst, src, escape)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, err := buf.Write(dst); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compact(dst, src []byte, escape bool) ([]byte, error) {
|
||||||
|
buf, cursor, err := compactValue(dst, src, 0, escape)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if err := validateEndBuf(src, cursor); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return buf, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func validateEndBuf(src []byte, cursor int64) error {
|
||||||
|
for {
|
||||||
|
switch src[cursor] {
|
||||||
|
case ' ', '\t', '\n', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case nul:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return errors.ErrSyntax(
|
||||||
|
fmt.Sprintf("invalid character '%c' after top-level value", src[cursor]),
|
||||||
|
cursor+1,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func skipWhiteSpace(buf []byte, cursor int64) int64 {
|
||||||
|
LOOP:
|
||||||
|
if isWhiteSpace[buf[cursor]] {
|
||||||
|
cursor++
|
||||||
|
goto LOOP
|
||||||
|
}
|
||||||
|
return cursor
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactValue(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||||
|
for {
|
||||||
|
switch src[cursor] {
|
||||||
|
case ' ', '\t', '\n', '\r':
|
||||||
|
cursor++
|
||||||
|
continue
|
||||||
|
case '{':
|
||||||
|
return compactObject(dst, src, cursor, escape)
|
||||||
|
case '}':
|
||||||
|
return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor)
|
||||||
|
case '[':
|
||||||
|
return compactArray(dst, src, cursor, escape)
|
||||||
|
case ']':
|
||||||
|
return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor)
|
||||||
|
case '"':
|
||||||
|
return compactString(dst, src, cursor, escape)
|
||||||
|
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
|
||||||
|
return compactNumber(dst, src, cursor)
|
||||||
|
case 't':
|
||||||
|
return compactTrue(dst, src, cursor)
|
||||||
|
case 'f':
|
||||||
|
return compactFalse(dst, src, cursor)
|
||||||
|
case 'n':
|
||||||
|
return compactNull(dst, src, cursor)
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactObject(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||||
|
if src[cursor] == '{' {
|
||||||
|
dst = append(dst, '{')
|
||||||
|
} else {
|
||||||
|
return nil, 0, errors.ErrExpected("expected { character for object value", cursor)
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(src, cursor+1)
|
||||||
|
if src[cursor] == '}' {
|
||||||
|
dst = append(dst, '}')
|
||||||
|
return dst, cursor + 1, nil
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
for {
|
||||||
|
cursor = skipWhiteSpace(src, cursor)
|
||||||
|
dst, cursor, err = compactString(dst, src, cursor, escape)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(src, cursor)
|
||||||
|
if src[cursor] != ':' {
|
||||||
|
return nil, 0, errors.ErrExpected("colon after object key", cursor)
|
||||||
|
}
|
||||||
|
dst = append(dst, ':')
|
||||||
|
dst, cursor, err = compactValue(dst, src, cursor+1, escape)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(src, cursor)
|
||||||
|
switch src[cursor] {
|
||||||
|
case '}':
|
||||||
|
dst = append(dst, '}')
|
||||||
|
cursor++
|
||||||
|
return dst, cursor, nil
|
||||||
|
case ',':
|
||||||
|
dst = append(dst, ',')
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrExpected("comma after object value", cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactArray(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||||
|
if src[cursor] == '[' {
|
||||||
|
dst = append(dst, '[')
|
||||||
|
} else {
|
||||||
|
return nil, 0, errors.ErrExpected("expected [ character for array value", cursor)
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(src, cursor+1)
|
||||||
|
if src[cursor] == ']' {
|
||||||
|
dst = append(dst, ']')
|
||||||
|
return dst, cursor + 1, nil
|
||||||
|
}
|
||||||
|
var err error
|
||||||
|
for {
|
||||||
|
dst, cursor, err = compactValue(dst, src, cursor, escape)
|
||||||
|
if err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
cursor = skipWhiteSpace(src, cursor)
|
||||||
|
switch src[cursor] {
|
||||||
|
case ']':
|
||||||
|
dst = append(dst, ']')
|
||||||
|
cursor++
|
||||||
|
return dst, cursor, nil
|
||||||
|
case ',':
|
||||||
|
dst = append(dst, ',')
|
||||||
|
default:
|
||||||
|
return nil, 0, errors.ErrExpected("comma after array value", cursor)
|
||||||
|
}
|
||||||
|
cursor++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactString(dst, src []byte, cursor int64, escape bool) ([]byte, int64, error) {
|
||||||
|
if src[cursor] != '"' {
|
||||||
|
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "string", cursor)
|
||||||
|
}
|
||||||
|
start := cursor
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
c := src[cursor]
|
||||||
|
if escape {
|
||||||
|
if isHTMLEscapeChar[c] {
|
||||||
|
dst = append(dst, src[start:cursor]...)
|
||||||
|
dst = append(dst, `\u00`...)
|
||||||
|
dst = append(dst, hex[c>>4], hex[c&0xF])
|
||||||
|
start = cursor + 1
|
||||||
|
} else if c == 0xE2 && cursor+2 < int64(len(src)) && src[cursor+1] == 0x80 && src[cursor+2]&^1 == 0xA8 {
|
||||||
|
dst = append(dst, src[start:cursor]...)
|
||||||
|
dst = append(dst, `\u202`...)
|
||||||
|
dst = append(dst, hex[src[cursor+2]&0xF])
|
||||||
|
cursor += 2
|
||||||
|
start = cursor + 3
|
||||||
|
}
|
||||||
|
}
|
||||||
|
switch c {
|
||||||
|
case '\\':
|
||||||
|
cursor++
|
||||||
|
if src[cursor] == nul {
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
|
||||||
|
}
|
||||||
|
case '"':
|
||||||
|
cursor++
|
||||||
|
return append(dst, src[start:cursor]...), cursor, nil
|
||||||
|
case nul:
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("string", int64(len(src)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactNumber(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
start := cursor
|
||||||
|
for {
|
||||||
|
cursor++
|
||||||
|
if floatTable[src[cursor]] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
num := src[start:cursor]
|
||||||
|
if _, err := strconv.ParseFloat(*(*string)(unsafe.Pointer(&num)), 64); err != nil {
|
||||||
|
return nil, 0, err
|
||||||
|
}
|
||||||
|
dst = append(dst, num...)
|
||||||
|
return dst, cursor, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactTrue(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
if cursor+3 >= int64(len(src)) {
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("true", cursor)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(src[cursor:cursor+4], []byte(`true`)) {
|
||||||
|
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "true", cursor)
|
||||||
|
}
|
||||||
|
dst = append(dst, "true"...)
|
||||||
|
cursor += 4
|
||||||
|
return dst, cursor, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactFalse(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
if cursor+4 >= int64(len(src)) {
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("false", cursor)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(src[cursor:cursor+5], []byte(`false`)) {
|
||||||
|
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "false", cursor)
|
||||||
|
}
|
||||||
|
dst = append(dst, "false"...)
|
||||||
|
cursor += 5
|
||||||
|
return dst, cursor, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func compactNull(dst, src []byte, cursor int64) ([]byte, int64, error) {
|
||||||
|
if cursor+3 >= int64(len(src)) {
|
||||||
|
return nil, 0, errors.ErrUnexpectedEndOfJSON("null", cursor)
|
||||||
|
}
|
||||||
|
if !bytes.Equal(src[cursor:cursor+4], []byte(`null`)) {
|
||||||
|
return nil, 0, errors.ErrInvalidCharacter(src[cursor], "null", cursor)
|
||||||
|
}
|
||||||
|
dst = append(dst, "null"...)
|
||||||
|
cursor += 4
|
||||||
|
return dst, cursor, nil
|
||||||
|
}
|
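Editor's note: the vendored file above implements whitespace stripping (and, together with indent.go further down, re-indentation) over a nul-terminated buffer. Its observable behaviour matches the standard library's Compact/Indent pair, which the go-json public API is a drop-in for. A minimal sketch using encoding/json, whose signatures are certain; swapping the import for the drop-in package is an assumption left to the reader:

// Illustrative only: the Compact/Indent behaviour backed by the vendored code
// above, shown through the standard library's equivalent API.
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
)

func main() {
    src := []byte("{\n  \"user\": \"alice\",\n  \"admin\": true\n}")

    var compacted bytes.Buffer
    if err := json.Compact(&compacted, src); err != nil {
        panic(err)
    }
    fmt.Println(compacted.String()) // {"user":"alice","admin":true}

    var indented bytes.Buffer
    if err := json.Indent(&indented, compacted.Bytes(), "", "  "); err != nil {
        panic(err)
    }
    fmt.Println(indented.String())
}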
1570
vendor/github.com/goccy/go-json/internal/encoder/compiler.go
generated
vendored
Normal file
File diff suppressed because it is too large
56
vendor/github.com/goccy/go-json/internal/encoder/compiler_norace.go
generated
vendored
Normal file
@@ -0,0 +1,56 @@
// +build !race

package encoder

import (
    "unsafe"

    "github.com/goccy/go-json/internal/runtime"
)

func CompileToGetCodeSet(typeptr uintptr) (*OpcodeSet, error) {
    if typeptr > typeAddr.MaxTypeAddr {
        return compileToGetCodeSetSlowPath(typeptr)
    }
    index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
    if codeSet := cachedOpcodeSets[index]; codeSet != nil {
        return codeSet, nil
    }

    // noescape trick for header.typ ( reflect.*rtype )
    copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))

    noescapeKeyCode, err := compileHead(&compileContext{
        typ:                      copiedType,
        structTypeToCompiledCode: map[uintptr]*CompiledCode{},
    })
    if err != nil {
        return nil, err
    }
    escapeKeyCode, err := compileHead(&compileContext{
        typ:                      copiedType,
        structTypeToCompiledCode: map[uintptr]*CompiledCode{},
        escapeKey:                true,
    })
    if err != nil {
        return nil, err
    }
    noescapeKeyCode = copyOpcode(noescapeKeyCode)
    escapeKeyCode = copyOpcode(escapeKeyCode)
    setTotalLengthToInterfaceOp(noescapeKeyCode)
    setTotalLengthToInterfaceOp(escapeKeyCode)
    interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode)
    interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode)
    codeLength := noescapeKeyCode.TotalLength()
    codeSet := &OpcodeSet{
        Type:                     copiedType,
        NoescapeKeyCode:          noescapeKeyCode,
        EscapeKeyCode:            escapeKeyCode,
        InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode,
        InterfaceEscapeKeyCode:   interfaceEscapeKeyCode,
        CodeLength:               codeLength,
        EndCode:                  ToEndCode(interfaceNoescapeKeyCode),
    }
    cachedOpcodeSets[index] = codeSet
    return codeSet, nil
}
65
vendor/github.com/goccy/go-json/internal/encoder/compiler_race.go
generated
vendored
Normal file
@@ -0,0 +1,65 @@
// +build race

package encoder

import (
    "sync"
    "unsafe"

    "github.com/goccy/go-json/internal/runtime"
)

var setsMu sync.RWMutex

func CompileToGetCodeSet(typeptr uintptr) (*OpcodeSet, error) {
    if typeptr > typeAddr.MaxTypeAddr {
        return compileToGetCodeSetSlowPath(typeptr)
    }
    index := (typeptr - typeAddr.BaseTypeAddr) >> typeAddr.AddrShift
    setsMu.RLock()
    if codeSet := cachedOpcodeSets[index]; codeSet != nil {
        setsMu.RUnlock()
        return codeSet, nil
    }
    setsMu.RUnlock()

    // noescape trick for header.typ ( reflect.*rtype )
    copiedType := *(**runtime.Type)(unsafe.Pointer(&typeptr))

    noescapeKeyCode, err := compileHead(&compileContext{
        typ:                      copiedType,
        structTypeToCompiledCode: map[uintptr]*CompiledCode{},
    })
    if err != nil {
        return nil, err
    }
    escapeKeyCode, err := compileHead(&compileContext{
        typ:                      copiedType,
        structTypeToCompiledCode: map[uintptr]*CompiledCode{},
        escapeKey:                true,
    })
    if err != nil {
        return nil, err
    }

    noescapeKeyCode = copyOpcode(noescapeKeyCode)
    escapeKeyCode = copyOpcode(escapeKeyCode)
    setTotalLengthToInterfaceOp(noescapeKeyCode)
    setTotalLengthToInterfaceOp(escapeKeyCode)
    interfaceNoescapeKeyCode := copyToInterfaceOpcode(noescapeKeyCode)
    interfaceEscapeKeyCode := copyToInterfaceOpcode(escapeKeyCode)
    codeLength := noescapeKeyCode.TotalLength()
    codeSet := &OpcodeSet{
        Type:                     copiedType,
        NoescapeKeyCode:          noescapeKeyCode,
        EscapeKeyCode:            escapeKeyCode,
        InterfaceNoescapeKeyCode: interfaceNoescapeKeyCode,
        InterfaceEscapeKeyCode:   interfaceEscapeKeyCode,
        CodeLength:               codeLength,
        EndCode:                  ToEndCode(interfaceNoescapeKeyCode),
    }
    setsMu.Lock()
    cachedOpcodeSets[index] = codeSet
    setsMu.Unlock()
    return codeSet, nil
}
141
vendor/github.com/goccy/go-json/internal/encoder/context.go
generated
vendored
Normal file
@@ -0,0 +1,141 @@
package encoder

import (
    "context"
    "sync"
    "unsafe"

    "github.com/goccy/go-json/internal/runtime"
)

type compileContext struct {
    typ                      *runtime.Type
    opcodeIndex              uint32
    ptrIndex                 int
    indent                   uint32
    escapeKey                bool
    structTypeToCompiledCode map[uintptr]*CompiledCode

    parent *compileContext
}

func (c *compileContext) context() *compileContext {
    return &compileContext{
        typ:                      c.typ,
        opcodeIndex:              c.opcodeIndex,
        ptrIndex:                 c.ptrIndex,
        indent:                   c.indent,
        escapeKey:                c.escapeKey,
        structTypeToCompiledCode: c.structTypeToCompiledCode,
        parent:                   c,
    }
}

func (c *compileContext) withType(typ *runtime.Type) *compileContext {
    ctx := c.context()
    ctx.typ = typ
    return ctx
}

func (c *compileContext) incIndent() *compileContext {
    ctx := c.context()
    ctx.indent++
    return ctx
}

func (c *compileContext) decIndent() *compileContext {
    ctx := c.context()
    ctx.indent--
    return ctx
}

func (c *compileContext) incIndex() {
    c.incOpcodeIndex()
    c.incPtrIndex()
}

func (c *compileContext) decIndex() {
    c.decOpcodeIndex()
    c.decPtrIndex()
}

func (c *compileContext) incOpcodeIndex() {
    c.opcodeIndex++
    if c.parent != nil {
        c.parent.incOpcodeIndex()
    }
}

func (c *compileContext) decOpcodeIndex() {
    c.opcodeIndex--
    if c.parent != nil {
        c.parent.decOpcodeIndex()
    }
}

func (c *compileContext) incPtrIndex() {
    c.ptrIndex++
    if c.parent != nil {
        c.parent.incPtrIndex()
    }
}

func (c *compileContext) decPtrIndex() {
    c.ptrIndex--
    if c.parent != nil {
        c.parent.decPtrIndex()
    }
}

const (
    bufSize = 1024
)

var (
    runtimeContextPool = sync.Pool{
        New: func() interface{} {
            return &RuntimeContext{
                Buf:      make([]byte, 0, bufSize),
                Ptrs:     make([]uintptr, 128),
                KeepRefs: make([]unsafe.Pointer, 0, 8),
                Option:   &Option{},
            }
        },
    }
)

type RuntimeContext struct {
    Context    context.Context
    Buf        []byte
    MarshalBuf []byte
    Ptrs       []uintptr
    KeepRefs   []unsafe.Pointer
    SeenPtr    []uintptr
    BaseIndent uint32
    Prefix     []byte
    IndentStr  []byte
    Option     *Option
}

func (c *RuntimeContext) Init(p uintptr, codelen int) {
    if len(c.Ptrs) < codelen {
        c.Ptrs = make([]uintptr, codelen)
    }
    c.Ptrs[0] = p
    c.KeepRefs = c.KeepRefs[:0]
    c.SeenPtr = c.SeenPtr[:0]
    c.BaseIndent = 0
}

func (c *RuntimeContext) Ptr() uintptr {
    header := (*runtime.SliceHeader)(unsafe.Pointer(&c.Ptrs))
    return uintptr(header.Data)
}

func TakeRuntimeContext() *RuntimeContext {
    return runtimeContextPool.Get().(*RuntimeContext)
}

func ReleaseRuntimeContext(ctx *RuntimeContext) {
    runtimeContextPool.Put(ctx)
}
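Editor's note: TakeRuntimeContext/ReleaseRuntimeContext above is a standard sync.Pool reuse pattern for per-call scratch buffers. A minimal, self-contained sketch of the same pattern with hypothetical names (not part of the vendored code):

// Illustrative only: the take/release pooling pattern used by RuntimeContext above.
package main

import (
    "fmt"
    "sync"
)

type scratch struct {
    buf []byte
}

var scratchPool = sync.Pool{
    New: func() interface{} {
        return &scratch{buf: make([]byte, 0, 1024)}
    },
}

func render(s string) string {
    sc := scratchPool.Get().(*scratch) // take
    defer scratchPool.Put(sc)          // release for reuse
    sc.buf = append(sc.buf[:0], s...)  // reset length, keep capacity
    return string(sc.buf)
}

func main() {
    fmt.Println(render("pooled buffers avoid per-call allocations"))
}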
551
vendor/github.com/goccy/go-json/internal/encoder/encoder.go
generated
vendored
Normal file
@@ -0,0 +1,551 @@
package encoder

import (
    "bytes"
    "encoding"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "math"
    "reflect"
    "strconv"
    "strings"
    "sync"
    "unsafe"

    "github.com/goccy/go-json/internal/errors"
    "github.com/goccy/go-json/internal/runtime"
)

func (t OpType) IsMultipleOpHead() bool {
    switch t {
    case OpStructHead:
        return true
    case OpStructHeadSlice:
        return true
    case OpStructHeadArray:
        return true
    case OpStructHeadMap:
        return true
    case OpStructHeadStruct:
        return true
    case OpStructHeadOmitEmpty:
        return true
    case OpStructHeadOmitEmptySlice:
        return true
    case OpStructHeadOmitEmptyArray:
        return true
    case OpStructHeadOmitEmptyMap:
        return true
    case OpStructHeadOmitEmptyStruct:
        return true
    case OpStructHeadSlicePtr:
        return true
    case OpStructHeadOmitEmptySlicePtr:
        return true
    case OpStructHeadArrayPtr:
        return true
    case OpStructHeadOmitEmptyArrayPtr:
        return true
    case OpStructHeadMapPtr:
        return true
    case OpStructHeadOmitEmptyMapPtr:
        return true
    }
    return false
}

func (t OpType) IsMultipleOpField() bool {
    switch t {
    case OpStructField:
        return true
    case OpStructFieldSlice:
        return true
    case OpStructFieldArray:
        return true
    case OpStructFieldMap:
        return true
    case OpStructFieldStruct:
        return true
    case OpStructFieldOmitEmpty:
        return true
    case OpStructFieldOmitEmptySlice:
        return true
    case OpStructFieldOmitEmptyArray:
        return true
    case OpStructFieldOmitEmptyMap:
        return true
    case OpStructFieldOmitEmptyStruct:
        return true
    case OpStructFieldSlicePtr:
        return true
    case OpStructFieldOmitEmptySlicePtr:
        return true
    case OpStructFieldArrayPtr:
        return true
    case OpStructFieldOmitEmptyArrayPtr:
        return true
    case OpStructFieldMapPtr:
        return true
    case OpStructFieldOmitEmptyMapPtr:
        return true
    }
    return false
}

type OpcodeSet struct {
    Type                     *runtime.Type
    NoescapeKeyCode          *Opcode
    EscapeKeyCode            *Opcode
    InterfaceNoescapeKeyCode *Opcode
    InterfaceEscapeKeyCode   *Opcode
    CodeLength               int
    EndCode                  *Opcode
}

type CompiledCode struct {
    Code    *Opcode
    Linked  bool // whether recursive code already have linked
    CurLen  uintptr
    NextLen uintptr
}

const StartDetectingCyclesAfter = 1000

func Load(base uintptr, idx uintptr) uintptr {
    addr := base + idx
    return **(**uintptr)(unsafe.Pointer(&addr))
}

func Store(base uintptr, idx uintptr, p uintptr) {
    addr := base + idx
    **(**uintptr)(unsafe.Pointer(&addr)) = p
}

func LoadNPtr(base uintptr, idx uintptr, ptrNum int) uintptr {
    addr := base + idx
    p := **(**uintptr)(unsafe.Pointer(&addr))
    if p == 0 {
        return 0
    }
    return PtrToPtr(p)
    /*
        for i := 0; i < ptrNum; i++ {
            if p == 0 {
                return p
            }
            p = PtrToPtr(p)
        }
        return p
    */
}

func PtrToUint64(p uintptr) uint64              { return **(**uint64)(unsafe.Pointer(&p)) }
func PtrToFloat32(p uintptr) float32            { return **(**float32)(unsafe.Pointer(&p)) }
func PtrToFloat64(p uintptr) float64            { return **(**float64)(unsafe.Pointer(&p)) }
func PtrToBool(p uintptr) bool                  { return **(**bool)(unsafe.Pointer(&p)) }
func PtrToBytes(p uintptr) []byte               { return **(**[]byte)(unsafe.Pointer(&p)) }
func PtrToNumber(p uintptr) json.Number         { return **(**json.Number)(unsafe.Pointer(&p)) }
func PtrToString(p uintptr) string              { return **(**string)(unsafe.Pointer(&p)) }
func PtrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
func PtrToPtr(p uintptr) uintptr {
    return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
}
func PtrToNPtr(p uintptr, ptrNum int) uintptr {
    for i := 0; i < ptrNum; i++ {
        if p == 0 {
            return 0
        }
        p = PtrToPtr(p)
    }
    return p
}

func PtrToUnsafePtr(p uintptr) unsafe.Pointer {
    return *(*unsafe.Pointer)(unsafe.Pointer(&p))
}
func PtrToInterface(code *Opcode, p uintptr) interface{} {
    return *(*interface{})(unsafe.Pointer(&emptyInterface{
        typ: code.Type,
        ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
    }))
}

func ErrUnsupportedValue(code *Opcode, ptr uintptr) *errors.UnsupportedValueError {
    v := *(*interface{})(unsafe.Pointer(&emptyInterface{
        typ: code.Type,
        ptr: *(*unsafe.Pointer)(unsafe.Pointer(&ptr)),
    }))
    return &errors.UnsupportedValueError{
        Value: reflect.ValueOf(v),
        Str:   fmt.Sprintf("encountered a cycle via %s", code.Type),
    }
}

func ErrUnsupportedFloat(v float64) *errors.UnsupportedValueError {
    return &errors.UnsupportedValueError{
        Value: reflect.ValueOf(v),
        Str:   strconv.FormatFloat(v, 'g', -1, 64),
    }
}

func ErrMarshalerWithCode(code *Opcode, err error) *errors.MarshalerError {
    return &errors.MarshalerError{
        Type: runtime.RType2Type(code.Type),
        Err:  err,
    }
}

type emptyInterface struct {
    typ *runtime.Type
    ptr unsafe.Pointer
}

type MapItem struct {
    Key   []byte
    Value []byte
}

type Mapslice struct {
    Items []MapItem
}

func (m *Mapslice) Len() int {
    return len(m.Items)
}

func (m *Mapslice) Less(i, j int) bool {
    return bytes.Compare(m.Items[i].Key, m.Items[j].Key) < 0
}

func (m *Mapslice) Swap(i, j int) {
    m.Items[i], m.Items[j] = m.Items[j], m.Items[i]
}

type MapContext struct {
    Pos   []int
    Slice *Mapslice
    Buf   []byte
}

var mapContextPool = sync.Pool{
    New: func() interface{} {
        return &MapContext{}
    },
}

func NewMapContext(mapLen int) *MapContext {
    ctx := mapContextPool.Get().(*MapContext)
    if ctx.Slice == nil {
        ctx.Slice = &Mapslice{
            Items: make([]MapItem, 0, mapLen),
        }
    }
    if cap(ctx.Pos) < (mapLen*2 + 1) {
        ctx.Pos = make([]int, 0, mapLen*2+1)
        ctx.Slice.Items = make([]MapItem, 0, mapLen)
    } else {
        ctx.Pos = ctx.Pos[:0]
        ctx.Slice.Items = ctx.Slice.Items[:0]
    }
    ctx.Buf = ctx.Buf[:0]
    return ctx
}

func ReleaseMapContext(c *MapContext) {
    mapContextPool.Put(c)
}

//go:linkname MapIterInit reflect.mapiterinit
//go:noescape
func MapIterInit(mapType *runtime.Type, m unsafe.Pointer) unsafe.Pointer

//go:linkname MapIterKey reflect.mapiterkey
//go:noescape
func MapIterKey(it unsafe.Pointer) unsafe.Pointer

//go:linkname MapIterNext reflect.mapiternext
//go:noescape
func MapIterNext(it unsafe.Pointer)

//go:linkname MapLen reflect.maplen
//go:noescape
func MapLen(m unsafe.Pointer) int

func AppendByteSlice(_ *RuntimeContext, b []byte, src []byte) []byte {
    if src == nil {
        return append(b, `null`...)
    }
    encodedLen := base64.StdEncoding.EncodedLen(len(src))
    b = append(b, '"')
    pos := len(b)
    remainLen := cap(b[pos:])
    var buf []byte
    if remainLen > encodedLen {
        buf = b[pos : pos+encodedLen]
    } else {
        buf = make([]byte, encodedLen)
    }
    base64.StdEncoding.Encode(buf, src)
    return append(append(b, buf...), '"')
}

func AppendFloat32(_ *RuntimeContext, b []byte, v float32) []byte {
    f64 := float64(v)
    abs := math.Abs(f64)
    fmt := byte('f')
    // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
    if abs != 0 {
        f32 := float32(abs)
        if f32 < 1e-6 || f32 >= 1e21 {
            fmt = 'e'
        }
    }
    return strconv.AppendFloat(b, f64, fmt, -1, 32)
}

func AppendFloat64(_ *RuntimeContext, b []byte, v float64) []byte {
    abs := math.Abs(v)
    fmt := byte('f')
    // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
    if abs != 0 {
        if abs < 1e-6 || abs >= 1e21 {
            fmt = 'e'
        }
    }
    return strconv.AppendFloat(b, v, fmt, -1, 64)
}

func AppendBool(_ *RuntimeContext, b []byte, v bool) []byte {
    if v {
        return append(b, "true"...)
    }
    return append(b, "false"...)
}

var (
    floatTable = [256]bool{
        '0': true,
        '1': true,
        '2': true,
        '3': true,
        '4': true,
        '5': true,
        '6': true,
        '7': true,
        '8': true,
        '9': true,
        '.': true,
        'e': true,
        'E': true,
        '+': true,
        '-': true,
    }
)

func AppendNumber(_ *RuntimeContext, b []byte, n json.Number) ([]byte, error) {
    if len(n) == 0 {
        return append(b, '0'), nil
    }
    for i := 0; i < len(n); i++ {
        if !floatTable[n[i]] {
            return nil, fmt.Errorf("json: invalid number literal %q", n)
        }
    }
    b = append(b, n...)
    return b, nil
}

func AppendMarshalJSON(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
    rv := reflect.ValueOf(v) // convert by dynamic interface type
    if (code.Flags & AddrForMarshalerFlags) != 0 {
        if rv.CanAddr() {
            rv = rv.Addr()
        } else {
            newV := reflect.New(rv.Type())
            newV.Elem().Set(rv)
            rv = newV
        }
    }
    v = rv.Interface()
    var bb []byte
    if (code.Flags & MarshalerContextFlags) != 0 {
        marshaler, ok := v.(marshalerContext)
        if !ok {
            return AppendNull(ctx, b), nil
        }
        b, err := marshaler.MarshalJSON(ctx.Option.Context)
        if err != nil {
            return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
        }
        bb = b
    } else {
        marshaler, ok := v.(json.Marshaler)
        if !ok {
            return AppendNull(ctx, b), nil
        }
        b, err := marshaler.MarshalJSON()
        if err != nil {
            return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
        }
        bb = b
    }
    marshalBuf := ctx.MarshalBuf[:0]
    marshalBuf = append(append(marshalBuf, bb...), nul)
    compactedBuf, err := compact(b, marshalBuf, (ctx.Option.Flag&HTMLEscapeOption) != 0)
    if err != nil {
        return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
    }
    ctx.MarshalBuf = marshalBuf
    return compactedBuf, nil
}

func AppendMarshalJSONIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
    rv := reflect.ValueOf(v) // convert by dynamic interface type
    if (code.Flags & AddrForMarshalerFlags) != 0 {
        if rv.CanAddr() {
            rv = rv.Addr()
        } else {
            newV := reflect.New(rv.Type())
            newV.Elem().Set(rv)
            rv = newV
        }
    }
    v = rv.Interface()
    var bb []byte
    if (code.Flags & MarshalerContextFlags) != 0 {
        marshaler, ok := v.(marshalerContext)
        if !ok {
            return AppendNull(ctx, b), nil
        }
        b, err := marshaler.MarshalJSON(ctx.Option.Context)
        if err != nil {
            return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
        }
        bb = b
    } else {
        marshaler, ok := v.(json.Marshaler)
        if !ok {
            return AppendNull(ctx, b), nil
        }
        b, err := marshaler.MarshalJSON()
        if err != nil {
            return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
        }
        bb = b
    }
    marshalBuf := ctx.MarshalBuf[:0]
    marshalBuf = append(append(marshalBuf, bb...), nul)
    indentedBuf, err := doIndent(
        b,
        marshalBuf,
        string(ctx.Prefix)+strings.Repeat(string(ctx.IndentStr), int(ctx.BaseIndent+code.Indent)),
        string(ctx.IndentStr),
        (ctx.Option.Flag&HTMLEscapeOption) != 0,
    )
    if err != nil {
        return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
    }
    ctx.MarshalBuf = marshalBuf
    return indentedBuf, nil
}

func AppendMarshalText(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
    rv := reflect.ValueOf(v) // convert by dynamic interface type
    if (code.Flags & AddrForMarshalerFlags) != 0 {
        if rv.CanAddr() {
            rv = rv.Addr()
        } else {
            newV := reflect.New(rv.Type())
            newV.Elem().Set(rv)
            rv = newV
        }
    }
    v = rv.Interface()
    marshaler, ok := v.(encoding.TextMarshaler)
    if !ok {
        return AppendNull(ctx, b), nil
    }
    bytes, err := marshaler.MarshalText()
    if err != nil {
        return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
    }
    return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil
}

func AppendMarshalTextIndent(ctx *RuntimeContext, code *Opcode, b []byte, v interface{}) ([]byte, error) {
    rv := reflect.ValueOf(v) // convert by dynamic interface type
    if (code.Flags & AddrForMarshalerFlags) != 0 {
        if rv.CanAddr() {
            rv = rv.Addr()
        } else {
            newV := reflect.New(rv.Type())
            newV.Elem().Set(rv)
            rv = newV
        }
    }
    v = rv.Interface()
    marshaler, ok := v.(encoding.TextMarshaler)
    if !ok {
        return AppendNull(ctx, b), nil
    }
    bytes, err := marshaler.MarshalText()
    if err != nil {
        return nil, &errors.MarshalerError{Type: reflect.TypeOf(v), Err: err}
    }
    return AppendString(ctx, b, *(*string)(unsafe.Pointer(&bytes))), nil
}

func AppendNull(_ *RuntimeContext, b []byte) []byte {
    return append(b, "null"...)
}

func AppendComma(_ *RuntimeContext, b []byte) []byte {
    return append(b, ',')
}

func AppendCommaIndent(_ *RuntimeContext, b []byte) []byte {
    return append(b, ',', '\n')
}

func AppendStructEnd(_ *RuntimeContext, b []byte) []byte {
    return append(b, '}', ',')
}

func AppendStructEndIndent(ctx *RuntimeContext, code *Opcode, b []byte) []byte {
    b = append(b, '\n')
    b = append(b, ctx.Prefix...)
    indentNum := ctx.BaseIndent + code.Indent - 1
    for i := uint32(0); i < indentNum; i++ {
        b = append(b, ctx.IndentStr...)
    }
    return append(b, '}', ',', '\n')
}

func AppendIndent(ctx *RuntimeContext, b []byte, indent uint32) []byte {
    b = append(b, ctx.Prefix...)
    indentNum := ctx.BaseIndent + indent
    for i := uint32(0); i < indentNum; i++ {
        b = append(b, ctx.IndentStr...)
    }
    return b
}

func IsNilForMarshaler(v interface{}) bool {
    rv := reflect.ValueOf(v)
    switch rv.Kind() {
    case reflect.Bool:
        return !rv.Bool()
    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
        return rv.Int() == 0
    case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
        return rv.Uint() == 0
    case reflect.Float32, reflect.Float64:
        return math.Float64bits(rv.Float()) == 0
    case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Func:
        return rv.IsNil()
    case reflect.Slice:
        return rv.IsNil() || rv.Len() == 0
    }
    return false
}
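Editor's note: the AppendMarshalJSON/AppendMarshalText helpers above dispatch to values that implement the standard json.Marshaler and encoding.TextMarshaler interfaces. A minimal sketch of such a value, with a hypothetical type name and only standard-library APIs; when a type implements both, json.Marshal prefers MarshalJSON:

// Illustrative only: a value implementing the standard interfaces that the
// AppendMarshalJSON / AppendMarshalText fast paths above look for.
package main

import (
    "encoding/json"
    "fmt"
)

type Temperature float64

// MarshalJSON satisfies json.Marshaler.
func (t Temperature) MarshalJSON() ([]byte, error) {
    return []byte(fmt.Sprintf(`{"celsius":%.1f}`, float64(t))), nil
}

// MarshalText satisfies encoding.TextMarshaler.
func (t Temperature) MarshalText() ([]byte, error) {
    return []byte(fmt.Sprintf("%.1fC", float64(t))), nil
}

func main() {
    out, err := json.Marshal(Temperature(21.5))
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // {"celsius":21.5}
}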
211
vendor/github.com/goccy/go-json/internal/encoder/indent.go
generated
vendored
Normal file
@@ -0,0 +1,211 @@
package encoder

import (
    "bytes"
    "fmt"

    "github.com/goccy/go-json/internal/errors"
)

func takeIndentSrcRuntimeContext(src []byte) (*RuntimeContext, []byte) {
    ctx := TakeRuntimeContext()
    buf := ctx.Buf[:0]
    buf = append(append(buf, src...), nul)
    ctx.Buf = buf
    return ctx, buf
}

func Indent(buf *bytes.Buffer, src []byte, prefix, indentStr string) error {
    if len(src) == 0 {
        return errors.ErrUnexpectedEndOfJSON("", 0)
    }

    srcCtx, srcBuf := takeIndentSrcRuntimeContext(src)
    dstCtx := TakeRuntimeContext()
    dst := dstCtx.Buf[:0]

    dst, err := indentAndWrite(buf, dst, srcBuf, prefix, indentStr)
    if err != nil {
        ReleaseRuntimeContext(srcCtx)
        ReleaseRuntimeContext(dstCtx)
        return err
    }
    dstCtx.Buf = dst
    ReleaseRuntimeContext(srcCtx)
    ReleaseRuntimeContext(dstCtx)
    return nil
}

func indentAndWrite(buf *bytes.Buffer, dst []byte, src []byte, prefix, indentStr string) ([]byte, error) {
    dst, err := doIndent(dst, src, prefix, indentStr, false)
    if err != nil {
        return nil, err
    }
    if _, err := buf.Write(dst); err != nil {
        return nil, err
    }
    return dst, nil
}

func doIndent(dst, src []byte, prefix, indentStr string, escape bool) ([]byte, error) {
    buf, cursor, err := indentValue(dst, src, 0, 0, []byte(prefix), []byte(indentStr), escape)
    if err != nil {
        return nil, err
    }
    if err := validateEndBuf(src, cursor); err != nil {
        return nil, err
    }
    return buf, nil
}

func indentValue(
    dst []byte,
    src []byte,
    indentNum int,
    cursor int64,
    prefix []byte,
    indentBytes []byte,
    escape bool) ([]byte, int64, error) {
    for {
        switch src[cursor] {
        case ' ', '\t', '\n', '\r':
            cursor++
            continue
        case '{':
            return indentObject(dst, src, indentNum, cursor, prefix, indentBytes, escape)
        case '}':
            return nil, 0, errors.ErrSyntax("unexpected character '}'", cursor)
        case '[':
            return indentArray(dst, src, indentNum, cursor, prefix, indentBytes, escape)
        case ']':
            return nil, 0, errors.ErrSyntax("unexpected character ']'", cursor)
        case '"':
            return compactString(dst, src, cursor, escape)
        case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
            return compactNumber(dst, src, cursor)
        case 't':
            return compactTrue(dst, src, cursor)
        case 'f':
            return compactFalse(dst, src, cursor)
        case 'n':
            return compactNull(dst, src, cursor)
        default:
            return nil, 0, errors.ErrSyntax(fmt.Sprintf("unexpected character '%c'", src[cursor]), cursor)
        }
    }
}

func indentObject(
    dst []byte,
    src []byte,
    indentNum int,
    cursor int64,
    prefix []byte,
    indentBytes []byte,
    escape bool) ([]byte, int64, error) {
    if src[cursor] == '{' {
        dst = append(dst, '{')
    } else {
        return nil, 0, errors.ErrExpected("expected { character for object value", cursor)
    }
    cursor = skipWhiteSpace(src, cursor+1)
    if src[cursor] == '}' {
        dst = append(dst, '}')
        return dst, cursor + 1, nil
    }
    indentNum++
    var err error
    for {
        dst = append(append(dst, '\n'), prefix...)
        for i := 0; i < indentNum; i++ {
            dst = append(dst, indentBytes...)
        }
        cursor = skipWhiteSpace(src, cursor)
        dst, cursor, err = compactString(dst, src, cursor, escape)
        if err != nil {
            return nil, 0, err
        }
        cursor = skipWhiteSpace(src, cursor)
        if src[cursor] != ':' {
            return nil, 0, errors.ErrSyntax(
                fmt.Sprintf("invalid character '%c' after object key", src[cursor]),
                cursor+1,
            )
        }
        dst = append(dst, ':', ' ')
        dst, cursor, err = indentValue(dst, src, indentNum, cursor+1, prefix, indentBytes, escape)
        if err != nil {
            return nil, 0, err
        }
        cursor = skipWhiteSpace(src, cursor)
        switch src[cursor] {
        case '}':
            dst = append(append(dst, '\n'), prefix...)
            for i := 0; i < indentNum-1; i++ {
                dst = append(dst, indentBytes...)
            }
            dst = append(dst, '}')
            cursor++
            return dst, cursor, nil
        case ',':
            dst = append(dst, ',')
        default:
            return nil, 0, errors.ErrSyntax(
                fmt.Sprintf("invalid character '%c' after object key:value pair", src[cursor]),
                cursor+1,
            )
        }
        cursor++
    }
}

func indentArray(
    dst []byte,
    src []byte,
    indentNum int,
    cursor int64,
    prefix []byte,
    indentBytes []byte,
    escape bool) ([]byte, int64, error) {
    if src[cursor] == '[' {
        dst = append(dst, '[')
    } else {
        return nil, 0, errors.ErrExpected("expected [ character for array value", cursor)
    }
    cursor = skipWhiteSpace(src, cursor+1)
    if src[cursor] == ']' {
        dst = append(dst, ']')
        return dst, cursor + 1, nil
    }
    indentNum++
    var err error
    for {
        dst = append(append(dst, '\n'), prefix...)
        for i := 0; i < indentNum; i++ {
            dst = append(dst, indentBytes...)
        }
        dst, cursor, err = indentValue(dst, src, indentNum, cursor, prefix, indentBytes, escape)
        if err != nil {
            return nil, 0, err
        }
        cursor = skipWhiteSpace(src, cursor)
        switch src[cursor] {
        case ']':
            dst = append(append(dst, '\n'), prefix...)
            for i := 0; i < indentNum-1; i++ {
                dst = append(dst, indentBytes...)
            }
            dst = append(dst, ']')
            cursor++
            return dst, cursor, nil
        case ',':
            dst = append(dst, ',')
        default:
            return nil, 0, errors.ErrSyntax(
                fmt.Sprintf("invalid character '%c' after array value", src[cursor]),
                cursor+1,
            )
        }
        cursor++
    }
}
130
vendor/github.com/goccy/go-json/internal/encoder/int.go
generated
vendored
Normal file
@@ -0,0 +1,130 @@
package encoder

import (
    "unsafe"
)

var endianness int

func init() {
    var b [2]byte
    *(*uint16)(unsafe.Pointer(&b)) = uint16(0xABCD)

    switch b[0] {
    case 0xCD:
        endianness = 0 // LE
    case 0xAB:
        endianness = 1 // BE
    default:
        panic("could not determine endianness")
    }
}

// "00010203...96979899" cast to []uint16
var intLELookup = [100]uint16{
    0x3030, 0x3130, 0x3230, 0x3330, 0x3430, 0x3530, 0x3630, 0x3730, 0x3830, 0x3930,
    0x3031, 0x3131, 0x3231, 0x3331, 0x3431, 0x3531, 0x3631, 0x3731, 0x3831, 0x3931,
    0x3032, 0x3132, 0x3232, 0x3332, 0x3432, 0x3532, 0x3632, 0x3732, 0x3832, 0x3932,
    0x3033, 0x3133, 0x3233, 0x3333, 0x3433, 0x3533, 0x3633, 0x3733, 0x3833, 0x3933,
    0x3034, 0x3134, 0x3234, 0x3334, 0x3434, 0x3534, 0x3634, 0x3734, 0x3834, 0x3934,
    0x3035, 0x3135, 0x3235, 0x3335, 0x3435, 0x3535, 0x3635, 0x3735, 0x3835, 0x3935,
    0x3036, 0x3136, 0x3236, 0x3336, 0x3436, 0x3536, 0x3636, 0x3736, 0x3836, 0x3936,
    0x3037, 0x3137, 0x3237, 0x3337, 0x3437, 0x3537, 0x3637, 0x3737, 0x3837, 0x3937,
    0x3038, 0x3138, 0x3238, 0x3338, 0x3438, 0x3538, 0x3638, 0x3738, 0x3838, 0x3938,
    0x3039, 0x3139, 0x3239, 0x3339, 0x3439, 0x3539, 0x3639, 0x3739, 0x3839, 0x3939,
}

var intBELookup = [100]uint16{
    0x3030, 0x3031, 0x3032, 0x3033, 0x3034, 0x3035, 0x3036, 0x3037, 0x3038, 0x3039,
    0x3130, 0x3131, 0x3132, 0x3133, 0x3134, 0x3135, 0x3136, 0x3137, 0x3138, 0x3139,
    0x3230, 0x3231, 0x3232, 0x3233, 0x3234, 0x3235, 0x3236, 0x3237, 0x3238, 0x3239,
    0x3330, 0x3331, 0x3332, 0x3333, 0x3334, 0x3335, 0x3336, 0x3337, 0x3338, 0x3339,
    0x3430, 0x3431, 0x3432, 0x3433, 0x3434, 0x3435, 0x3436, 0x3437, 0x3438, 0x3439,
    0x3530, 0x3531, 0x3532, 0x3533, 0x3534, 0x3535, 0x3536, 0x3537, 0x3538, 0x3539,
    0x3630, 0x3631, 0x3632, 0x3633, 0x3634, 0x3635, 0x3636, 0x3637, 0x3638, 0x3639,
    0x3730, 0x3731, 0x3732, 0x3733, 0x3734, 0x3735, 0x3736, 0x3737, 0x3738, 0x3739,
    0x3830, 0x3831, 0x3832, 0x3833, 0x3834, 0x3835, 0x3836, 0x3837, 0x3838, 0x3839,
    0x3930, 0x3931, 0x3932, 0x3933, 0x3934, 0x3935, 0x3936, 0x3937, 0x3938, 0x3939,
}

var intLookup = [2]*[100]uint16{&intLELookup, &intBELookup}

func numMask(numBitSize uint8) uint64 {
    return 1<<numBitSize - 1
}

func AppendInt(_ *RuntimeContext, out []byte, u64 uint64, code *Opcode) []byte {
    mask := numMask(code.NumBitSize)
    n := u64 & mask
    negative := (u64>>(code.NumBitSize-1))&1 == 1
    if !negative {
        if n < 10 {
            return append(out, byte(n+'0'))
        } else if n < 100 {
            u := intLELookup[n]
            return append(out, byte(u), byte(u>>8))
        }
    } else {
        n = -n & mask
    }

    lookup := intLookup[endianness]

    var b [22]byte
    u := (*[11]uint16)(unsafe.Pointer(&b))
    i := 11

    for n >= 100 {
        j := n % 100
        n /= 100
        i--
        u[i] = lookup[j]
    }

    i--
    u[i] = lookup[n]

    i *= 2 // convert to byte index
    if n < 10 {
        i++ // remove leading zero
    }
    if negative {
        i--
        b[i] = '-'
    }

    return append(out, b[i:]...)
}

func AppendUint(_ *RuntimeContext, out []byte, u64 uint64, code *Opcode) []byte {
    mask := numMask(code.NumBitSize)
    n := u64 & mask
    if n < 10 {
        return append(out, byte(n+'0'))
    } else if n < 100 {
        u := intLELookup[n]
        return append(out, byte(u), byte(u>>8))
    }

    lookup := intLookup[endianness]

    var b [22]byte
    u := (*[11]uint16)(unsafe.Pointer(&b))
    i := 11

    for n >= 100 {
        j := n % 100
        n /= 100
        i--
        u[i] = lookup[j]
    }

    i--
    u[i] = lookup[n]

    i *= 2 // convert to byte index
    if n < 10 {
        i++ // remove leading zero
    }
    return append(out, b[i:]...)
}
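Editor's note: AppendInt/AppendUint above emit two decimal digits per iteration from a precomputed table instead of one digit at a time. A minimal standalone sketch of that same two-digits-per-step idea, written without the vendored code's unsafe/endianness tricks and with hypothetical names:

// Illustrative only: the two-digits-per-iteration technique behind AppendInt above.
package main

import "fmt"

var digitPairs = func() [100][2]byte {
    var t [100][2]byte
    for i := 0; i < 100; i++ {
        t[i] = [2]byte{byte('0' + i/10), byte('0' + i%10)}
    }
    return t
}()

func appendUint(dst []byte, n uint64) []byte {
    var buf [20]byte
    i := len(buf)
    for n >= 100 {
        p := digitPairs[n%100]
        n /= 100
        i -= 2
        buf[i], buf[i+1] = p[0], p[1]
    }
    if n < 10 {
        i--
        buf[i] = byte('0' + n)
    } else {
        p := digitPairs[n]
        i -= 2
        buf[i], buf[i+1] = p[0], p[1]
    }
    return append(dst, buf[i:]...)
}

func main() {
    fmt.Println(string(appendUint(nil, 1234567890))) // 1234567890
}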
8
vendor/github.com/goccy/go-json/internal/encoder/map112.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
// +build !go1.13

package encoder

import "unsafe"

//go:linkname MapIterValue reflect.mapitervalue
func MapIterValue(it unsafe.Pointer) unsafe.Pointer
8
vendor/github.com/goccy/go-json/internal/encoder/map113.go
generated
vendored
Normal file
@@ -0,0 +1,8 @@
// +build go1.13

package encoder

import "unsafe"

//go:linkname MapIterValue reflect.mapiterelem
func MapIterValue(it unsafe.Pointer) unsafe.Pointer
766
vendor/github.com/goccy/go-json/internal/encoder/opcode.go
generated
vendored
Normal file
|
@ -0,0 +1,766 @@
|
||||||
|
package encoder
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"strings"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||||
|
|
||||||
|
type OpFlags uint16
|
||||||
|
|
||||||
|
const (
|
||||||
|
AnonymousHeadFlags OpFlags = 1 << 0
|
||||||
|
AnonymousKeyFlags OpFlags = 1 << 1
|
||||||
|
IndirectFlags OpFlags = 1 << 2
|
||||||
|
IsTaggedKeyFlags OpFlags = 1 << 3
|
||||||
|
NilCheckFlags OpFlags = 1 << 4
|
||||||
|
AddrForMarshalerFlags OpFlags = 1 << 5
|
||||||
|
IsNextOpPtrTypeFlags OpFlags = 1 << 6
|
||||||
|
IsNilableTypeFlags OpFlags = 1 << 7
|
||||||
|
MarshalerContextFlags OpFlags = 1 << 8
|
||||||
|
)
|
||||||
|
|
||||||
|
type Opcode struct {
|
||||||
|
Op OpType // operation type
|
||||||
|
Idx uint32 // offset to access ptr
|
||||||
|
Next *Opcode // next opcode
|
||||||
|
End *Opcode // array/slice/struct/map end
|
||||||
|
NextField *Opcode // next struct field
|
||||||
|
Key string // struct field key
|
||||||
|
Offset uint32 // offset size from struct header
|
||||||
|
PtrNum uint8 // pointer number: e.g. double pointer is 2.
|
||||||
|
NumBitSize uint8
|
||||||
|
Flags OpFlags
|
||||||
|
|
||||||
|
Type *runtime.Type // go type
|
||||||
|
PrevField *Opcode // prev struct field
|
||||||
|
Jmp *CompiledCode // for recursive call
|
||||||
|
ElemIdx uint32 // offset to access array/slice/map elem
|
||||||
|
Length uint32 // offset to access slice/map length or array length
|
||||||
|
MapIter uint32 // offset to access map iterator
|
||||||
|
MapPos uint32 // offset to access position list for sorted map
|
||||||
|
Indent uint32 // indent number
|
||||||
|
Size uint32 // array/slice elem size
|
||||||
|
DisplayIdx uint32 // opcode index
|
||||||
|
DisplayKey string // key text to display
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Opcode) MaxIdx() uint32 {
|
||||||
|
max := uint32(0)
|
||||||
|
for _, value := range []uint32{
|
||||||
|
c.Idx,
|
||||||
|
c.ElemIdx,
|
||||||
|
c.Length,
|
||||||
|
c.MapIter,
|
||||||
|
c.MapPos,
|
||||||
|
c.Size,
|
||||||
|
} {
|
||||||
|
if max < value {
|
||||||
|
max = value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return max
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Opcode) ToHeaderType(isString bool) OpType {
|
||||||
|
switch c.Op {
|
||||||
|
case OpInt:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadIntString
|
||||||
|
}
|
||||||
|
return OpStructHeadInt
|
||||||
|
case OpIntPtr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadIntPtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadIntPtr
|
||||||
|
case OpUint:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadUintString
|
||||||
|
}
|
||||||
|
return OpStructHeadUint
|
||||||
|
case OpUintPtr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadUintPtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadUintPtr
|
||||||
|
case OpFloat32:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadFloat32String
|
||||||
|
}
|
||||||
|
return OpStructHeadFloat32
|
||||||
|
case OpFloat32Ptr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadFloat32PtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadFloat32Ptr
|
||||||
|
case OpFloat64:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadFloat64String
|
||||||
|
}
|
||||||
|
return OpStructHeadFloat64
|
||||||
|
case OpFloat64Ptr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadFloat64PtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadFloat64Ptr
|
||||||
|
case OpString:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadStringString
|
||||||
|
}
|
||||||
|
return OpStructHeadString
|
||||||
|
case OpStringPtr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadStringPtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadStringPtr
|
||||||
|
case OpNumber:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadNumberString
|
||||||
|
}
|
||||||
|
return OpStructHeadNumber
|
||||||
|
case OpNumberPtr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadNumberPtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadNumberPtr
|
||||||
|
case OpBool:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadBoolString
|
||||||
|
}
|
||||||
|
return OpStructHeadBool
|
||||||
|
case OpBoolPtr:
|
||||||
|
if isString {
|
||||||
|
return OpStructHeadBoolPtrString
|
||||||
|
}
|
||||||
|
return OpStructHeadBoolPtr
|
||||||
|
case OpBytes:
|
||||||
|
return OpStructHeadBytes
|
||||||
|
case OpBytesPtr:
|
||||||
|
return OpStructHeadBytesPtr
|
||||||
|
case OpMap:
|
||||||
|
return OpStructHeadMap
|
||||||
|
case OpMapPtr:
|
||||||
|
c.Op = OpMap
|
||||||
|
return OpStructHeadMapPtr
|
||||||
|
case OpArray:
|
||||||
|
return OpStructHeadArray
|
||||||
|
case OpArrayPtr:
|
||||||
|
c.Op = OpArray
|
||||||
|
return OpStructHeadArrayPtr
|
||||||
|
case OpSlice:
|
||||||
|
return OpStructHeadSlice
|
||||||
|
case OpSlicePtr:
|
||||||
|
c.Op = OpSlice
|
||||||
|
return OpStructHeadSlicePtr
|
||||||
|
case OpMarshalJSON:
|
||||||
|
return OpStructHeadMarshalJSON
|
||||||
|
case OpMarshalJSONPtr:
|
||||||
|
return OpStructHeadMarshalJSONPtr
|
||||||
|
case OpMarshalText:
|
||||||
|
return OpStructHeadMarshalText
|
||||||
|
case OpMarshalTextPtr:
|
||||||
|
return OpStructHeadMarshalTextPtr
|
||||||
|
}
|
||||||
|
return OpStructHead
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Opcode) ToFieldType(isString bool) OpType {
	switch c.Op {
	case OpInt:
		if isString {
			return OpStructFieldIntString
		}
		return OpStructFieldInt
	case OpIntPtr:
		if isString {
			return OpStructFieldIntPtrString
		}
		return OpStructFieldIntPtr
	case OpUint:
		if isString {
			return OpStructFieldUintString
		}
		return OpStructFieldUint
	case OpUintPtr:
		if isString {
			return OpStructFieldUintPtrString
		}
		return OpStructFieldUintPtr
	case OpFloat32:
		if isString {
			return OpStructFieldFloat32String
		}
		return OpStructFieldFloat32
	case OpFloat32Ptr:
		if isString {
			return OpStructFieldFloat32PtrString
		}
		return OpStructFieldFloat32Ptr
	case OpFloat64:
		if isString {
			return OpStructFieldFloat64String
		}
		return OpStructFieldFloat64
	case OpFloat64Ptr:
		if isString {
			return OpStructFieldFloat64PtrString
		}
		return OpStructFieldFloat64Ptr
	case OpString:
		if isString {
			return OpStructFieldStringString
		}
		return OpStructFieldString
	case OpStringPtr:
		if isString {
			return OpStructFieldStringPtrString
		}
		return OpStructFieldStringPtr
	case OpNumber:
		if isString {
			return OpStructFieldNumberString
		}
		return OpStructFieldNumber
	case OpNumberPtr:
		if isString {
			return OpStructFieldNumberPtrString
		}
		return OpStructFieldNumberPtr
	case OpBool:
		if isString {
			return OpStructFieldBoolString
		}
		return OpStructFieldBool
	case OpBoolPtr:
		if isString {
			return OpStructFieldBoolPtrString
		}
		return OpStructFieldBoolPtr
	case OpBytes:
		return OpStructFieldBytes
	case OpBytesPtr:
		return OpStructFieldBytesPtr
	case OpMap:
		return OpStructFieldMap
	case OpMapPtr:
		c.Op = OpMap
		return OpStructFieldMapPtr
	case OpArray:
		return OpStructFieldArray
	case OpArrayPtr:
		c.Op = OpArray
		return OpStructFieldArrayPtr
	case OpSlice:
		return OpStructFieldSlice
	case OpSlicePtr:
		c.Op = OpSlice
		return OpStructFieldSlicePtr
	case OpMarshalJSON:
		return OpStructFieldMarshalJSON
	case OpMarshalJSONPtr:
		return OpStructFieldMarshalJSONPtr
	case OpMarshalText:
		return OpStructFieldMarshalText
	case OpMarshalTextPtr:
		return OpStructFieldMarshalTextPtr
	}
	return OpStructField
}

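ToFieldType (and the head-type mapper above it) is how the compiler rewrites a plain value opcode into its struct-member form once it knows whether the field carries the `,string` tag option. A minimal illustrative sketch, assuming it runs inside this encoder package (the surrounding compileContext plumbing is omitted):

// Sketch only: map a field's value opcode to its struct-field opcode.
// OpIntPtr with the `,string` option becomes OpStructFieldIntPtrString;
// pointer container ops (OpMapPtr, OpArrayPtr, OpSlicePtr) are also
// rewritten in place to their non-pointer op, as the switch above shows.
code := &Opcode{Op: OpIntPtr}
fieldOp := code.ToFieldType(true)                    // OpStructFieldIntPtrString
plainOp := (&Opcode{Op: OpBool}).ToFieldType(false)  // OpStructFieldBool
_, _ = fieldOp, plainOp
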
func newOpCode(ctx *compileContext, op OpType) *Opcode {
	return newOpCodeWithNext(ctx, op, newEndOp(ctx))
}

func opcodeOffset(idx int) uint32 {
	return uint32(idx) * uintptrSize
}

func copyOpcode(code *Opcode) *Opcode {
	codeMap := map[uintptr]*Opcode{}
	return code.copy(codeMap)
}

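opcodeOffset converts a slot index in the encoder's pointer buffer into a byte offset; uintptrSize is defined elsewhere in this package as the platform word size. A small worked example, assuming a 64-bit platform where uintptrSize is 8:

// opcodeOffset(0) == 0, opcodeOffset(1) == 8, opcodeOffset(3) == 24:
// each compiled opcode addresses an 8-byte slot at Idx bytes into the buffer.
idx := opcodeOffset(3) // 24 on amd64
_ = idx
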
func setTotalLengthToInterfaceOp(code *Opcode) {
	c := code
	for c.Op != OpEnd && c.Op != OpInterfaceEnd {
		if c.Op == OpInterface {
			c.Length = uint32(code.TotalLength())
		}
		switch c.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			c = c.End
		default:
			c = c.Next
		}
	}
}

func ToEndCode(code *Opcode) *Opcode {
	c := code
	for c.Op != OpEnd && c.Op != OpInterfaceEnd {
		switch c.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			c = c.End
		default:
			c = c.Next
		}
	}
	return c
}

func copyToInterfaceOpcode(code *Opcode) *Opcode {
	copied := copyOpcode(code)
	c := copied
	c = ToEndCode(c)
	c.Idx += uintptrSize
	c.ElemIdx = c.Idx + uintptrSize
	c.Length = c.Idx + 2*uintptrSize
	c.Op = OpInterfaceEnd
	return copied
}

func newOpCodeWithNext(ctx *compileContext, op OpType, next *Opcode) *Opcode {
	return &Opcode{
		Op:         op,
		Idx:        opcodeOffset(ctx.ptrIndex),
		Next:       next,
		Type:       ctx.typ,
		DisplayIdx: ctx.opcodeIndex,
		Indent:     ctx.indent,
	}
}

func newEndOp(ctx *compileContext) *Opcode {
	return newOpCodeWithNext(ctx, OpEnd, nil)
}

func (c *Opcode) copy(codeMap map[uintptr]*Opcode) *Opcode {
	if c == nil {
		return nil
	}
	addr := uintptr(unsafe.Pointer(c))
	if code, exists := codeMap[addr]; exists {
		return code
	}
	copied := &Opcode{
		Op:         c.Op,
		Key:        c.Key,
		PtrNum:     c.PtrNum,
		NumBitSize: c.NumBitSize,
		Flags:      c.Flags,
		Idx:        c.Idx,
		Offset:     c.Offset,
		Type:       c.Type,
		DisplayIdx: c.DisplayIdx,
		DisplayKey: c.DisplayKey,
		ElemIdx:    c.ElemIdx,
		Length:     c.Length,
		MapIter:    c.MapIter,
		MapPos:     c.MapPos,
		Size:       c.Size,
		Indent:     c.Indent,
	}
	codeMap[addr] = copied
	copied.End = c.End.copy(codeMap)
	copied.PrevField = c.PrevField.copy(codeMap)
	copied.NextField = c.NextField.copy(codeMap)
	copied.Next = c.Next.copy(codeMap)
	copied.Jmp = c.Jmp
	return copied
}

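The copy method threads a codeMap from node address to already-copied node, so opcode graphs with back-references (End, PrevField, NextField and Next can all point into already-visited nodes) are deep-copied exactly once instead of recursing forever. A minimal sketch of the same idea on a self-referencing node, assuming it lives inside this package:

// Sketch: a node whose Next points at itself is still copied safely,
// because the clone is registered in codeMap before Next is copied.
orig := &Opcode{Op: OpInt}
orig.Next = orig
clone := copyOpcode(orig)
// clone.Next == clone, and clone != orig
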
func (c *Opcode) BeforeLastCode() *Opcode {
	code := c
	for {
		var nextCode *Opcode
		switch code.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			nextCode = code.End
		default:
			nextCode = code.Next
		}
		if nextCode.Op == OpEnd {
			return code
		}
		code = nextCode
	}
}

func (c *Opcode) TotalLength() int {
	var idx int
	code := c
	for code.Op != OpEnd && code.Op != OpInterfaceEnd {
		maxIdx := int(code.MaxIdx() / uintptrSize)
		if idx < maxIdx {
			idx = maxIdx
		}
		if code.Op == OpRecursiveEnd {
			break
		}
		switch code.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			code = code.End
		default:
			code = code.Next
		}
	}
	maxIdx := int(code.MaxIdx() / uintptrSize)
	if idx < maxIdx {
		idx = maxIdx
	}
	return idx + 1
}

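TotalLength walks the opcode chain and returns how many pointer-sized slots the compiled program needs: the largest MaxIdx seen, divided by uintptrSize, plus one. A short sketch of how such a slot count would typically be used to size a per-run buffer (the allocation side is an assumption, not shown in this file):

// Sketch: size a pointer buffer from a compiled *Opcode named code.
// total is a slot count, so the byte size is total * uintptrSize.
total := code.TotalLength()
ptrs := make([]uintptr, total) // one word per opcode slot
_ = ptrs
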
func (c *Opcode) decOpcodeIndex() {
	for code := c; code.Op != OpEnd; {
		code.DisplayIdx--
		if code.Idx > 0 {
			code.Idx -= uintptrSize
		}
		if code.ElemIdx > 0 {
			code.ElemIdx -= uintptrSize
		}
		if code.MapIter > 0 {
			code.MapIter -= uintptrSize
		}
		if code.Length > 0 && code.Op.CodeType() != CodeArrayHead && code.Op.CodeType() != CodeArrayElem {
			code.Length -= uintptrSize
		}
		switch code.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			code = code.End
		default:
			code = code.Next
		}
	}
}

func (c *Opcode) decIndent() {
	for code := c; code.Op != OpEnd; {
		code.Indent--
		switch code.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			code = code.End
		default:
			code = code.Next
		}
	}
}

func (c *Opcode) dumpHead(code *Opcode) string {
	var length uint32
	if code.Op.CodeType() == CodeArrayHead {
		length = code.Length
	} else {
		length = code.Length / uintptrSize
	}
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][elemIdx:%d][length:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.ElemIdx/uintptrSize,
		length,
	)
}

func (c *Opcode) dumpMapHead(code *Opcode) string {
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][elemIdx:%d][length:%d][mapIter:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.ElemIdx/uintptrSize,
		code.Length/uintptrSize,
		code.MapIter/uintptrSize,
	)
}

func (c *Opcode) dumpMapEnd(code *Opcode) string {
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][mapPos:%d][length:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.MapPos/uintptrSize,
		code.Length/uintptrSize,
	)
}

func (c *Opcode) dumpElem(code *Opcode) string {
	var length uint32
	if code.Op.CodeType() == CodeArrayElem {
		length = code.Length
	} else {
		length = code.Length / uintptrSize
	}
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][elemIdx:%d][length:%d][size:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.ElemIdx/uintptrSize,
		length,
		code.Size,
	)
}

func (c *Opcode) dumpField(code *Opcode) string {
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][key:%s][offset:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.DisplayKey,
		code.Offset,
	)
}

func (c *Opcode) dumpKey(code *Opcode) string {
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][elemIdx:%d][length:%d][mapIter:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.ElemIdx/uintptrSize,
		code.Length/uintptrSize,
		code.MapIter/uintptrSize,
	)
}

func (c *Opcode) dumpValue(code *Opcode) string {
	return fmt.Sprintf(
		`[%d]%s%s ([idx:%d][mapIter:%d])`,
		code.DisplayIdx,
		strings.Repeat("-", int(code.Indent)),
		code.Op,
		code.Idx/uintptrSize,
		code.MapIter/uintptrSize,
	)
}

func (c *Opcode) Dump() string {
	codes := []string{}
	for code := c; code.Op != OpEnd && code.Op != OpInterfaceEnd; {
		switch code.Op.CodeType() {
		case CodeSliceHead:
			codes = append(codes, c.dumpHead(code))
			code = code.Next
		case CodeMapHead:
			codes = append(codes, c.dumpMapHead(code))
			code = code.Next
		case CodeArrayElem, CodeSliceElem:
			codes = append(codes, c.dumpElem(code))
			code = code.End
		case CodeMapKey:
			codes = append(codes, c.dumpKey(code))
			code = code.End
		case CodeMapValue:
			codes = append(codes, c.dumpValue(code))
			code = code.Next
		case CodeMapEnd:
			codes = append(codes, c.dumpMapEnd(code))
			code = code.Next
		case CodeStructField:
			codes = append(codes, c.dumpField(code))
			code = code.Next
		case CodeStructEnd:
			codes = append(codes, c.dumpField(code))
			code = code.Next
		default:
			codes = append(codes, fmt.Sprintf(
				"[%d]%s%s ([idx:%d])",
				code.DisplayIdx,
				strings.Repeat("-", int(code.Indent)),
				code.Op,
				code.Idx/uintptrSize,
			))
			code = code.Next
		}
	}
	return strings.Join(codes, "\n")
}

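Dump is a debugging aid: each opcode becomes one line, indented with '-' characters per Indent level and annotated with its slot indexes (divided back down by uintptrSize so they read as slot numbers, not byte offsets). An illustrative sketch of using it from inside this package; the sample output is hypothetical and depends on the compiled type:

// Debugging sketch: print the compiled program for a type.
// Output for a struct with one int field "a" might look roughly like:
//   [0]StructHeadInt ([idx:0][key:a][offset:0])
//   [1]StructEnd ([idx:0][key:][offset:0])
//   [2]End ([idx:0])
fmt.Println(code.Dump())
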
func prevField(code *Opcode, removedFields map[*Opcode]struct{}) *Opcode {
	if _, exists := removedFields[code]; exists {
		return prevField(code.PrevField, removedFields)
	}
	return code
}

func nextField(code *Opcode, removedFields map[*Opcode]struct{}) *Opcode {
	if _, exists := removedFields[code]; exists {
		return nextField(code.NextField, removedFields)
	}
	return code
}

func linkPrevToNextField(cur *Opcode, removedFields map[*Opcode]struct{}) {
	prev := prevField(cur.PrevField, removedFields)
	prev.NextField = nextField(cur.NextField, removedFields)
	code := prev
	fcode := cur
	for {
		var nextCode *Opcode
		switch code.Op.CodeType() {
		case CodeArrayElem, CodeSliceElem, CodeMapKey:
			nextCode = code.End
		default:
			nextCode = code.Next
		}
		if nextCode == fcode {
			code.Next = fcode.Next
			break
		} else if nextCode.Op == OpEnd {
			break
		}
		code = nextCode
	}
}

func newSliceHeaderCode(ctx *compileContext) *Opcode {
	idx := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	elemIdx := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	length := opcodeOffset(ctx.ptrIndex)
	return &Opcode{
		Op:         OpSlice,
		Idx:        idx,
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    elemIdx,
		Length:     length,
		Indent:     ctx.indent,
	}
}

func newSliceElemCode(ctx *compileContext, head *Opcode, size uintptr) *Opcode {
	return &Opcode{
		Op:         OpSliceElem,
		Idx:        head.Idx,
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    head.ElemIdx,
		Length:     head.Length,
		Indent:     ctx.indent,
		Size:       uint32(size),
	}
}

func newArrayHeaderCode(ctx *compileContext, alen int) *Opcode {
	idx := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	elemIdx := opcodeOffset(ctx.ptrIndex)
	return &Opcode{
		Op:         OpArray,
		Idx:        idx,
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    elemIdx,
		Indent:     ctx.indent,
		Length:     uint32(alen),
	}
}

func newArrayElemCode(ctx *compileContext, head *Opcode, length int, size uintptr) *Opcode {
	return &Opcode{
		Op:         OpArrayElem,
		Idx:        head.Idx,
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    head.ElemIdx,
		Length:     uint32(length),
		Indent:     ctx.indent,
		Size:       uint32(size),
	}
}

func newMapHeaderCode(ctx *compileContext) *Opcode {
	idx := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	elemIdx := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	length := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	mapIter := opcodeOffset(ctx.ptrIndex)
	return &Opcode{
		Op:         OpMap,
		Idx:        idx,
		Type:       ctx.typ,
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    elemIdx,
		Length:     length,
		MapIter:    mapIter,
		Indent:     ctx.indent,
	}
}

func newMapKeyCode(ctx *compileContext, head *Opcode) *Opcode {
	return &Opcode{
		Op:         OpMapKey,
		Idx:        opcodeOffset(ctx.ptrIndex),
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    head.ElemIdx,
		Length:     head.Length,
		MapIter:    head.MapIter,
		Indent:     ctx.indent,
	}
}

func newMapValueCode(ctx *compileContext, head *Opcode) *Opcode {
	return &Opcode{
		Op:         OpMapValue,
		Idx:        opcodeOffset(ctx.ptrIndex),
		DisplayIdx: ctx.opcodeIndex,
		ElemIdx:    head.ElemIdx,
		Length:     head.Length,
		MapIter:    head.MapIter,
		Indent:     ctx.indent,
	}
}

func newMapEndCode(ctx *compileContext, head *Opcode) *Opcode {
	mapPos := opcodeOffset(ctx.ptrIndex)
	ctx.incPtrIndex()
	idx := opcodeOffset(ctx.ptrIndex)
	return &Opcode{
		Op:         OpMapEnd,
		Idx:        idx,
		Next:       newEndOp(ctx),
		DisplayIdx: ctx.opcodeIndex,
		Length:     head.Length,
		MapPos:     mapPos,
		Indent:     ctx.indent,
	}
}

func newInterfaceCode(ctx *compileContext) *Opcode {
	return &Opcode{
		Op:         OpInterface,
		Idx:        opcodeOffset(ctx.ptrIndex),
		Next:       newEndOp(ctx),
		Type:       ctx.typ,
		DisplayIdx: ctx.opcodeIndex,
		Indent:     ctx.indent,
	}
}

func newRecursiveCode(ctx *compileContext, jmp *CompiledCode) *Opcode {
	return &Opcode{
		Op:         OpRecursive,
		Idx:        opcodeOffset(ctx.ptrIndex),
		Next:       newEndOp(ctx),
		Type:       ctx.typ,
		DisplayIdx: ctx.opcodeIndex,
		Indent:     ctx.indent,
		Jmp:        jmp,
	}
}

41 vendor/github.com/goccy/go-json/internal/encoder/option.go (generated, vendored, new file)
@@ -0,0 +1,41 @@
package encoder

import "context"

type OptionFlag uint8

const (
	HTMLEscapeOption OptionFlag = 1 << iota
	IndentOption
	UnorderedMapOption
	DebugOption
	ColorizeOption
	ContextOption
)

type Option struct {
	Flag        OptionFlag
	ColorScheme *ColorScheme
	Context     context.Context
}

type EncodeFormat struct {
	Header string
	Footer string
}

type EncodeFormatScheme struct {
	Int       EncodeFormat
	Uint      EncodeFormat
	Float     EncodeFormat
	Bool      EncodeFormat
	String    EncodeFormat
	Binary    EncodeFormat
	ObjectKey EncodeFormat
	Null      EncodeFormat
}

type (
	ColorScheme = EncodeFormatScheme
	ColorFormat = EncodeFormat
)

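OptionFlag is a bit set, so several encoder switches fit in a single byte. A minimal sketch of how the flags combine and are tested, assuming it runs inside this package (the Option value is normally assembled by go-json's public option constructors, which are not shown here):

// Sketch: combine and test encoder option flags.
opt := Option{Flag: HTMLEscapeOption | IndentOption}
if opt.Flag&IndentOption != 0 {
	// indented (pretty-printed) output was requested
}
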
934 vendor/github.com/goccy/go-json/internal/encoder/optype.go (generated, vendored, new file)
@@ -0,0 +1,934 @@
// Code generated by internal/cmd/generator. DO NOT EDIT!
package encoder

import (
	"strings"
)

type CodeType int

const (
	CodeOp          CodeType = 0
	CodeArrayHead   CodeType = 1
	CodeArrayElem   CodeType = 2
	CodeSliceHead   CodeType = 3
	CodeSliceElem   CodeType = 4
	CodeMapHead     CodeType = 5
	CodeMapKey      CodeType = 6
	CodeMapValue    CodeType = 7
	CodeMapEnd      CodeType = 8
	CodeRecursive   CodeType = 9
	CodeStructField CodeType = 10
	CodeStructEnd   CodeType = 11
)

var opTypeStrings = [401]string{
|
||||||
|
"End",
|
||||||
|
"Interface",
|
||||||
|
"Ptr",
|
||||||
|
"SliceElem",
|
||||||
|
"SliceEnd",
|
||||||
|
"ArrayElem",
|
||||||
|
"ArrayEnd",
|
||||||
|
"MapKey",
|
||||||
|
"MapValue",
|
||||||
|
"MapEnd",
|
||||||
|
"Recursive",
|
||||||
|
"RecursivePtr",
|
||||||
|
"RecursiveEnd",
|
||||||
|
"InterfaceEnd",
|
||||||
|
"StructAnonymousEnd",
|
||||||
|
"Int",
|
||||||
|
"Uint",
|
||||||
|
"Float32",
|
||||||
|
"Float64",
|
||||||
|
"Bool",
|
||||||
|
"String",
|
||||||
|
"Bytes",
|
||||||
|
"Number",
|
||||||
|
"Array",
|
||||||
|
"Map",
|
||||||
|
"Slice",
|
||||||
|
"Struct",
|
||||||
|
"MarshalJSON",
|
||||||
|
"MarshalText",
|
||||||
|
"IntString",
|
||||||
|
"UintString",
|
||||||
|
"Float32String",
|
||||||
|
"Float64String",
|
||||||
|
"BoolString",
|
||||||
|
"StringString",
|
||||||
|
"NumberString",
|
||||||
|
"IntPtr",
|
||||||
|
"UintPtr",
|
||||||
|
"Float32Ptr",
|
||||||
|
"Float64Ptr",
|
||||||
|
"BoolPtr",
|
||||||
|
"StringPtr",
|
||||||
|
"BytesPtr",
|
||||||
|
"NumberPtr",
|
||||||
|
"ArrayPtr",
|
||||||
|
"MapPtr",
|
||||||
|
"SlicePtr",
|
||||||
|
"MarshalJSONPtr",
|
||||||
|
"MarshalTextPtr",
|
||||||
|
"InterfacePtr",
|
||||||
|
"IntPtrString",
|
||||||
|
"UintPtrString",
|
||||||
|
"Float32PtrString",
|
||||||
|
"Float64PtrString",
|
||||||
|
"BoolPtrString",
|
||||||
|
"StringPtrString",
|
||||||
|
"NumberPtrString",
|
||||||
|
"StructHeadInt",
|
||||||
|
"StructHeadOmitEmptyInt",
|
||||||
|
"StructPtrHeadInt",
|
||||||
|
"StructPtrHeadOmitEmptyInt",
|
||||||
|
"StructHeadUint",
|
||||||
|
"StructHeadOmitEmptyUint",
|
||||||
|
"StructPtrHeadUint",
|
||||||
|
"StructPtrHeadOmitEmptyUint",
|
||||||
|
"StructHeadFloat32",
|
||||||
|
"StructHeadOmitEmptyFloat32",
|
||||||
|
"StructPtrHeadFloat32",
|
||||||
|
"StructPtrHeadOmitEmptyFloat32",
|
||||||
|
"StructHeadFloat64",
|
||||||
|
"StructHeadOmitEmptyFloat64",
|
||||||
|
"StructPtrHeadFloat64",
|
||||||
|
"StructPtrHeadOmitEmptyFloat64",
|
||||||
|
"StructHeadBool",
|
||||||
|
"StructHeadOmitEmptyBool",
|
||||||
|
"StructPtrHeadBool",
|
||||||
|
"StructPtrHeadOmitEmptyBool",
|
||||||
|
"StructHeadString",
|
||||||
|
"StructHeadOmitEmptyString",
|
||||||
|
"StructPtrHeadString",
|
||||||
|
"StructPtrHeadOmitEmptyString",
|
||||||
|
"StructHeadBytes",
|
||||||
|
"StructHeadOmitEmptyBytes",
|
||||||
|
"StructPtrHeadBytes",
|
||||||
|
"StructPtrHeadOmitEmptyBytes",
|
||||||
|
"StructHeadNumber",
|
||||||
|
"StructHeadOmitEmptyNumber",
|
||||||
|
"StructPtrHeadNumber",
|
||||||
|
"StructPtrHeadOmitEmptyNumber",
|
||||||
|
"StructHeadArray",
|
||||||
|
"StructHeadOmitEmptyArray",
|
||||||
|
"StructPtrHeadArray",
|
||||||
|
"StructPtrHeadOmitEmptyArray",
|
||||||
|
"StructHeadMap",
|
||||||
|
"StructHeadOmitEmptyMap",
|
||||||
|
"StructPtrHeadMap",
|
||||||
|
"StructPtrHeadOmitEmptyMap",
|
||||||
|
"StructHeadSlice",
|
||||||
|
"StructHeadOmitEmptySlice",
|
||||||
|
"StructPtrHeadSlice",
|
||||||
|
"StructPtrHeadOmitEmptySlice",
|
||||||
|
"StructHeadStruct",
|
||||||
|
"StructHeadOmitEmptyStruct",
|
||||||
|
"StructPtrHeadStruct",
|
||||||
|
"StructPtrHeadOmitEmptyStruct",
|
||||||
|
"StructHeadMarshalJSON",
|
||||||
|
"StructHeadOmitEmptyMarshalJSON",
|
||||||
|
"StructPtrHeadMarshalJSON",
|
||||||
|
"StructPtrHeadOmitEmptyMarshalJSON",
|
||||||
|
"StructHeadMarshalText",
|
||||||
|
"StructHeadOmitEmptyMarshalText",
|
||||||
|
"StructPtrHeadMarshalText",
|
||||||
|
"StructPtrHeadOmitEmptyMarshalText",
|
||||||
|
"StructHeadIntString",
|
||||||
|
"StructHeadOmitEmptyIntString",
|
||||||
|
"StructPtrHeadIntString",
|
||||||
|
"StructPtrHeadOmitEmptyIntString",
|
||||||
|
"StructHeadUintString",
|
||||||
|
"StructHeadOmitEmptyUintString",
|
||||||
|
"StructPtrHeadUintString",
|
||||||
|
"StructPtrHeadOmitEmptyUintString",
|
||||||
|
"StructHeadFloat32String",
|
||||||
|
"StructHeadOmitEmptyFloat32String",
|
||||||
|
"StructPtrHeadFloat32String",
|
||||||
|
"StructPtrHeadOmitEmptyFloat32String",
|
||||||
|
"StructHeadFloat64String",
|
||||||
|
"StructHeadOmitEmptyFloat64String",
|
||||||
|
"StructPtrHeadFloat64String",
|
||||||
|
"StructPtrHeadOmitEmptyFloat64String",
|
||||||
|
"StructHeadBoolString",
|
||||||
|
"StructHeadOmitEmptyBoolString",
|
||||||
|
"StructPtrHeadBoolString",
|
||||||
|
"StructPtrHeadOmitEmptyBoolString",
|
||||||
|
"StructHeadStringString",
|
||||||
|
"StructHeadOmitEmptyStringString",
|
||||||
|
"StructPtrHeadStringString",
|
||||||
|
"StructPtrHeadOmitEmptyStringString",
|
||||||
|
"StructHeadNumberString",
|
||||||
|
"StructHeadOmitEmptyNumberString",
|
||||||
|
"StructPtrHeadNumberString",
|
||||||
|
"StructPtrHeadOmitEmptyNumberString",
|
||||||
|
"StructHeadIntPtr",
|
||||||
|
"StructHeadOmitEmptyIntPtr",
|
||||||
|
"StructPtrHeadIntPtr",
|
||||||
|
"StructPtrHeadOmitEmptyIntPtr",
|
||||||
|
"StructHeadUintPtr",
|
||||||
|
"StructHeadOmitEmptyUintPtr",
|
||||||
|
"StructPtrHeadUintPtr",
|
||||||
|
"StructPtrHeadOmitEmptyUintPtr",
|
||||||
|
"StructHeadFloat32Ptr",
|
||||||
|
"StructHeadOmitEmptyFloat32Ptr",
|
||||||
|
"StructPtrHeadFloat32Ptr",
|
||||||
|
"StructPtrHeadOmitEmptyFloat32Ptr",
|
||||||
|
"StructHeadFloat64Ptr",
|
||||||
|
"StructHeadOmitEmptyFloat64Ptr",
|
||||||
|
"StructPtrHeadFloat64Ptr",
|
||||||
|
"StructPtrHeadOmitEmptyFloat64Ptr",
|
||||||
|
"StructHeadBoolPtr",
|
||||||
|
"StructHeadOmitEmptyBoolPtr",
|
||||||
|
"StructPtrHeadBoolPtr",
|
||||||
|
"StructPtrHeadOmitEmptyBoolPtr",
|
||||||
|
"StructHeadStringPtr",
|
||||||
|
"StructHeadOmitEmptyStringPtr",
|
||||||
|
"StructPtrHeadStringPtr",
|
||||||
|
"StructPtrHeadOmitEmptyStringPtr",
|
||||||
|
"StructHeadBytesPtr",
|
||||||
|
"StructHeadOmitEmptyBytesPtr",
|
||||||
|
"StructPtrHeadBytesPtr",
|
||||||
|
"StructPtrHeadOmitEmptyBytesPtr",
|
||||||
|
"StructHeadNumberPtr",
|
||||||
|
"StructHeadOmitEmptyNumberPtr",
|
||||||
|
"StructPtrHeadNumberPtr",
|
||||||
|
"StructPtrHeadOmitEmptyNumberPtr",
|
||||||
|
"StructHeadArrayPtr",
|
||||||
|
"StructHeadOmitEmptyArrayPtr",
|
||||||
|
"StructPtrHeadArrayPtr",
|
||||||
|
"StructPtrHeadOmitEmptyArrayPtr",
|
||||||
|
"StructHeadMapPtr",
|
||||||
|
"StructHeadOmitEmptyMapPtr",
|
||||||
|
"StructPtrHeadMapPtr",
|
||||||
|
"StructPtrHeadOmitEmptyMapPtr",
|
||||||
|
"StructHeadSlicePtr",
|
||||||
|
"StructHeadOmitEmptySlicePtr",
|
||||||
|
"StructPtrHeadSlicePtr",
|
||||||
|
"StructPtrHeadOmitEmptySlicePtr",
|
||||||
|
"StructHeadMarshalJSONPtr",
|
||||||
|
"StructHeadOmitEmptyMarshalJSONPtr",
|
||||||
|
"StructPtrHeadMarshalJSONPtr",
|
||||||
|
"StructPtrHeadOmitEmptyMarshalJSONPtr",
|
||||||
|
"StructHeadMarshalTextPtr",
|
||||||
|
"StructHeadOmitEmptyMarshalTextPtr",
|
||||||
|
"StructPtrHeadMarshalTextPtr",
|
||||||
|
"StructPtrHeadOmitEmptyMarshalTextPtr",
|
||||||
|
"StructHeadInterfacePtr",
|
||||||
|
"StructHeadOmitEmptyInterfacePtr",
|
||||||
|
"StructPtrHeadInterfacePtr",
|
||||||
|
"StructPtrHeadOmitEmptyInterfacePtr",
|
||||||
|
"StructHeadIntPtrString",
|
||||||
|
"StructHeadOmitEmptyIntPtrString",
|
||||||
|
"StructPtrHeadIntPtrString",
|
||||||
|
"StructPtrHeadOmitEmptyIntPtrString",
|
||||||
|
"StructHeadUintPtrString",
|
||||||
|
"StructHeadOmitEmptyUintPtrString",
|
||||||
|
"StructPtrHeadUintPtrString",
|
||||||
|
"StructPtrHeadOmitEmptyUintPtrString",
|
||||||
|
"StructHeadFloat32PtrString",
|
||||||
|
"StructHeadOmitEmptyFloat32PtrString",
|
||||||
|
"StructPtrHeadFloat32PtrString",
|
||||||
|
"StructPtrHeadOmitEmptyFloat32PtrString",
|
||||||
|
"StructHeadFloat64PtrString",
|
||||||
|
"StructHeadOmitEmptyFloat64PtrString",
|
||||||
|
"StructPtrHeadFloat64PtrString",
|
||||||
|
"StructPtrHeadOmitEmptyFloat64PtrString",
|
||||||
|
"StructHeadBoolPtrString",
|
||||||
|
"StructHeadOmitEmptyBoolPtrString",
|
||||||
|
"StructPtrHeadBoolPtrString",
|
||||||
|
"StructPtrHeadOmitEmptyBoolPtrString",
|
||||||
|
"StructHeadStringPtrString",
|
||||||
|
"StructHeadOmitEmptyStringPtrString",
|
||||||
|
"StructPtrHeadStringPtrString",
|
||||||
|
"StructPtrHeadOmitEmptyStringPtrString",
|
||||||
|
"StructHeadNumberPtrString",
|
||||||
|
"StructHeadOmitEmptyNumberPtrString",
|
||||||
|
"StructPtrHeadNumberPtrString",
|
||||||
|
"StructPtrHeadOmitEmptyNumberPtrString",
|
||||||
|
"StructHead",
|
||||||
|
"StructHeadOmitEmpty",
|
||||||
|
"StructPtrHead",
|
||||||
|
"StructPtrHeadOmitEmpty",
|
||||||
|
"StructFieldInt",
|
||||||
|
"StructFieldOmitEmptyInt",
|
||||||
|
"StructEndInt",
|
||||||
|
"StructEndOmitEmptyInt",
|
||||||
|
"StructFieldUint",
|
||||||
|
"StructFieldOmitEmptyUint",
|
||||||
|
"StructEndUint",
|
||||||
|
"StructEndOmitEmptyUint",
|
||||||
|
"StructFieldFloat32",
|
||||||
|
"StructFieldOmitEmptyFloat32",
|
||||||
|
"StructEndFloat32",
|
||||||
|
"StructEndOmitEmptyFloat32",
|
||||||
|
"StructFieldFloat64",
|
||||||
|
"StructFieldOmitEmptyFloat64",
|
||||||
|
"StructEndFloat64",
|
||||||
|
"StructEndOmitEmptyFloat64",
|
||||||
|
"StructFieldBool",
|
||||||
|
"StructFieldOmitEmptyBool",
|
||||||
|
"StructEndBool",
|
||||||
|
"StructEndOmitEmptyBool",
|
||||||
|
"StructFieldString",
|
||||||
|
"StructFieldOmitEmptyString",
|
||||||
|
"StructEndString",
|
||||||
|
"StructEndOmitEmptyString",
|
||||||
|
"StructFieldBytes",
|
||||||
|
"StructFieldOmitEmptyBytes",
|
||||||
|
"StructEndBytes",
|
||||||
|
"StructEndOmitEmptyBytes",
|
||||||
|
"StructFieldNumber",
|
||||||
|
"StructFieldOmitEmptyNumber",
|
||||||
|
"StructEndNumber",
|
||||||
|
"StructEndOmitEmptyNumber",
|
||||||
|
"StructFieldArray",
|
||||||
|
"StructFieldOmitEmptyArray",
|
||||||
|
"StructEndArray",
|
||||||
|
"StructEndOmitEmptyArray",
|
||||||
|
"StructFieldMap",
|
||||||
|
"StructFieldOmitEmptyMap",
|
||||||
|
"StructEndMap",
|
||||||
|
"StructEndOmitEmptyMap",
|
||||||
|
"StructFieldSlice",
|
||||||
|
"StructFieldOmitEmptySlice",
|
||||||
|
"StructEndSlice",
|
||||||
|
"StructEndOmitEmptySlice",
|
||||||
|
"StructFieldStruct",
|
||||||
|
"StructFieldOmitEmptyStruct",
|
||||||
|
"StructEndStruct",
|
||||||
|
"StructEndOmitEmptyStruct",
|
||||||
|
"StructFieldMarshalJSON",
|
||||||
|
"StructFieldOmitEmptyMarshalJSON",
|
||||||
|
"StructEndMarshalJSON",
|
||||||
|
"StructEndOmitEmptyMarshalJSON",
|
||||||
|
"StructFieldMarshalText",
|
||||||
|
"StructFieldOmitEmptyMarshalText",
|
||||||
|
"StructEndMarshalText",
|
||||||
|
"StructEndOmitEmptyMarshalText",
|
||||||
|
"StructFieldIntString",
|
||||||
|
"StructFieldOmitEmptyIntString",
|
||||||
|
"StructEndIntString",
|
||||||
|
"StructEndOmitEmptyIntString",
|
||||||
|
"StructFieldUintString",
|
||||||
|
"StructFieldOmitEmptyUintString",
|
||||||
|
"StructEndUintString",
|
||||||
|
"StructEndOmitEmptyUintString",
|
||||||
|
"StructFieldFloat32String",
|
||||||
|
"StructFieldOmitEmptyFloat32String",
|
||||||
|
"StructEndFloat32String",
|
||||||
|
"StructEndOmitEmptyFloat32String",
|
||||||
|
"StructFieldFloat64String",
|
||||||
|
"StructFieldOmitEmptyFloat64String",
|
||||||
|
"StructEndFloat64String",
|
||||||
|
"StructEndOmitEmptyFloat64String",
|
||||||
|
"StructFieldBoolString",
|
||||||
|
"StructFieldOmitEmptyBoolString",
|
||||||
|
"StructEndBoolString",
|
||||||
|
"StructEndOmitEmptyBoolString",
|
||||||
|
"StructFieldStringString",
|
||||||
|
"StructFieldOmitEmptyStringString",
|
||||||
|
"StructEndStringString",
|
||||||
|
"StructEndOmitEmptyStringString",
|
||||||
|
"StructFieldNumberString",
|
||||||
|
"StructFieldOmitEmptyNumberString",
|
||||||
|
"StructEndNumberString",
|
||||||
|
"StructEndOmitEmptyNumberString",
|
||||||
|
"StructFieldIntPtr",
|
||||||
|
"StructFieldOmitEmptyIntPtr",
|
||||||
|
"StructEndIntPtr",
|
||||||
|
"StructEndOmitEmptyIntPtr",
|
||||||
|
"StructFieldUintPtr",
|
||||||
|
"StructFieldOmitEmptyUintPtr",
|
||||||
|
"StructEndUintPtr",
|
||||||
|
"StructEndOmitEmptyUintPtr",
|
||||||
|
"StructFieldFloat32Ptr",
|
||||||
|
"StructFieldOmitEmptyFloat32Ptr",
|
||||||
|
"StructEndFloat32Ptr",
|
||||||
|
"StructEndOmitEmptyFloat32Ptr",
|
||||||
|
"StructFieldFloat64Ptr",
|
||||||
|
"StructFieldOmitEmptyFloat64Ptr",
|
||||||
|
"StructEndFloat64Ptr",
|
||||||
|
"StructEndOmitEmptyFloat64Ptr",
|
||||||
|
"StructFieldBoolPtr",
|
||||||
|
"StructFieldOmitEmptyBoolPtr",
|
||||||
|
"StructEndBoolPtr",
|
||||||
|
"StructEndOmitEmptyBoolPtr",
|
||||||
|
"StructFieldStringPtr",
|
||||||
|
"StructFieldOmitEmptyStringPtr",
|
||||||
|
"StructEndStringPtr",
|
||||||
|
"StructEndOmitEmptyStringPtr",
|
||||||
|
"StructFieldBytesPtr",
|
||||||
|
"StructFieldOmitEmptyBytesPtr",
|
||||||
|
"StructEndBytesPtr",
|
||||||
|
"StructEndOmitEmptyBytesPtr",
|
||||||
|
"StructFieldNumberPtr",
|
||||||
|
"StructFieldOmitEmptyNumberPtr",
|
||||||
|
"StructEndNumberPtr",
|
||||||
|
"StructEndOmitEmptyNumberPtr",
|
||||||
|
"StructFieldArrayPtr",
|
||||||
|
"StructFieldOmitEmptyArrayPtr",
|
||||||
|
"StructEndArrayPtr",
|
||||||
|
"StructEndOmitEmptyArrayPtr",
|
||||||
|
"StructFieldMapPtr",
|
||||||
|
"StructFieldOmitEmptyMapPtr",
|
||||||
|
"StructEndMapPtr",
|
||||||
|
"StructEndOmitEmptyMapPtr",
|
||||||
|
"StructFieldSlicePtr",
|
||||||
|
"StructFieldOmitEmptySlicePtr",
|
||||||
|
"StructEndSlicePtr",
|
||||||
|
"StructEndOmitEmptySlicePtr",
|
||||||
|
"StructFieldMarshalJSONPtr",
|
||||||
|
"StructFieldOmitEmptyMarshalJSONPtr",
|
||||||
|
"StructEndMarshalJSONPtr",
|
||||||
|
"StructEndOmitEmptyMarshalJSONPtr",
|
||||||
|
"StructFieldMarshalTextPtr",
|
||||||
|
"StructFieldOmitEmptyMarshalTextPtr",
|
||||||
|
"StructEndMarshalTextPtr",
|
||||||
|
"StructEndOmitEmptyMarshalTextPtr",
|
||||||
|
"StructFieldInterfacePtr",
|
||||||
|
"StructFieldOmitEmptyInterfacePtr",
|
||||||
|
"StructEndInterfacePtr",
|
||||||
|
"StructEndOmitEmptyInterfacePtr",
|
||||||
|
"StructFieldIntPtrString",
|
||||||
|
"StructFieldOmitEmptyIntPtrString",
|
||||||
|
"StructEndIntPtrString",
|
||||||
|
"StructEndOmitEmptyIntPtrString",
|
||||||
|
"StructFieldUintPtrString",
|
||||||
|
"StructFieldOmitEmptyUintPtrString",
|
||||||
|
"StructEndUintPtrString",
|
||||||
|
"StructEndOmitEmptyUintPtrString",
|
||||||
|
"StructFieldFloat32PtrString",
|
||||||
|
"StructFieldOmitEmptyFloat32PtrString",
|
||||||
|
"StructEndFloat32PtrString",
|
||||||
|
"StructEndOmitEmptyFloat32PtrString",
|
||||||
|
"StructFieldFloat64PtrString",
|
||||||
|
"StructFieldOmitEmptyFloat64PtrString",
|
||||||
|
"StructEndFloat64PtrString",
|
||||||
|
"StructEndOmitEmptyFloat64PtrString",
|
||||||
|
"StructFieldBoolPtrString",
|
||||||
|
"StructFieldOmitEmptyBoolPtrString",
|
||||||
|
"StructEndBoolPtrString",
|
||||||
|
"StructEndOmitEmptyBoolPtrString",
|
||||||
|
"StructFieldStringPtrString",
|
||||||
|
"StructFieldOmitEmptyStringPtrString",
|
||||||
|
"StructEndStringPtrString",
|
||||||
|
"StructEndOmitEmptyStringPtrString",
|
||||||
|
"StructFieldNumberPtrString",
|
||||||
|
"StructFieldOmitEmptyNumberPtrString",
|
||||||
|
"StructEndNumberPtrString",
|
||||||
|
"StructEndOmitEmptyNumberPtrString",
|
||||||
|
"StructField",
|
||||||
|
"StructFieldOmitEmpty",
|
||||||
|
"StructEnd",
|
||||||
|
"StructEndOmitEmpty",
|
||||||
|
}
|
||||||
|
|
||||||
|
type OpType uint16
|
||||||
|
|
||||||
|
const (
|
||||||
|
OpEnd OpType = 0
|
||||||
|
OpInterface OpType = 1
|
||||||
|
OpPtr OpType = 2
|
||||||
|
OpSliceElem OpType = 3
|
||||||
|
OpSliceEnd OpType = 4
|
||||||
|
OpArrayElem OpType = 5
|
||||||
|
OpArrayEnd OpType = 6
|
||||||
|
OpMapKey OpType = 7
|
||||||
|
OpMapValue OpType = 8
|
||||||
|
OpMapEnd OpType = 9
|
||||||
|
OpRecursive OpType = 10
|
||||||
|
OpRecursivePtr OpType = 11
|
||||||
|
OpRecursiveEnd OpType = 12
|
||||||
|
OpInterfaceEnd OpType = 13
|
||||||
|
OpStructAnonymousEnd OpType = 14
|
||||||
|
OpInt OpType = 15
|
||||||
|
OpUint OpType = 16
|
||||||
|
OpFloat32 OpType = 17
|
||||||
|
OpFloat64 OpType = 18
|
||||||
|
OpBool OpType = 19
|
||||||
|
OpString OpType = 20
|
||||||
|
OpBytes OpType = 21
|
||||||
|
OpNumber OpType = 22
|
||||||
|
OpArray OpType = 23
|
||||||
|
OpMap OpType = 24
|
||||||
|
OpSlice OpType = 25
|
||||||
|
OpStruct OpType = 26
|
||||||
|
OpMarshalJSON OpType = 27
|
||||||
|
OpMarshalText OpType = 28
|
||||||
|
OpIntString OpType = 29
|
||||||
|
OpUintString OpType = 30
|
||||||
|
OpFloat32String OpType = 31
|
||||||
|
OpFloat64String OpType = 32
|
||||||
|
OpBoolString OpType = 33
|
||||||
|
OpStringString OpType = 34
|
||||||
|
OpNumberString OpType = 35
|
||||||
|
OpIntPtr OpType = 36
|
||||||
|
OpUintPtr OpType = 37
|
||||||
|
OpFloat32Ptr OpType = 38
|
||||||
|
OpFloat64Ptr OpType = 39
|
||||||
|
OpBoolPtr OpType = 40
|
||||||
|
OpStringPtr OpType = 41
|
||||||
|
OpBytesPtr OpType = 42
|
||||||
|
OpNumberPtr OpType = 43
|
||||||
|
OpArrayPtr OpType = 44
|
||||||
|
OpMapPtr OpType = 45
|
||||||
|
OpSlicePtr OpType = 46
|
||||||
|
OpMarshalJSONPtr OpType = 47
|
||||||
|
OpMarshalTextPtr OpType = 48
|
||||||
|
OpInterfacePtr OpType = 49
|
||||||
|
OpIntPtrString OpType = 50
|
||||||
|
OpUintPtrString OpType = 51
|
||||||
|
OpFloat32PtrString OpType = 52
|
||||||
|
OpFloat64PtrString OpType = 53
|
||||||
|
OpBoolPtrString OpType = 54
|
||||||
|
OpStringPtrString OpType = 55
|
||||||
|
OpNumberPtrString OpType = 56
|
||||||
|
OpStructHeadInt OpType = 57
|
||||||
|
OpStructHeadOmitEmptyInt OpType = 58
|
||||||
|
OpStructPtrHeadInt OpType = 59
|
||||||
|
OpStructPtrHeadOmitEmptyInt OpType = 60
|
||||||
|
OpStructHeadUint OpType = 61
|
||||||
|
OpStructHeadOmitEmptyUint OpType = 62
|
||||||
|
OpStructPtrHeadUint OpType = 63
|
||||||
|
OpStructPtrHeadOmitEmptyUint OpType = 64
|
||||||
|
OpStructHeadFloat32 OpType = 65
|
||||||
|
OpStructHeadOmitEmptyFloat32 OpType = 66
|
||||||
|
OpStructPtrHeadFloat32 OpType = 67
|
||||||
|
OpStructPtrHeadOmitEmptyFloat32 OpType = 68
|
||||||
|
OpStructHeadFloat64 OpType = 69
|
||||||
|
OpStructHeadOmitEmptyFloat64 OpType = 70
|
||||||
|
OpStructPtrHeadFloat64 OpType = 71
|
||||||
|
OpStructPtrHeadOmitEmptyFloat64 OpType = 72
|
||||||
|
OpStructHeadBool OpType = 73
|
||||||
|
OpStructHeadOmitEmptyBool OpType = 74
|
||||||
|
OpStructPtrHeadBool OpType = 75
|
||||||
|
OpStructPtrHeadOmitEmptyBool OpType = 76
|
||||||
|
OpStructHeadString OpType = 77
|
||||||
|
OpStructHeadOmitEmptyString OpType = 78
|
||||||
|
OpStructPtrHeadString OpType = 79
|
||||||
|
OpStructPtrHeadOmitEmptyString OpType = 80
|
||||||
|
OpStructHeadBytes OpType = 81
|
||||||
|
OpStructHeadOmitEmptyBytes OpType = 82
|
||||||
|
OpStructPtrHeadBytes OpType = 83
|
||||||
|
OpStructPtrHeadOmitEmptyBytes OpType = 84
|
||||||
|
OpStructHeadNumber OpType = 85
|
||||||
|
OpStructHeadOmitEmptyNumber OpType = 86
|
||||||
|
OpStructPtrHeadNumber OpType = 87
|
||||||
|
OpStructPtrHeadOmitEmptyNumber OpType = 88
|
||||||
|
OpStructHeadArray OpType = 89
|
||||||
|
OpStructHeadOmitEmptyArray OpType = 90
|
||||||
|
OpStructPtrHeadArray OpType = 91
|
||||||
|
OpStructPtrHeadOmitEmptyArray OpType = 92
|
||||||
|
OpStructHeadMap OpType = 93
|
||||||
|
OpStructHeadOmitEmptyMap OpType = 94
|
||||||
|
OpStructPtrHeadMap OpType = 95
|
||||||
|
OpStructPtrHeadOmitEmptyMap OpType = 96
|
||||||
|
OpStructHeadSlice OpType = 97
|
||||||
|
OpStructHeadOmitEmptySlice OpType = 98
|
||||||
|
OpStructPtrHeadSlice OpType = 99
|
||||||
|
OpStructPtrHeadOmitEmptySlice OpType = 100
|
||||||
|
OpStructHeadStruct OpType = 101
|
||||||
|
OpStructHeadOmitEmptyStruct OpType = 102
|
||||||
|
OpStructPtrHeadStruct OpType = 103
|
||||||
|
OpStructPtrHeadOmitEmptyStruct OpType = 104
|
||||||
|
OpStructHeadMarshalJSON OpType = 105
|
||||||
|
OpStructHeadOmitEmptyMarshalJSON OpType = 106
|
||||||
|
OpStructPtrHeadMarshalJSON OpType = 107
|
||||||
|
OpStructPtrHeadOmitEmptyMarshalJSON OpType = 108
|
||||||
|
OpStructHeadMarshalText OpType = 109
|
||||||
|
OpStructHeadOmitEmptyMarshalText OpType = 110
|
||||||
|
OpStructPtrHeadMarshalText OpType = 111
|
||||||
|
OpStructPtrHeadOmitEmptyMarshalText OpType = 112
|
||||||
|
OpStructHeadIntString OpType = 113
|
||||||
|
OpStructHeadOmitEmptyIntString OpType = 114
|
||||||
|
OpStructPtrHeadIntString OpType = 115
|
||||||
|
OpStructPtrHeadOmitEmptyIntString OpType = 116
|
||||||
|
OpStructHeadUintString OpType = 117
|
||||||
|
OpStructHeadOmitEmptyUintString OpType = 118
|
||||||
|
OpStructPtrHeadUintString OpType = 119
|
||||||
|
OpStructPtrHeadOmitEmptyUintString OpType = 120
|
||||||
|
OpStructHeadFloat32String OpType = 121
|
||||||
|
OpStructHeadOmitEmptyFloat32String OpType = 122
|
||||||
|
OpStructPtrHeadFloat32String OpType = 123
|
||||||
|
OpStructPtrHeadOmitEmptyFloat32String OpType = 124
|
||||||
|
OpStructHeadFloat64String OpType = 125
|
||||||
|
OpStructHeadOmitEmptyFloat64String OpType = 126
|
||||||
|
OpStructPtrHeadFloat64String OpType = 127
|
||||||
|
OpStructPtrHeadOmitEmptyFloat64String OpType = 128
|
||||||
|
OpStructHeadBoolString OpType = 129
|
||||||
|
OpStructHeadOmitEmptyBoolString OpType = 130
|
||||||
|
OpStructPtrHeadBoolString OpType = 131
|
||||||
|
OpStructPtrHeadOmitEmptyBoolString OpType = 132
|
||||||
|
OpStructHeadStringString OpType = 133
|
||||||
|
OpStructHeadOmitEmptyStringString OpType = 134
|
||||||
|
OpStructPtrHeadStringString OpType = 135
|
||||||
|
OpStructPtrHeadOmitEmptyStringString OpType = 136
|
||||||
|
OpStructHeadNumberString OpType = 137
|
||||||
|
OpStructHeadOmitEmptyNumberString OpType = 138
|
||||||
|
OpStructPtrHeadNumberString OpType = 139
|
||||||
|
OpStructPtrHeadOmitEmptyNumberString OpType = 140
|
||||||
|
OpStructHeadIntPtr OpType = 141
|
||||||
|
OpStructHeadOmitEmptyIntPtr OpType = 142
|
||||||
|
OpStructPtrHeadIntPtr OpType = 143
|
||||||
|
OpStructPtrHeadOmitEmptyIntPtr OpType = 144
|
||||||
|
OpStructHeadUintPtr OpType = 145
|
||||||
|
OpStructHeadOmitEmptyUintPtr OpType = 146
|
||||||
|
OpStructPtrHeadUintPtr OpType = 147
|
||||||
|
OpStructPtrHeadOmitEmptyUintPtr OpType = 148
|
||||||
|
OpStructHeadFloat32Ptr OpType = 149
|
||||||
|
OpStructHeadOmitEmptyFloat32Ptr OpType = 150
|
||||||
|
OpStructPtrHeadFloat32Ptr OpType = 151
|
||||||
|
OpStructPtrHeadOmitEmptyFloat32Ptr OpType = 152
|
||||||
|
OpStructHeadFloat64Ptr OpType = 153
|
||||||
|
OpStructHeadOmitEmptyFloat64Ptr OpType = 154
|
||||||
|
OpStructPtrHeadFloat64Ptr OpType = 155
|
||||||
|
OpStructPtrHeadOmitEmptyFloat64Ptr OpType = 156
|
||||||
|
OpStructHeadBoolPtr OpType = 157
|
||||||
|
OpStructHeadOmitEmptyBoolPtr OpType = 158
|
||||||
|
OpStructPtrHeadBoolPtr OpType = 159
|
||||||
|
OpStructPtrHeadOmitEmptyBoolPtr OpType = 160
|
||||||
|
OpStructHeadStringPtr OpType = 161
|
||||||
|
OpStructHeadOmitEmptyStringPtr OpType = 162
|
||||||
|
OpStructPtrHeadStringPtr OpType = 163
|
||||||
|
OpStructPtrHeadOmitEmptyStringPtr OpType = 164
|
||||||
|
OpStructHeadBytesPtr OpType = 165
|
||||||
|
OpStructHeadOmitEmptyBytesPtr OpType = 166
|
||||||
|
OpStructPtrHeadBytesPtr OpType = 167
|
||||||
|
OpStructPtrHeadOmitEmptyBytesPtr OpType = 168
|
||||||
|
OpStructHeadNumberPtr OpType = 169
|
||||||
|
OpStructHeadOmitEmptyNumberPtr OpType = 170
|
||||||
|
OpStructPtrHeadNumberPtr OpType = 171
|
||||||
|
OpStructPtrHeadOmitEmptyNumberPtr OpType = 172
|
||||||
|
OpStructHeadArrayPtr OpType = 173
|
||||||
|
OpStructHeadOmitEmptyArrayPtr OpType = 174
|
||||||
|
OpStructPtrHeadArrayPtr OpType = 175
|
||||||
|
OpStructPtrHeadOmitEmptyArrayPtr OpType = 176
|
||||||
|
OpStructHeadMapPtr OpType = 177
|
||||||
|
OpStructHeadOmitEmptyMapPtr OpType = 178
|
||||||
|
OpStructPtrHeadMapPtr OpType = 179
|
||||||
|
OpStructPtrHeadOmitEmptyMapPtr OpType = 180
|
||||||
|
OpStructHeadSlicePtr OpType = 181
|
||||||
|
OpStructHeadOmitEmptySlicePtr OpType = 182
|
||||||
|
OpStructPtrHeadSlicePtr OpType = 183
|
||||||
|
OpStructPtrHeadOmitEmptySlicePtr OpType = 184
|
||||||
|
OpStructHeadMarshalJSONPtr OpType = 185
|
||||||
|
OpStructHeadOmitEmptyMarshalJSONPtr OpType = 186
|
||||||
|
OpStructPtrHeadMarshalJSONPtr OpType = 187
|
||||||
|
OpStructPtrHeadOmitEmptyMarshalJSONPtr OpType = 188
|
||||||
|
OpStructHeadMarshalTextPtr OpType = 189
|
||||||
|
OpStructHeadOmitEmptyMarshalTextPtr OpType = 190
|
||||||
|
OpStructPtrHeadMarshalTextPtr OpType = 191
|
||||||
|
OpStructPtrHeadOmitEmptyMarshalTextPtr OpType = 192
|
||||||
|
OpStructHeadInterfacePtr OpType = 193
|
||||||
|
OpStructHeadOmitEmptyInterfacePtr OpType = 194
|
||||||
|
OpStructPtrHeadInterfacePtr OpType = 195
|
||||||
|
OpStructPtrHeadOmitEmptyInterfacePtr OpType = 196
|
||||||
|
OpStructHeadIntPtrString OpType = 197
|
||||||
|
OpStructHeadOmitEmptyIntPtrString OpType = 198
|
||||||
|
OpStructPtrHeadIntPtrString OpType = 199
|
||||||
|
OpStructPtrHeadOmitEmptyIntPtrString OpType = 200
|
||||||
|
OpStructHeadUintPtrString OpType = 201
|
||||||
|
OpStructHeadOmitEmptyUintPtrString OpType = 202
|
||||||
|
OpStructPtrHeadUintPtrString OpType = 203
|
||||||
|
OpStructPtrHeadOmitEmptyUintPtrString OpType = 204
|
||||||
|
OpStructHeadFloat32PtrString OpType = 205
|
||||||
|
OpStructHeadOmitEmptyFloat32PtrString OpType = 206
|
||||||
|
OpStructPtrHeadFloat32PtrString OpType = 207
|
||||||
|
OpStructPtrHeadOmitEmptyFloat32PtrString OpType = 208
|
||||||
|
OpStructHeadFloat64PtrString OpType = 209
|
||||||
|
OpStructHeadOmitEmptyFloat64PtrString OpType = 210
|
||||||
|
OpStructPtrHeadFloat64PtrString OpType = 211
|
||||||
|
OpStructPtrHeadOmitEmptyFloat64PtrString OpType = 212
|
||||||
|
OpStructHeadBoolPtrString OpType = 213
|
||||||
|
OpStructHeadOmitEmptyBoolPtrString OpType = 214
|
||||||
|
OpStructPtrHeadBoolPtrString OpType = 215
|
||||||
|
OpStructPtrHeadOmitEmptyBoolPtrString OpType = 216
|
||||||
|
OpStructHeadStringPtrString OpType = 217
|
||||||
|
OpStructHeadOmitEmptyStringPtrString OpType = 218
|
||||||
|
OpStructPtrHeadStringPtrString OpType = 219
|
||||||
|
OpStructPtrHeadOmitEmptyStringPtrString OpType = 220
|
||||||
|
OpStructHeadNumberPtrString OpType = 221
|
||||||
|
OpStructHeadOmitEmptyNumberPtrString OpType = 222
|
||||||
|
OpStructPtrHeadNumberPtrString OpType = 223
|
||||||
|
OpStructPtrHeadOmitEmptyNumberPtrString OpType = 224
|
||||||
|
OpStructHead OpType = 225
|
||||||
|
OpStructHeadOmitEmpty OpType = 226
|
||||||
|
OpStructPtrHead OpType = 227
|
||||||
|
OpStructPtrHeadOmitEmpty OpType = 228
|
||||||
|
OpStructFieldInt OpType = 229
|
||||||
|
OpStructFieldOmitEmptyInt OpType = 230
|
||||||
|
OpStructEndInt OpType = 231
|
||||||
|
OpStructEndOmitEmptyInt OpType = 232
|
||||||
|
OpStructFieldUint OpType = 233
|
||||||
|
OpStructFieldOmitEmptyUint OpType = 234
|
||||||
|
OpStructEndUint OpType = 235
|
||||||
|
OpStructEndOmitEmptyUint OpType = 236
|
||||||
|
OpStructFieldFloat32 OpType = 237
|
||||||
|
OpStructFieldOmitEmptyFloat32 OpType = 238
|
||||||
|
OpStructEndFloat32 OpType = 239
|
||||||
|
OpStructEndOmitEmptyFloat32 OpType = 240
|
||||||
|
OpStructFieldFloat64 OpType = 241
|
||||||
|
OpStructFieldOmitEmptyFloat64 OpType = 242
|
||||||
|
OpStructEndFloat64 OpType = 243
|
||||||
|
OpStructEndOmitEmptyFloat64 OpType = 244
|
||||||
|
OpStructFieldBool OpType = 245
|
||||||
|
OpStructFieldOmitEmptyBool OpType = 246
|
||||||
|
OpStructEndBool OpType = 247
|
||||||
|
OpStructEndOmitEmptyBool OpType = 248
|
||||||
|
OpStructFieldString OpType = 249
|
||||||
|
OpStructFieldOmitEmptyString OpType = 250
|
||||||
|
OpStructEndString OpType = 251
|
||||||
|
OpStructEndOmitEmptyString OpType = 252
|
||||||
|
OpStructFieldBytes OpType = 253
|
||||||
|
OpStructFieldOmitEmptyBytes OpType = 254
|
||||||
|
OpStructEndBytes OpType = 255
|
||||||
|
OpStructEndOmitEmptyBytes OpType = 256
|
||||||
|
OpStructFieldNumber OpType = 257
|
||||||
|
OpStructFieldOmitEmptyNumber OpType = 258
|
||||||
|
OpStructEndNumber OpType = 259
|
||||||
|
OpStructEndOmitEmptyNumber OpType = 260
|
||||||
|
OpStructFieldArray OpType = 261
|
||||||
|
OpStructFieldOmitEmptyArray OpType = 262
|
||||||
|
OpStructEndArray OpType = 263
|
||||||
|
OpStructEndOmitEmptyArray OpType = 264
|
||||||
|
OpStructFieldMap OpType = 265
|
||||||
|
OpStructFieldOmitEmptyMap OpType = 266
|
||||||
|
OpStructEndMap OpType = 267
|
||||||
|
OpStructEndOmitEmptyMap OpType = 268
|
||||||
|
OpStructFieldSlice OpType = 269
|
||||||
|
OpStructFieldOmitEmptySlice OpType = 270
|
||||||
|
OpStructEndSlice OpType = 271
|
||||||
|
OpStructEndOmitEmptySlice OpType = 272
|
||||||
|
OpStructFieldStruct OpType = 273
|
||||||
|
OpStructFieldOmitEmptyStruct OpType = 274
|
||||||
|
OpStructEndStruct OpType = 275
|
||||||
|
OpStructEndOmitEmptyStruct OpType = 276
|
||||||
|
OpStructFieldMarshalJSON OpType = 277
|
||||||
|
OpStructFieldOmitEmptyMarshalJSON OpType = 278
|
||||||
|
OpStructEndMarshalJSON OpType = 279
|
||||||
|
OpStructEndOmitEmptyMarshalJSON OpType = 280
|
||||||
|
OpStructFieldMarshalText OpType = 281
|
||||||
|
OpStructFieldOmitEmptyMarshalText OpType = 282
|
||||||
|
OpStructEndMarshalText OpType = 283
|
||||||
|
OpStructEndOmitEmptyMarshalText OpType = 284
|
||||||
|
OpStructFieldIntString OpType = 285
|
||||||
|
OpStructFieldOmitEmptyIntString OpType = 286
|
||||||
|
OpStructEndIntString OpType = 287
|
||||||
|
OpStructEndOmitEmptyIntString OpType = 288
|
||||||
|
OpStructFieldUintString OpType = 289
|
||||||
|
OpStructFieldOmitEmptyUintString OpType = 290
|
||||||
|
OpStructEndUintString OpType = 291
|
||||||
|
OpStructEndOmitEmptyUintString OpType = 292
|
||||||
|
OpStructFieldFloat32String OpType = 293
|
||||||
|
OpStructFieldOmitEmptyFloat32String OpType = 294
|
||||||
|
OpStructEndFloat32String OpType = 295
|
||||||
|
OpStructEndOmitEmptyFloat32String OpType = 296
|
||||||
|
OpStructFieldFloat64String OpType = 297
|
||||||
|
OpStructFieldOmitEmptyFloat64String OpType = 298
|
||||||
|
OpStructEndFloat64String OpType = 299
|
||||||
|
OpStructEndOmitEmptyFloat64String OpType = 300
|
||||||
|
OpStructFieldBoolString OpType = 301
|
||||||
|
OpStructFieldOmitEmptyBoolString OpType = 302
|
||||||
|
OpStructEndBoolString OpType = 303
|
||||||
|
OpStructEndOmitEmptyBoolString OpType = 304
|
||||||
|
OpStructFieldStringString OpType = 305
|
||||||
|
OpStructFieldOmitEmptyStringString OpType = 306
|
||||||
|
OpStructEndStringString OpType = 307
|
||||||
|
OpStructEndOmitEmptyStringString OpType = 308
|
||||||
|
OpStructFieldNumberString OpType = 309
|
||||||
|
OpStructFieldOmitEmptyNumberString OpType = 310
|
||||||
|
OpStructEndNumberString OpType = 311
|
||||||
|
OpStructEndOmitEmptyNumberString OpType = 312
|
||||||
|
OpStructFieldIntPtr OpType = 313
|
||||||
|
OpStructFieldOmitEmptyIntPtr OpType = 314
|
||||||
|
OpStructEndIntPtr OpType = 315
|
||||||
|
OpStructEndOmitEmptyIntPtr OpType = 316
|
||||||
|
OpStructFieldUintPtr OpType = 317
|
||||||
|
OpStructFieldOmitEmptyUintPtr OpType = 318
|
||||||
|
OpStructEndUintPtr OpType = 319
|
||||||
|
OpStructEndOmitEmptyUintPtr OpType = 320
|
||||||
|
OpStructFieldFloat32Ptr OpType = 321
|
||||||
|
OpStructFieldOmitEmptyFloat32Ptr OpType = 322
|
||||||
|
OpStructEndFloat32Ptr OpType = 323
|
||||||
|
OpStructEndOmitEmptyFloat32Ptr OpType = 324
|
||||||
|
OpStructFieldFloat64Ptr OpType = 325
|
||||||
|
OpStructFieldOmitEmptyFloat64Ptr OpType = 326
|
||||||
|
OpStructEndFloat64Ptr OpType = 327
|
||||||
|
OpStructEndOmitEmptyFloat64Ptr OpType = 328
|
||||||
|
OpStructFieldBoolPtr OpType = 329
|
||||||
|
OpStructFieldOmitEmptyBoolPtr OpType = 330
|
||||||
|
OpStructEndBoolPtr OpType = 331
|
||||||
|
OpStructEndOmitEmptyBoolPtr OpType = 332
|
||||||
|
OpStructFieldStringPtr OpType = 333
|
||||||
|
OpStructFieldOmitEmptyStringPtr OpType = 334
|
||||||
|
OpStructEndStringPtr OpType = 335
|
||||||
|
OpStructEndOmitEmptyStringPtr OpType = 336
|
||||||
|
OpStructFieldBytesPtr OpType = 337
|
||||||
|
OpStructFieldOmitEmptyBytesPtr OpType = 338
|
||||||
|
OpStructEndBytesPtr OpType = 339
|
||||||
|
OpStructEndOmitEmptyBytesPtr OpType = 340
|
||||||
|
OpStructFieldNumberPtr OpType = 341
|
||||||
|
OpStructFieldOmitEmptyNumberPtr OpType = 342
|
||||||
|
OpStructEndNumberPtr OpType = 343
|
||||||
|
OpStructEndOmitEmptyNumberPtr OpType = 344
|
||||||
|
OpStructFieldArrayPtr OpType = 345
|
||||||
|
OpStructFieldOmitEmptyArrayPtr OpType = 346
|
||||||
|
OpStructEndArrayPtr OpType = 347
|
||||||
|
OpStructEndOmitEmptyArrayPtr OpType = 348
|
||||||
|
OpStructFieldMapPtr OpType = 349
|
||||||
|
OpStructFieldOmitEmptyMapPtr OpType = 350
|
||||||
|
OpStructEndMapPtr OpType = 351
|
||||||
|
OpStructEndOmitEmptyMapPtr OpType = 352
|
||||||
|
OpStructFieldSlicePtr OpType = 353
|
||||||
|
OpStructFieldOmitEmptySlicePtr OpType = 354
|
||||||
|
OpStructEndSlicePtr OpType = 355
|
||||||
|
OpStructEndOmitEmptySlicePtr OpType = 356
|
||||||
|
OpStructFieldMarshalJSONPtr OpType = 357
|
||||||
|
OpStructFieldOmitEmptyMarshalJSONPtr OpType = 358
|
||||||
|
OpStructEndMarshalJSONPtr OpType = 359
|
||||||
|
OpStructEndOmitEmptyMarshalJSONPtr OpType = 360
|
||||||
|
OpStructFieldMarshalTextPtr OpType = 361
|
||||||
|
OpStructFieldOmitEmptyMarshalTextPtr OpType = 362
|
||||||
|
OpStructEndMarshalTextPtr OpType = 363
|
||||||
|
OpStructEndOmitEmptyMarshalTextPtr OpType = 364
|
||||||
|
OpStructFieldInterfacePtr OpType = 365
|
||||||
|
OpStructFieldOmitEmptyInterfacePtr OpType = 366
|
||||||
|
OpStructEndInterfacePtr OpType = 367
|
||||||
|
OpStructEndOmitEmptyInterfacePtr OpType = 368
|
||||||
|
OpStructFieldIntPtrString OpType = 369
|
||||||
|
OpStructFieldOmitEmptyIntPtrString OpType = 370
|
||||||
|
OpStructEndIntPtrString OpType = 371
|
||||||
|
OpStructEndOmitEmptyIntPtrString OpType = 372
|
||||||
|
OpStructFieldUintPtrString OpType = 373
|
||||||
|
OpStructFieldOmitEmptyUintPtrString OpType = 374
|
||||||
|
OpStructEndUintPtrString OpType = 375
|
||||||
|
OpStructEndOmitEmptyUintPtrString OpType = 376
|
||||||
|
OpStructFieldFloat32PtrString OpType = 377
|
||||||
|
OpStructFieldOmitEmptyFloat32PtrString OpType = 378
|
||||||
|
OpStructEndFloat32PtrString OpType = 379
|
||||||
|
OpStructEndOmitEmptyFloat32PtrString OpType = 380
|
||||||
|
OpStructFieldFloat64PtrString OpType = 381
|
||||||
|
OpStructFieldOmitEmptyFloat64PtrString OpType = 382
|
||||||
|
OpStructEndFloat64PtrString OpType = 383
|
||||||
|
OpStructEndOmitEmptyFloat64PtrString OpType = 384
|
||||||
|
OpStructFieldBoolPtrString OpType = 385
|
||||||
|
OpStructFieldOmitEmptyBoolPtrString OpType = 386
|
||||||
|
OpStructEndBoolPtrString OpType = 387
|
||||||
|
OpStructEndOmitEmptyBoolPtrString OpType = 388
|
||||||
|
OpStructFieldStringPtrString OpType = 389
|
||||||
|
OpStructFieldOmitEmptyStringPtrString OpType = 390
|
||||||
|
OpStructEndStringPtrString OpType = 391
|
||||||
|
OpStructEndOmitEmptyStringPtrString OpType = 392
|
||||||
|
OpStructFieldNumberPtrString OpType = 393
|
||||||
|
OpStructFieldOmitEmptyNumberPtrString OpType = 394
|
||||||
|
OpStructEndNumberPtrString OpType = 395
|
||||||
|
OpStructEndOmitEmptyNumberPtrString OpType = 396
|
||||||
|
OpStructField OpType = 397
|
||||||
|
OpStructFieldOmitEmpty OpType = 398
|
||||||
|
OpStructEnd OpType = 399
|
||||||
|
OpStructEndOmitEmpty OpType = 400
|
||||||
|
)
|
||||||
|
|
||||||
|
func (t OpType) String() string {
|
||||||
|
if int(t) >= 401 {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return opTypeStrings[int(t)]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) CodeType() CodeType {
|
||||||
|
if strings.Contains(t.String(), "Struct") {
|
||||||
|
if strings.Contains(t.String(), "End") {
|
||||||
|
return CodeStructEnd
|
||||||
|
}
|
||||||
|
return CodeStructField
|
||||||
|
}
|
||||||
|
switch t {
|
||||||
|
case OpArray, OpArrayPtr:
|
||||||
|
return CodeArrayHead
|
||||||
|
case OpArrayElem:
|
||||||
|
return CodeArrayElem
|
||||||
|
case OpSlice, OpSlicePtr:
|
||||||
|
return CodeSliceHead
|
||||||
|
case OpSliceElem:
|
||||||
|
return CodeSliceElem
|
||||||
|
case OpMap, OpMapPtr:
|
||||||
|
return CodeMapHead
|
||||||
|
case OpMapKey:
|
||||||
|
return CodeMapKey
|
||||||
|
case OpMapValue:
|
||||||
|
return CodeMapValue
|
||||||
|
case OpMapEnd:
|
||||||
|
return CodeMapEnd
|
||||||
|
}
|
||||||
|
|
||||||
|
return CodeOp
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) HeadToPtrHead() OpType {
|
||||||
|
if strings.Index(t.String(), "PtrHead") > 0 {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
idx := strings.Index(t.String(), "Head")
|
||||||
|
if idx == -1 {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
suffix := "PtrHead" + t.String()[idx+len("Head"):]
|
||||||
|
|
||||||
|
const toPtrOffset = 2
|
||||||
|
if strings.Contains(OpType(int(t)+toPtrOffset).String(), suffix) {
|
||||||
|
return OpType(int(t) + toPtrOffset)
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) HeadToOmitEmptyHead() OpType {
|
||||||
|
const toOmitEmptyOffset = 1
|
||||||
|
if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") {
|
||||||
|
return OpType(int(t) + toOmitEmptyOffset)
|
||||||
|
}
|
||||||
|
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) PtrHeadToHead() OpType {
|
||||||
|
idx := strings.Index(t.String(), "Ptr")
|
||||||
|
if idx == -1 {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
suffix := t.String()[idx+len("Ptr"):]
|
||||||
|
|
||||||
|
const toPtrOffset = 2
|
||||||
|
if strings.Contains(OpType(int(t)-toPtrOffset).String(), suffix) {
|
||||||
|
return OpType(int(t) - toPtrOffset)
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) FieldToEnd() OpType {
|
||||||
|
idx := strings.Index(t.String(), "Field")
|
||||||
|
if idx == -1 {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
suffix := t.String()[idx+len("Field"):]
|
||||||
|
if suffix == "" || suffix == "OmitEmpty" {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
const toEndOffset = 2
|
||||||
|
if strings.Contains(OpType(int(t)+toEndOffset).String(), "End"+suffix) {
|
||||||
|
return OpType(int(t) + toEndOffset)
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t OpType) FieldToOmitEmptyField() OpType {
	const toOmitEmptyOffset = 1
	if strings.Contains(OpType(int(t)+toOmitEmptyOffset).String(), "OmitEmpty") {
		return OpType(int(t) + toOmitEmptyOffset)
	}
	return t
}
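The helpers above rely on a layout invariant of the generated opcode table: within each struct-field opcode family, the OmitEmpty variant sits at offset +1 and the End variant at offset +2, so a conversion is "add the offset, then confirm the resulting name still carries the expected suffix". A minimal, hypothetical sketch of that pattern (miniOp and its names are illustrative only, not part of go-json):

package main

import (
	"fmt"
	"strings"
)

// miniOp mimics the OpType layout assumed above: the OmitEmpty variant of a
// Field opcode is laid out at +1 and the End variant at +2, so conversions are
// offset jumps verified against the opcode's name.
type miniOp int

const (
	opFieldInt miniOp = iota
	opFieldOmitEmptyInt
	opEndInt
	opEndOmitEmptyInt
)

var miniOpNames = []string{"FieldInt", "FieldOmitEmptyInt", "EndInt", "EndOmitEmptyInt"}

func (t miniOp) String() string { return miniOpNames[t] }

// fieldToEnd mirrors OpType.FieldToEnd: jump +2, then confirm the suffix survived.
func (t miniOp) fieldToEnd() miniOp {
	idx := strings.Index(t.String(), "Field")
	if idx == -1 {
		return t
	}
	suffix := t.String()[idx+len("Field"):]
	const toEndOffset = 2
	if int(t)+toEndOffset < len(miniOpNames) &&
		strings.Contains(miniOpNames[int(t)+toEndOffset], "End"+suffix) {
		return t + toEndOffset
	}
	return t
}

func main() {
	fmt.Println(opFieldInt.fieldToEnd()) // EndInt
}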
640
vendor/github.com/goccy/go-json/internal/encoder/string.go
generated
vendored
Normal file
@ -0,0 +1,640 @@
package encoder

import (
	"math/bits"
	"reflect"
	"unicode/utf8"
	"unsafe"
)

const (
	lsb = 0x0101010101010101
	msb = 0x8080808080808080
)

var needEscapeWithHTML = [256]bool{
|
||||||
|
'"': true,
|
||||||
|
'&': true,
|
||||||
|
'<': true,
|
||||||
|
'>': true,
|
||||||
|
'\\': true,
|
||||||
|
0x00: true,
|
||||||
|
0x01: true,
|
||||||
|
0x02: true,
|
||||||
|
0x03: true,
|
||||||
|
0x04: true,
|
||||||
|
0x05: true,
|
||||||
|
0x06: true,
|
||||||
|
0x07: true,
|
||||||
|
0x08: true,
|
||||||
|
0x09: true,
|
||||||
|
0x0a: true,
|
||||||
|
0x0b: true,
|
||||||
|
0x0c: true,
|
||||||
|
0x0d: true,
|
||||||
|
0x0e: true,
|
||||||
|
0x0f: true,
|
||||||
|
0x10: true,
|
||||||
|
0x11: true,
|
||||||
|
0x12: true,
|
||||||
|
0x13: true,
|
||||||
|
0x14: true,
|
||||||
|
0x15: true,
|
||||||
|
0x16: true,
|
||||||
|
0x17: true,
|
||||||
|
0x18: true,
|
||||||
|
0x19: true,
|
||||||
|
0x1a: true,
|
||||||
|
0x1b: true,
|
||||||
|
0x1c: true,
|
||||||
|
0x1d: true,
|
||||||
|
0x1e: true,
|
||||||
|
0x1f: true,
|
||||||
|
/* 0x20 - 0x7f */
|
||||||
|
0x80: true,
|
||||||
|
0x81: true,
|
||||||
|
0x82: true,
|
||||||
|
0x83: true,
|
||||||
|
0x84: true,
|
||||||
|
0x85: true,
|
||||||
|
0x86: true,
|
||||||
|
0x87: true,
|
||||||
|
0x88: true,
|
||||||
|
0x89: true,
|
||||||
|
0x8a: true,
|
||||||
|
0x8b: true,
|
||||||
|
0x8c: true,
|
||||||
|
0x8d: true,
|
||||||
|
0x8e: true,
|
||||||
|
0x8f: true,
|
||||||
|
0x90: true,
|
||||||
|
0x91: true,
|
||||||
|
0x92: true,
|
||||||
|
0x93: true,
|
||||||
|
0x94: true,
|
||||||
|
0x95: true,
|
||||||
|
0x96: true,
|
||||||
|
0x97: true,
|
||||||
|
0x98: true,
|
||||||
|
0x99: true,
|
||||||
|
0x9a: true,
|
||||||
|
0x9b: true,
|
||||||
|
0x9c: true,
|
||||||
|
0x9d: true,
|
||||||
|
0x9e: true,
|
||||||
|
0x9f: true,
|
||||||
|
0xa0: true,
|
||||||
|
0xa1: true,
|
||||||
|
0xa2: true,
|
||||||
|
0xa3: true,
|
||||||
|
0xa4: true,
|
||||||
|
0xa5: true,
|
||||||
|
0xa6: true,
|
||||||
|
0xa7: true,
|
||||||
|
0xa8: true,
|
||||||
|
0xa9: true,
|
||||||
|
0xaa: true,
|
||||||
|
0xab: true,
|
||||||
|
0xac: true,
|
||||||
|
0xad: true,
|
||||||
|
0xae: true,
|
||||||
|
0xaf: true,
|
||||||
|
0xb0: true,
|
||||||
|
0xb1: true,
|
||||||
|
0xb2: true,
|
||||||
|
0xb3: true,
|
||||||
|
0xb4: true,
|
||||||
|
0xb5: true,
|
||||||
|
0xb6: true,
|
||||||
|
0xb7: true,
|
||||||
|
0xb8: true,
|
||||||
|
0xb9: true,
|
||||||
|
0xba: true,
|
||||||
|
0xbb: true,
|
||||||
|
0xbc: true,
|
||||||
|
0xbd: true,
|
||||||
|
0xbe: true,
|
||||||
|
0xbf: true,
|
||||||
|
0xc0: true,
|
||||||
|
0xc1: true,
|
||||||
|
0xc2: true,
|
||||||
|
0xc3: true,
|
||||||
|
0xc4: true,
|
||||||
|
0xc5: true,
|
||||||
|
0xc6: true,
|
||||||
|
0xc7: true,
|
||||||
|
0xc8: true,
|
||||||
|
0xc9: true,
|
||||||
|
0xca: true,
|
||||||
|
0xcb: true,
|
||||||
|
0xcc: true,
|
||||||
|
0xcd: true,
|
||||||
|
0xce: true,
|
||||||
|
0xcf: true,
|
||||||
|
0xd0: true,
|
||||||
|
0xd1: true,
|
||||||
|
0xd2: true,
|
||||||
|
0xd3: true,
|
||||||
|
0xd4: true,
|
||||||
|
0xd5: true,
|
||||||
|
0xd6: true,
|
||||||
|
0xd7: true,
|
||||||
|
0xd8: true,
|
||||||
|
0xd9: true,
|
||||||
|
0xda: true,
|
||||||
|
0xdb: true,
|
||||||
|
0xdc: true,
|
||||||
|
0xdd: true,
|
||||||
|
0xde: true,
|
||||||
|
0xdf: true,
|
||||||
|
0xe0: true,
|
||||||
|
0xe1: true,
|
||||||
|
0xe2: true,
|
||||||
|
0xe3: true,
|
||||||
|
0xe4: true,
|
||||||
|
0xe5: true,
|
||||||
|
0xe6: true,
|
||||||
|
0xe7: true,
|
||||||
|
0xe8: true,
|
||||||
|
0xe9: true,
|
||||||
|
0xea: true,
|
||||||
|
0xeb: true,
|
||||||
|
0xec: true,
|
||||||
|
0xed: true,
|
||||||
|
0xee: true,
|
||||||
|
0xef: true,
|
||||||
|
0xf0: true,
|
||||||
|
0xf1: true,
|
||||||
|
0xf2: true,
|
||||||
|
0xf3: true,
|
||||||
|
0xf4: true,
|
||||||
|
0xf5: true,
|
||||||
|
0xf6: true,
|
||||||
|
0xf7: true,
|
||||||
|
0xf8: true,
|
||||||
|
0xf9: true,
|
||||||
|
0xfa: true,
|
||||||
|
0xfb: true,
|
||||||
|
0xfc: true,
|
||||||
|
0xfd: true,
|
||||||
|
0xfe: true,
|
||||||
|
0xff: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
var needEscape = [256]bool{
|
||||||
|
'"': true,
|
||||||
|
'\\': true,
|
||||||
|
0x00: true,
|
||||||
|
0x01: true,
|
||||||
|
0x02: true,
|
||||||
|
0x03: true,
|
||||||
|
0x04: true,
|
||||||
|
0x05: true,
|
||||||
|
0x06: true,
|
||||||
|
0x07: true,
|
||||||
|
0x08: true,
|
||||||
|
0x09: true,
|
||||||
|
0x0a: true,
|
||||||
|
0x0b: true,
|
||||||
|
0x0c: true,
|
||||||
|
0x0d: true,
|
||||||
|
0x0e: true,
|
||||||
|
0x0f: true,
|
||||||
|
0x10: true,
|
||||||
|
0x11: true,
|
||||||
|
0x12: true,
|
||||||
|
0x13: true,
|
||||||
|
0x14: true,
|
||||||
|
0x15: true,
|
||||||
|
0x16: true,
|
||||||
|
0x17: true,
|
||||||
|
0x18: true,
|
||||||
|
0x19: true,
|
||||||
|
0x1a: true,
|
||||||
|
0x1b: true,
|
||||||
|
0x1c: true,
|
||||||
|
0x1d: true,
|
||||||
|
0x1e: true,
|
||||||
|
0x1f: true,
|
||||||
|
/* 0x20 - 0x7f */
|
||||||
|
0x80: true,
|
||||||
|
0x81: true,
|
||||||
|
0x82: true,
|
||||||
|
0x83: true,
|
||||||
|
0x84: true,
|
||||||
|
0x85: true,
|
||||||
|
0x86: true,
|
||||||
|
0x87: true,
|
||||||
|
0x88: true,
|
||||||
|
0x89: true,
|
||||||
|
0x8a: true,
|
||||||
|
0x8b: true,
|
||||||
|
0x8c: true,
|
||||||
|
0x8d: true,
|
||||||
|
0x8e: true,
|
||||||
|
0x8f: true,
|
||||||
|
0x90: true,
|
||||||
|
0x91: true,
|
||||||
|
0x92: true,
|
||||||
|
0x93: true,
|
||||||
|
0x94: true,
|
||||||
|
0x95: true,
|
||||||
|
0x96: true,
|
||||||
|
0x97: true,
|
||||||
|
0x98: true,
|
||||||
|
0x99: true,
|
||||||
|
0x9a: true,
|
||||||
|
0x9b: true,
|
||||||
|
0x9c: true,
|
||||||
|
0x9d: true,
|
||||||
|
0x9e: true,
|
||||||
|
0x9f: true,
|
||||||
|
0xa0: true,
|
||||||
|
0xa1: true,
|
||||||
|
0xa2: true,
|
||||||
|
0xa3: true,
|
||||||
|
0xa4: true,
|
||||||
|
0xa5: true,
|
||||||
|
0xa6: true,
|
||||||
|
0xa7: true,
|
||||||
|
0xa8: true,
|
||||||
|
0xa9: true,
|
||||||
|
0xaa: true,
|
||||||
|
0xab: true,
|
||||||
|
0xac: true,
|
||||||
|
0xad: true,
|
||||||
|
0xae: true,
|
||||||
|
0xaf: true,
|
||||||
|
0xb0: true,
|
||||||
|
0xb1: true,
|
||||||
|
0xb2: true,
|
||||||
|
0xb3: true,
|
||||||
|
0xb4: true,
|
||||||
|
0xb5: true,
|
||||||
|
0xb6: true,
|
||||||
|
0xb7: true,
|
||||||
|
0xb8: true,
|
||||||
|
0xb9: true,
|
||||||
|
0xba: true,
|
||||||
|
0xbb: true,
|
||||||
|
0xbc: true,
|
||||||
|
0xbd: true,
|
||||||
|
0xbe: true,
|
||||||
|
0xbf: true,
|
||||||
|
0xc0: true,
|
||||||
|
0xc1: true,
|
||||||
|
0xc2: true,
|
||||||
|
0xc3: true,
|
||||||
|
0xc4: true,
|
||||||
|
0xc5: true,
|
||||||
|
0xc6: true,
|
||||||
|
0xc7: true,
|
||||||
|
0xc8: true,
|
||||||
|
0xc9: true,
|
||||||
|
0xca: true,
|
||||||
|
0xcb: true,
|
||||||
|
0xcc: true,
|
||||||
|
0xcd: true,
|
||||||
|
0xce: true,
|
||||||
|
0xcf: true,
|
||||||
|
0xd0: true,
|
||||||
|
0xd1: true,
|
||||||
|
0xd2: true,
|
||||||
|
0xd3: true,
|
||||||
|
0xd4: true,
|
||||||
|
0xd5: true,
|
||||||
|
0xd6: true,
|
||||||
|
0xd7: true,
|
||||||
|
0xd8: true,
|
||||||
|
0xd9: true,
|
||||||
|
0xda: true,
|
||||||
|
0xdb: true,
|
||||||
|
0xdc: true,
|
||||||
|
0xdd: true,
|
||||||
|
0xde: true,
|
||||||
|
0xdf: true,
|
||||||
|
0xe0: true,
|
||||||
|
0xe1: true,
|
||||||
|
0xe2: true,
|
||||||
|
0xe3: true,
|
||||||
|
0xe4: true,
|
||||||
|
0xe5: true,
|
||||||
|
0xe6: true,
|
||||||
|
0xe7: true,
|
||||||
|
0xe8: true,
|
||||||
|
0xe9: true,
|
||||||
|
0xea: true,
|
||||||
|
0xeb: true,
|
||||||
|
0xec: true,
|
||||||
|
0xed: true,
|
||||||
|
0xee: true,
|
||||||
|
0xef: true,
|
||||||
|
0xf0: true,
|
||||||
|
0xf1: true,
|
||||||
|
0xf2: true,
|
||||||
|
0xf3: true,
|
||||||
|
0xf4: true,
|
||||||
|
0xf5: true,
|
||||||
|
0xf6: true,
|
||||||
|
0xf7: true,
|
||||||
|
0xf8: true,
|
||||||
|
0xf9: true,
|
||||||
|
0xfa: true,
|
||||||
|
0xfb: true,
|
||||||
|
0xfc: true,
|
||||||
|
0xfd: true,
|
||||||
|
0xfe: true,
|
||||||
|
0xff: true,
|
||||||
|
}

var hex = "0123456789abcdef"

// escapeIndex finds the index of the first char in `s` that requires escaping.
// A char requires escaping if it's outside of the range of [0x20, 0x7F] or if
// it includes a double quote or backslash.
// If no chars in `s` require escaping, the return value is -1.
func escapeIndex(s string) int {
	chunks := stringToUint64Slice(s)
	for _, n := range chunks {
		// combine masks before checking for the MSB of each byte. We include
		// `n` in the mask to check whether any of the *input* byte MSBs were
		// set (i.e. the byte was outside the ASCII range).
		mask := n | below(n, 0x20) | contains(n, '"') | contains(n, '\\')
		if (mask & msb) != 0 {
			return bits.TrailingZeros64(mask&msb) / 8
		}
	}

	valLen := len(s)
	for i := len(chunks) * 8; i < valLen; i++ {
		if needEscape[s[i]] {
			return i
		}
	}

	return -1
}

// below returns a mask that can be used to determine if any of the bytes
// in `n` are below `b`. If a byte's MSB is set in the mask then that byte was
// below `b`. The result is only valid if `b`, and each byte in `n`, is below
// 0x80.
func below(n uint64, b byte) uint64 {
	return n - expand(b)
}

// contains returns a mask that can be used to determine if any of the
// bytes in `n` are equal to `b`. If a byte's MSB is set in the mask then
// that byte is equal to `b`. The result is only valid if `b`, and each
// byte in `n`, is below 0x80.
func contains(n uint64, b byte) uint64 {
	return (n ^ expand(b)) - lsb
}

// expand puts the specified byte into each of the 8 bytes of a uint64.
func expand(b byte) uint64 {
	return lsb * uint64(b)
}

//nolint:govet
func stringToUint64Slice(s string) []uint64 {
	return *(*[]uint64)(unsafe.Pointer(&reflect.SliceHeader{
		Data: ((*reflect.StringHeader)(unsafe.Pointer(&s))).Data,
		Len:  len(s) / 8,
		Cap:  len(s) / 8,
	}))
}

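The word-at-a-time trick used by escapeIndex can be hard to see from the mask algebra alone. Below is a small, self-contained sketch (hypothetical helper name, fixed 8-byte input for brevity) of how the expand/below/contains terms combine into one mask whose per-byte MSBs flag positions that need escaping; only the quote and control-byte checks are kept here.

package main

import (
	"fmt"
	"math/bits"
)

const (
	lsb = 0x0101010101010101
	msb = 0x8080808080808080
)

// firstQuoteOrControl scans one 8-byte chunk the same way escapeIndex does:
// `n - lsb*0x20` flags bytes below 0x20, `(n ^ lsb*'"') - lsb` flags bytes equal
// to '"', and `n` itself flags bytes with the high bit set (non-ASCII). The
// lane tricks are only valid while every byte involved stays below 0x80.
func firstQuoteOrControl(s string) int {
	// demo assumes len(s) == 8; the real code iterates over 8-byte chunks
	var n uint64
	for i := 7; i >= 0; i-- {
		n = n<<8 | uint64(s[i]) // little-endian load, matching the unsafe []uint64 view
	}
	mask := n | (n - lsb*0x20) | ((n ^ lsb*'"') - lsb)
	if mask&msb == 0 {
		return -1 // nothing in this chunk needs escaping
	}
	return bits.TrailingZeros64(mask&msb) / 8 // index of the first flagged byte
}

func main() {
	fmt.Println(firstQuoteOrControl(`abc"defg`)) // 3: the double quote
	fmt.Println(firstQuoteOrControl("abcdefgh")) // -1
}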
func AppendString(ctx *RuntimeContext, buf []byte, s string) []byte {
|
||||||
|
if ctx.Option.Flag&HTMLEscapeOption == 0 {
|
||||||
|
return appendString(buf, s)
|
||||||
|
}
|
||||||
|
valLen := len(s)
|
||||||
|
if valLen == 0 {
|
||||||
|
return append(buf, `""`...)
|
||||||
|
}
|
||||||
|
buf = append(buf, '"')
|
||||||
|
var (
|
||||||
|
i, j int
|
||||||
|
)
|
||||||
|
if valLen >= 8 {
|
||||||
|
chunks := stringToUint64Slice(s)
|
||||||
|
for _, n := range chunks {
|
||||||
|
// combine masks before checking for the MSB of each byte. We include
|
||||||
|
// `n` in the mask to check whether any of the *input* byte MSBs were
|
||||||
|
// set (i.e. the byte was outside the ASCII range).
|
||||||
|
mask := n | (n - (lsb * 0x20)) |
|
||||||
|
((n ^ (lsb * '"')) - lsb) |
|
||||||
|
((n ^ (lsb * '\\')) - lsb) |
|
||||||
|
((n ^ (lsb * '<')) - lsb) |
|
||||||
|
((n ^ (lsb * '>')) - lsb) |
|
||||||
|
((n ^ (lsb * '&')) - lsb)
|
||||||
|
if (mask & msb) != 0 {
|
||||||
|
j = bits.TrailingZeros64(mask&msb) / 8
|
||||||
|
goto ESCAPE_END
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for i := len(chunks) * 8; i < valLen; i++ {
|
||||||
|
if needEscapeWithHTML[s[i]] {
|
||||||
|
j = i
|
||||||
|
goto ESCAPE_END
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// no found any escape characters.
|
||||||
|
return append(append(buf, s...), '"')
|
||||||
|
}
|
||||||
|
ESCAPE_END:
|
||||||
|
for j < valLen {
|
||||||
|
c := s[j]
|
||||||
|
|
||||||
|
if !needEscapeWithHTML[c] {
|
||||||
|
// fast path: most of the time, printable ascii characters are used
|
||||||
|
j++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch c {
|
||||||
|
case '\\', '"':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', c)
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\n':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 'n')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\r':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 'r')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\t':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 't')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '<', '>', '&':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u00`...)
|
||||||
|
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// This encodes bytes < 0x20 except for \t, \n and \r.
|
||||||
|
if c < 0x20 {
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u00`...)
|
||||||
|
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
r, size := utf8.DecodeRuneInString(s[j:])
|
||||||
|
|
||||||
|
if r == utf8.RuneError && size == 1 {
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\ufffd`...)
|
||||||
|
i = j + size
|
||||||
|
j = j + size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch r {
|
||||||
|
case '\u2028', '\u2029':
|
||||||
|
// U+2028 is LINE SEPARATOR.
|
||||||
|
// U+2029 is PARAGRAPH SEPARATOR.
|
||||||
|
// They are both technically valid characters in JSON strings,
|
||||||
|
// but don't work in JSONP, which has to be evaluated as JavaScript,
|
||||||
|
// and can lead to security holes there. It is valid JSON to
|
||||||
|
// escape them, so we do so unconditionally.
|
||||||
|
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u202`...)
|
||||||
|
buf = append(buf, hex[r&0xF])
|
||||||
|
i = j + size
|
||||||
|
j = j + size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
j += size
|
||||||
|
}
|
||||||
|
|
||||||
|
return append(append(buf, s[i:]...), '"')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendString(buf []byte, s string) []byte {
|
||||||
|
valLen := len(s)
|
||||||
|
if valLen == 0 {
|
||||||
|
return append(buf, `""`...)
|
||||||
|
}
|
||||||
|
buf = append(buf, '"')
|
||||||
|
var escapeIdx int
|
||||||
|
if valLen >= 8 {
|
||||||
|
if escapeIdx = escapeIndex(s); escapeIdx < 0 {
|
||||||
|
return append(append(buf, s...), '"')
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
i := 0
|
||||||
|
j := escapeIdx
|
||||||
|
for j < valLen {
|
||||||
|
c := s[j]
|
||||||
|
|
||||||
|
if c >= 0x20 && c <= 0x7f && c != '\\' && c != '"' {
|
||||||
|
// fast path: most of the time, printable ascii characters are used
|
||||||
|
j++
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch c {
|
||||||
|
case '\\', '"':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', c)
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\n':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 'n')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\r':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 'r')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '\t':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, '\\', 't')
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
case '<', '>', '&':
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u00`...)
|
||||||
|
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// This encodes bytes < 0x20 except for \t, \n and \r.
|
||||||
|
if c < 0x20 {
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u00`...)
|
||||||
|
buf = append(buf, hex[c>>4], hex[c&0xF])
|
||||||
|
i = j + 1
|
||||||
|
j = j + 1
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
r, size := utf8.DecodeRuneInString(s[j:])
|
||||||
|
|
||||||
|
if r == utf8.RuneError && size == 1 {
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\ufffd`...)
|
||||||
|
i = j + size
|
||||||
|
j = j + size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
switch r {
|
||||||
|
case '\u2028', '\u2029':
|
||||||
|
// U+2028 is LINE SEPARATOR.
|
||||||
|
// U+2029 is PARAGRAPH SEPARATOR.
|
||||||
|
// They are both technically valid characters in JSON strings,
|
||||||
|
// but don't work in JSONP, which has to be evaluated as JavaScript,
|
||||||
|
// and can lead to security holes there. It is valid JSON to
|
||||||
|
// escape them, so we do so unconditionally.
|
||||||
|
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
|
||||||
|
buf = append(buf, s[i:j]...)
|
||||||
|
buf = append(buf, `\u202`...)
|
||||||
|
buf = append(buf, hex[r&0xF])
|
||||||
|
i = j + size
|
||||||
|
j = j + size
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
j += size
|
||||||
|
}
|
||||||
|
|
||||||
|
return append(append(buf, s[i:]...), '"')
|
||||||
|
}
|
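Both AppendString variants above share the same shape: runs of safe bytes are copied verbatim, and the code only drops into the switch when a byte needs a two-character or \u00XX escape. A minimal sketch of that copy-then-escape loop (standalone, hypothetical helper; multi-byte rune handling such as the U+2028/U+2029 cases is left out for brevity):

package main

import "fmt"

const hexDigits = "0123456789abcdef"

// escapeJSONString shows the copy-then-escape pattern used by appendString:
// safe bytes are skipped in runs and appended in one go, while '"', '\\' and
// control bytes below 0x20 are written as short escapes or \u00XX sequences.
func escapeJSONString(s string) string {
	buf := []byte{'"'}
	start := 0
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c >= 0x20 && c != '"' && c != '\\' {
			continue // fast path: extend the current run of safe bytes
		}
		buf = append(buf, s[start:i]...)
		switch c {
		case '"', '\\':
			buf = append(buf, '\\', c)
		case '\n':
			buf = append(buf, '\\', 'n')
		case '\r':
			buf = append(buf, '\\', 'r')
		case '\t':
			buf = append(buf, '\\', 't')
		default: // remaining control bytes below 0x20
			buf = append(buf, '\\', 'u', '0', '0', hexDigits[c>>4], hexDigits[c&0xF])
		}
		start = i + 1
	}
	buf = append(buf, s[start:]...)
	return string(append(buf, '"'))
}

func main() {
	fmt.Println(escapeJSONString("a\tb\"c")) // "a\tb\"c"
}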
34
vendor/github.com/goccy/go-json/internal/encoder/vm/debug_vm.go
generated
vendored
Normal file
@ -0,0 +1,34 @@
package vm

import (
	"fmt"

	"github.com/goccy/go-json/internal/encoder"
)

func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
	defer func() {
		var code *encoder.Opcode
		if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
			code = codeSet.EscapeKeyCode
		} else {
			code = codeSet.NoescapeKeyCode
		}

		if err := recover(); err != nil {
			fmt.Println("=============[DEBUG]===============")
			fmt.Println("* [TYPE]")
			fmt.Println(codeSet.Type)
			fmt.Printf("\n")
			fmt.Println("* [ALL OPCODE]")
			fmt.Println(code.Dump())
			fmt.Printf("\n")
			fmt.Println("* [CONTEXT]")
			fmt.Printf("%+v\n", ctx)
			fmt.Println("===================================")
			panic(err)
		}
	}()

	return Run(ctx, b, codeSet)
}
9
vendor/github.com/goccy/go-json/internal/encoder/vm/hack.go
generated
vendored
Normal file
@ -0,0 +1,9 @@
package vm

import (
	// HACK: compile order
	// The `vm`, `vm_indent`, `vm_color`, and `vm_color_indent` packages use a lot of memory to compile,
	// so forcibly create dependencies between them to avoid compiling them concurrently.
	// dependency order: vm => vm_indent => vm_color => vm_color_indent
	_ "github.com/goccy/go-json/internal/encoder/vm_indent"
)
182
vendor/github.com/goccy/go-json/internal/encoder/vm/util.go
generated
vendored
Normal file
@ -0,0 +1,182 @@
package vm
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||||
|
|
||||||
|
var (
|
||||||
|
appendInt = encoder.AppendInt
|
||||||
|
appendUint = encoder.AppendUint
|
||||||
|
appendFloat32 = encoder.AppendFloat32
|
||||||
|
appendFloat64 = encoder.AppendFloat64
|
||||||
|
appendString = encoder.AppendString
|
||||||
|
appendByteSlice = encoder.AppendByteSlice
|
||||||
|
appendNumber = encoder.AppendNumber
|
||||||
|
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||||
|
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||||
|
mapiterinit = encoder.MapIterInit
|
||||||
|
mapiterkey = encoder.MapIterKey
|
||||||
|
mapitervalue = encoder.MapIterValue
|
||||||
|
mapiternext = encoder.MapIterNext
|
||||||
|
maplen = encoder.MapLen
|
||||||
|
)
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func errUnimplementedOp(op encoder.OpType) error {
|
||||||
|
return fmt.Errorf("encoder: opcode %s has not been implemented", op)
|
||||||
|
}
|
||||||
|
|
||||||
|
func load(base uintptr, idx uint32) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func store(base uintptr, idx uint32, p uintptr) {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToPtr(p uintptr) uintptr {
|
||||||
|
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||||
|
}
|
||||||
|
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||||
|
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||||
|
}
|
||||||
|
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||||
|
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: code.Type,
|
||||||
|
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||||
|
if v {
|
||||||
|
return append(b, "true"...)
|
||||||
|
}
|
||||||
|
return append(b, "false"...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNull(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, "null"...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = ':'
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte {
|
||||||
|
b = append(b, key...)
|
||||||
|
b[len(b)-1] = ':'
|
||||||
|
return append(b, value...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
b[len(b)-1] = '}'
|
||||||
|
b = append(b, ',')
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalJSON(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalText(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
return append(b, '[')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = ']'
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '[', ']', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '}', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = '}'
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructKey(_ *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
return append(b, code.Key...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
return append(b, '}', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
if b[last] == ',' {
|
||||||
|
b[last] = '}'
|
||||||
|
return appendComma(ctx, b)
|
||||||
|
}
|
||||||
|
return appendStructEnd(ctx, code, b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {}
|
||||||
|
func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {}
|
||||||
|
func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
||||||
|
func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
5043
vendor/github.com/goccy/go-json/internal/encoder/vm/vm.go
generated
vendored
Normal file
File diff suppressed because it is too large
34
vendor/github.com/goccy/go-json/internal/encoder/vm_color/debug_vm.go
generated
vendored
Normal file
@ -0,0 +1,34 @@
package vm_color
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
)
|
||||||
|
|
||||||
|
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||||
|
var code *encoder.Opcode
|
||||||
|
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||||
|
code = codeSet.EscapeKeyCode
|
||||||
|
} else {
|
||||||
|
code = codeSet.NoescapeKeyCode
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if err := recover(); err != nil {
|
||||||
|
fmt.Println("=============[DEBUG]===============")
|
||||||
|
fmt.Println("* [TYPE]")
|
||||||
|
fmt.Println(codeSet.Type)
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [ALL OPCODE]")
|
||||||
|
fmt.Println(code.Dump())
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [CONTEXT]")
|
||||||
|
fmt.Printf("%+v\n", ctx)
|
||||||
|
fmt.Println("===================================")
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return Run(ctx, b, codeSet)
|
||||||
|
}
|
9
vendor/github.com/goccy/go-json/internal/encoder/vm_color/hack.go
generated
vendored
Normal file
@ -0,0 +1,9 @@
package vm_color
|
||||||
|
|
||||||
|
import (
|
||||||
|
// HACK: compile order
|
||||||
|
// `vm`, `vm_indent`, `vm_color`, `vm_color_indent` packages uses a lot of memory to compile,
|
||||||
|
// so forcibly make dependencies and avoid compiling in concurrent.
|
||||||
|
// dependency order: vm => vm_indent => vm_color => vm_color_indent
|
||||||
|
_ "github.com/goccy/go-json/internal/encoder/vm_color_indent"
|
||||||
|
)
|
246
vendor/github.com/goccy/go-json/internal/encoder/vm_color/util.go
generated
vendored
Normal file
@ -0,0 +1,246 @@
package vm_color
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||||
|
|
||||||
|
var (
|
||||||
|
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||||
|
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||||
|
mapiterinit = encoder.MapIterInit
|
||||||
|
mapiterkey = encoder.MapIterKey
|
||||||
|
mapitervalue = encoder.MapIterValue
|
||||||
|
mapiternext = encoder.MapIterNext
|
||||||
|
maplen = encoder.MapLen
|
||||||
|
)
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func errUnimplementedOp(op encoder.OpType) error {
|
||||||
|
return fmt.Errorf("encoder: opcode %s has not been implemented", op)
|
||||||
|
}
|
||||||
|
|
||||||
|
func load(base uintptr, idx uint32) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func store(base uintptr, idx uint32, p uintptr) {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToPtr(p uintptr) uintptr {
|
||||||
|
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||||
|
}
|
||||||
|
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||||
|
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||||
|
}
|
||||||
|
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||||
|
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: code.Type,
|
||||||
|
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendInt(ctx *encoder.RuntimeContext, b []byte, v uint64, code *encoder.Opcode) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Int
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendInt(ctx, b, v, code)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendUint(ctx *encoder.RuntimeContext, b []byte, v uint64, code *encoder.Opcode) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Uint
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendUint(ctx, b, v, code)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Float
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendFloat32(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Float
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendFloat64(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.String
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendString(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Binary
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendByteSlice(ctx, b, src)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) {
|
||||||
|
format := ctx.Option.ColorScheme.Int
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
bb, err := encoder.AppendNumber(ctx, b, n)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return append(bb, format.Footer...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Bool
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
if v {
|
||||||
|
b = append(b, "true"...)
|
||||||
|
} else {
|
||||||
|
b = append(b, "false"...)
|
||||||
|
}
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Null
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = append(b, "null"...)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = ':'
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyValue(_ *encoder.RuntimeContext, _ *encoder.Opcode, b, key, value []byte) []byte {
|
||||||
|
b = append(b, key[:len(key)-1]...)
|
||||||
|
b = append(b, ':')
|
||||||
|
return append(b, value...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = '}'
|
||||||
|
b = append(b, ',')
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalJSON(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
format := ctx.Option.ColorScheme.String
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
bb, err := encoder.AppendMarshalText(ctx, code, b, v)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return append(bb, format.Footer...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayHead(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
return append(b, '[')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = ']'
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '[', ']', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '}', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendObjectEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = '}'
|
||||||
|
return append(b, ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.ObjectKey
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = append(b, code.Key[:len(code.Key)-1]...)
|
||||||
|
b = append(b, format.Footer...)
|
||||||
|
|
||||||
|
return append(b, ':')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEnd(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte {
|
||||||
|
return append(b, '}', ',')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
if b[last] == ',' {
|
||||||
|
b[last] = '}'
|
||||||
|
return appendComma(ctx, b)
|
||||||
|
}
|
||||||
|
return appendStructEnd(ctx, code, b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func restoreIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, _ uintptr) {}
|
||||||
|
func storeIndent(_ uintptr, _ *encoder.Opcode, _ uintptr) {}
|
||||||
|
func appendMapKeyIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
||||||
|
func appendArrayElemIndent(_ *encoder.RuntimeContext, _ *encoder.Opcode, b []byte) []byte { return b }
|
5043
vendor/github.com/goccy/go-json/internal/encoder/vm_color/vm.go
generated
vendored
Normal file
File diff suppressed because it is too large
34
vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/debug_vm.go
generated
vendored
Normal file
@ -0,0 +1,34 @@
package vm_color_indent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
)
|
||||||
|
|
||||||
|
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||||
|
var code *encoder.Opcode
|
||||||
|
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||||
|
code = codeSet.EscapeKeyCode
|
||||||
|
} else {
|
||||||
|
code = codeSet.NoescapeKeyCode
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if err := recover(); err != nil {
|
||||||
|
fmt.Println("=============[DEBUG]===============")
|
||||||
|
fmt.Println("* [TYPE]")
|
||||||
|
fmt.Println(codeSet.Type)
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [ALL OPCODE]")
|
||||||
|
fmt.Println(code.Dump())
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [CONTEXT]")
|
||||||
|
fmt.Printf("%+v\n", ctx)
|
||||||
|
fmt.Println("===================================")
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return Run(ctx, b, codeSet)
|
||||||
|
}
|
267
vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/util.go
generated
vendored
Normal file
@ -0,0 +1,267 @@
package vm_color_indent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||||
|
|
||||||
|
var (
|
||||||
|
appendIndent = encoder.AppendIndent
|
||||||
|
appendStructEnd = encoder.AppendStructEndIndent
|
||||||
|
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||||
|
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||||
|
mapiterinit = encoder.MapIterInit
|
||||||
|
mapiterkey = encoder.MapIterKey
|
||||||
|
mapitervalue = encoder.MapIterValue
|
||||||
|
mapiternext = encoder.MapIterNext
|
||||||
|
maplen = encoder.MapLen
|
||||||
|
)
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func errUnimplementedOp(op encoder.OpType) error {
|
||||||
|
return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op)
|
||||||
|
}
|
||||||
|
|
||||||
|
func load(base uintptr, idx uint32) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func store(base uintptr, idx uint32, p uintptr) {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToPtr(p uintptr) uintptr {
|
||||||
|
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||||
|
}
|
||||||
|
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||||
|
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||||
|
}
|
||||||
|
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||||
|
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: code.Type,
|
||||||
|
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendInt(ctx *encoder.RuntimeContext, b []byte, v uint64, code *encoder.Opcode) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Int
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendInt(ctx, b, v, code)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendUint(ctx *encoder.RuntimeContext, b []byte, v uint64, code *encoder.Opcode) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Uint
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendUint(ctx, b, v, code)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendFloat32(ctx *encoder.RuntimeContext, b []byte, v float32) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Float
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendFloat32(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendFloat64(ctx *encoder.RuntimeContext, b []byte, v float64) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Float
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendFloat64(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendString(ctx *encoder.RuntimeContext, b []byte, v string) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.String
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendString(ctx, b, v)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendByteSlice(ctx *encoder.RuntimeContext, b []byte, src []byte) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Binary
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = encoder.AppendByteSlice(ctx, b, src)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNumber(ctx *encoder.RuntimeContext, b []byte, n json.Number) ([]byte, error) {
|
||||||
|
format := ctx.Option.ColorScheme.Int
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
bb, err := encoder.AppendNumber(ctx, b, n)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return append(bb, format.Footer...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendBool(ctx *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Bool
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
if v {
|
||||||
|
b = append(b, "true"...)
|
||||||
|
} else {
|
||||||
|
b = append(b, "false"...)
|
||||||
|
}
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNull(ctx *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
format := ctx.Option.ColorScheme.Null
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = append(b, "null"...)
|
||||||
|
return append(b, format.Footer...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ':', ' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte {
|
||||||
|
b = appendIndent(ctx, b, code.Indent+1)
|
||||||
|
b = append(b, key...)
|
||||||
|
b[len(b)-2] = ':'
|
||||||
|
b[len(b)-1] = ' '
|
||||||
|
return append(b, value...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
return append(b, '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = append(b, '[', '\n')
|
||||||
|
return appendIndent(ctx, b, code.Indent+1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
return append(b, ']', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '[', ']', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = '\n'
|
||||||
|
b = appendIndent(ctx, b, code.Indent-1)
|
||||||
|
return append(b, '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalJSONIndent(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
format := ctx.Option.ColorScheme.String
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
bb, err := encoder.AppendMarshalTextIndent(ctx, code, b, v)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return append(bb, format.Footer...), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
|
||||||
|
format := ctx.Option.ColorScheme.ObjectKey
|
||||||
|
b = append(b, format.Header...)
|
||||||
|
b = append(b, code.Key[:len(code.Key)-1]...)
|
||||||
|
b = append(b, format.Footer...)
|
||||||
|
|
||||||
|
return append(b, ':', ' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
if b[last-1] == '{' {
|
||||||
|
b[last] = '}'
|
||||||
|
} else {
|
||||||
|
if b[last] == '\n' {
|
||||||
|
// to remove ',' and '\n' characters
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
}
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent-1)
|
||||||
|
b = append(b, '}')
|
||||||
|
}
|
||||||
|
return appendComma(ctx, b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) {
|
||||||
|
ctx.BaseIndent = uint32(load(ctxptr, code.Length))
|
||||||
|
}
|
||||||
|
|
||||||
|
func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) {
|
||||||
|
store(ctxptr, code.Length, indent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
return appendIndent(ctx, b, code.Indent+1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
return appendIndent(ctx, b, code.Indent)
|
||||||
|
}
|
5043
vendor/github.com/goccy/go-json/internal/encoder/vm_color_indent/vm.go
generated
vendored
Normal file
File diff suppressed because it is too large
34
vendor/github.com/goccy/go-json/internal/encoder/vm_indent/debug_vm.go
generated
vendored
Normal file
@ -0,0 +1,34 @@
package vm_indent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
)
|
||||||
|
|
||||||
|
func DebugRun(ctx *encoder.RuntimeContext, b []byte, codeSet *encoder.OpcodeSet) ([]byte, error) {
|
||||||
|
var code *encoder.Opcode
|
||||||
|
if (ctx.Option.Flag & encoder.HTMLEscapeOption) != 0 {
|
||||||
|
code = codeSet.EscapeKeyCode
|
||||||
|
} else {
|
||||||
|
code = codeSet.NoescapeKeyCode
|
||||||
|
}
|
||||||
|
|
||||||
|
defer func() {
|
||||||
|
if err := recover(); err != nil {
|
||||||
|
fmt.Println("=============[DEBUG]===============")
|
||||||
|
fmt.Println("* [TYPE]")
|
||||||
|
fmt.Println(codeSet.Type)
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [ALL OPCODE]")
|
||||||
|
fmt.Println(code.Dump())
|
||||||
|
fmt.Printf("\n")
|
||||||
|
fmt.Println("* [CONTEXT]")
|
||||||
|
fmt.Printf("%+v\n", ctx)
|
||||||
|
fmt.Println("===================================")
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
return Run(ctx, b, codeSet)
|
||||||
|
}
|
9
vendor/github.com/goccy/go-json/internal/encoder/vm_indent/hack.go
generated
vendored
Normal file
@ -0,0 +1,9 @@
package vm_indent
|
||||||
|
|
||||||
|
import (
|
||||||
|
// HACK: compile order
|
||||||
|
// `vm`, `vm_indent`, `vm_color`, `vm_color_indent` packages uses a lot of memory to compile,
|
||||||
|
// so forcibly make dependencies and avoid compiling in concurrent.
|
||||||
|
// dependency order: vm => vm_indent => vm_color => vm_color_indent
|
||||||
|
_ "github.com/goccy/go-json/internal/encoder/vm_color"
|
||||||
|
)
|
204
vendor/github.com/goccy/go-json/internal/encoder/vm_indent/util.go
generated
vendored
Normal file
@ -0,0 +1,204 @@
package vm_indent
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
|
||||||
|
"github.com/goccy/go-json/internal/encoder"
|
||||||
|
"github.com/goccy/go-json/internal/runtime"
|
||||||
|
)
|
||||||
|
|
||||||
|
const uintptrSize = 4 << (^uintptr(0) >> 63)
|
||||||
|
|
||||||
|
var (
|
||||||
|
appendInt = encoder.AppendInt
|
||||||
|
appendUint = encoder.AppendUint
|
||||||
|
appendFloat32 = encoder.AppendFloat32
|
||||||
|
appendFloat64 = encoder.AppendFloat64
|
||||||
|
appendString = encoder.AppendString
|
||||||
|
appendByteSlice = encoder.AppendByteSlice
|
||||||
|
appendNumber = encoder.AppendNumber
|
||||||
|
appendStructEnd = encoder.AppendStructEndIndent
|
||||||
|
appendIndent = encoder.AppendIndent
|
||||||
|
errUnsupportedValue = encoder.ErrUnsupportedValue
|
||||||
|
errUnsupportedFloat = encoder.ErrUnsupportedFloat
|
||||||
|
mapiterinit = encoder.MapIterInit
|
||||||
|
mapiterkey = encoder.MapIterKey
|
||||||
|
mapitervalue = encoder.MapIterValue
|
||||||
|
mapiternext = encoder.MapIterNext
|
||||||
|
maplen = encoder.MapLen
|
||||||
|
)
|
||||||
|
|
||||||
|
type emptyInterface struct {
|
||||||
|
typ *runtime.Type
|
||||||
|
ptr unsafe.Pointer
|
||||||
|
}
|
||||||
|
|
||||||
|
func errUnimplementedOp(op encoder.OpType) error {
|
||||||
|
return fmt.Errorf("encoder (indent): opcode %s has not been implemented", op)
|
||||||
|
}
|
||||||
|
|
||||||
|
func load(base uintptr, idx uint32) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
return **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
}
|
||||||
|
|
||||||
|
func store(base uintptr, idx uint32, p uintptr) {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
**(**uintptr)(unsafe.Pointer(&addr)) = p
|
||||||
|
}
|
||||||
|
|
||||||
|
func loadNPtr(base uintptr, idx uint32, ptrNum uint8) uintptr {
|
||||||
|
addr := base + uintptr(idx)
|
||||||
|
p := **(**uintptr)(unsafe.Pointer(&addr))
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUint64(p uintptr) uint64 { return **(**uint64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat32(p uintptr) float32 { return **(**float32)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToFloat64(p uintptr) float64 { return **(**float64)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBool(p uintptr) bool { return **(**bool)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToBytes(p uintptr) []byte { return **(**[]byte)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToNumber(p uintptr) json.Number { return **(**json.Number)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToString(p uintptr) string { return **(**string)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToSlice(p uintptr) *runtime.SliceHeader { return *(**runtime.SliceHeader)(unsafe.Pointer(&p)) }
|
||||||
|
func ptrToPtr(p uintptr) uintptr {
|
||||||
|
return uintptr(**(**unsafe.Pointer)(unsafe.Pointer(&p)))
|
||||||
|
}
|
||||||
|
func ptrToNPtr(p uintptr, ptrNum uint8) uintptr {
|
||||||
|
for i := uint8(0); i < ptrNum; i++ {
|
||||||
|
if p == 0 {
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
p = ptrToPtr(p)
|
||||||
|
}
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
func ptrToUnsafePtr(p uintptr) unsafe.Pointer {
|
||||||
|
return *(*unsafe.Pointer)(unsafe.Pointer(&p))
|
||||||
|
}
|
||||||
|
func ptrToInterface(code *encoder.Opcode, p uintptr) interface{} {
|
||||||
|
return *(*interface{})(unsafe.Pointer(&emptyInterface{
|
||||||
|
typ: code.Type,
|
||||||
|
ptr: *(*unsafe.Pointer)(unsafe.Pointer(&p)),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendBool(_ *encoder.RuntimeContext, b []byte, v bool) []byte {
|
||||||
|
if v {
|
||||||
|
return append(b, "true"...)
|
||||||
|
}
|
||||||
|
return append(b, "false"...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendNull(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, "null"...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendComma(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendColon(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, ':', ' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyValue(ctx *encoder.RuntimeContext, code *encoder.Opcode, b, key, value []byte) []byte {
|
||||||
|
b = appendIndent(ctx, b, code.Indent+1)
|
||||||
|
b = append(b, key...)
|
||||||
|
b[len(b)-2] = ':'
|
||||||
|
b[len(b)-1] = ' '
|
||||||
|
return append(b, value...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
return append(b, '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayHead(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = append(b, '[', '\n')
|
||||||
|
return appendIndent(ctx, b, code.Indent+1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
return append(b, ']', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyArray(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '[', ']', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendEmptyObject(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendObjectEnd(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
b[last] = '\n'
|
||||||
|
b = appendIndent(ctx, b, code.Indent-1)
|
||||||
|
return append(b, '}', ',', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalJSON(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalJSONIndent(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMarshalText(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte, v interface{}) ([]byte, error) {
|
||||||
|
return encoder.AppendMarshalTextIndent(ctx, code, b, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructHead(_ *encoder.RuntimeContext, b []byte) []byte {
|
||||||
|
return append(b, '{', '\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructKey(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
b = appendIndent(ctx, b, code.Indent)
|
||||||
|
b = append(b, code.Key...)
|
||||||
|
return append(b, ' ')
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendStructEndSkipLast(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
last := len(b) - 1
|
||||||
|
if b[last-1] == '{' {
|
||||||
|
b[last] = '}'
|
||||||
|
} else {
|
||||||
|
if b[last] == '\n' {
|
||||||
|
// to remove ',' and '\n' characters
|
||||||
|
b = b[:len(b)-2]
|
||||||
|
}
|
||||||
|
b = append(b, '\n')
|
||||||
|
b = appendIndent(ctx, b, code.Indent-1)
|
||||||
|
b = append(b, '}')
|
||||||
|
}
|
||||||
|
return appendComma(ctx, b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func restoreIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, ctxptr uintptr) {
|
||||||
|
ctx.BaseIndent = uint32(load(ctxptr, code.Length))
|
||||||
|
}
|
||||||
|
|
||||||
|
func storeIndent(ctxptr uintptr, code *encoder.Opcode, indent uintptr) {
|
||||||
|
store(ctxptr, code.Length, indent)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendArrayElemIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
return appendIndent(ctx, b, code.Indent+1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func appendMapKeyIndent(ctx *encoder.RuntimeContext, code *encoder.Opcode, b []byte) []byte {
|
||||||
|
return appendIndent(ctx, b, code.Indent)
|
||||||
|
}
|
5043
vendor/github.com/goccy/go-json/internal/encoder/vm_indent/vm.go
generated
vendored
Normal file
File diff suppressed because it is too large
164
vendor/github.com/goccy/go-json/internal/errors/error.go
generated
vendored
Normal file
@@ -0,0 +1,164 @@
package errors

import (
	"fmt"
	"reflect"
	"strconv"
)

type InvalidUTF8Error struct {
	S string // the whole string value that caused the error
}

func (e *InvalidUTF8Error) Error() string {
	return fmt.Sprintf("json: invalid UTF-8 in string: %s", strconv.Quote(e.S))
}

type InvalidUnmarshalError struct {
	Type reflect.Type
}

func (e *InvalidUnmarshalError) Error() string {
	if e.Type == nil {
		return "json: Unmarshal(nil)"
	}

	if e.Type.Kind() != reflect.Ptr {
		return fmt.Sprintf("json: Unmarshal(non-pointer %s)", e.Type)
	}
	return fmt.Sprintf("json: Unmarshal(nil %s)", e.Type)
}

// A MarshalerError represents an error from calling a MarshalJSON or MarshalText method.
type MarshalerError struct {
	Type       reflect.Type
	Err        error
	sourceFunc string
}

func (e *MarshalerError) Error() string {
	srcFunc := e.sourceFunc
	if srcFunc == "" {
		srcFunc = "MarshalJSON"
	}
	return fmt.Sprintf("json: error calling %s for type %s: %s", srcFunc, e.Type, e.Err.Error())
}

// Unwrap returns the underlying error.
func (e *MarshalerError) Unwrap() error { return e.Err }

// A SyntaxError is a description of a JSON syntax error.
type SyntaxError struct {
	msg    string // description of error
	Offset int64  // error occurred after reading Offset bytes
}

func (e *SyntaxError) Error() string { return e.msg }

// An UnmarshalFieldError describes a JSON object key that
// led to an unexported (and therefore unwritable) struct field.
//
// Deprecated: No longer used; kept for compatibility.
type UnmarshalFieldError struct {
	Key   string
	Type  reflect.Type
	Field reflect.StructField
}

func (e *UnmarshalFieldError) Error() string {
	return fmt.Sprintf("json: cannot unmarshal object key %s into unexported field %s of type %s",
		strconv.Quote(e.Key), e.Field.Name, e.Type.String(),
	)
}

// An UnmarshalTypeError describes a JSON value that was
// not appropriate for a value of a specific Go type.
type UnmarshalTypeError struct {
	Value  string       // description of JSON value - "bool", "array", "number -5"
	Type   reflect.Type // type of Go value it could not be assigned to
	Offset int64        // error occurred after reading Offset bytes
	Struct string       // name of the struct type containing the field
	Field  string       // the full path from root node to the field
}

func (e *UnmarshalTypeError) Error() string {
	if e.Struct != "" || e.Field != "" {
		return fmt.Sprintf("json: cannot unmarshal %s into Go struct field %s.%s of type %s",
			e.Value, e.Struct, e.Field, e.Type,
		)
	}
	return fmt.Sprintf("json: cannot unmarshal %s into Go value of type %s", e.Value, e.Type)
}

// An UnsupportedTypeError is returned by Marshal when attempting
// to encode an unsupported value type.
type UnsupportedTypeError struct {
	Type reflect.Type
}

func (e *UnsupportedTypeError) Error() string {
	return fmt.Sprintf("json: unsupported type: %s", e.Type)
}

type UnsupportedValueError struct {
	Value reflect.Value
	Str   string
}

func (e *UnsupportedValueError) Error() string {
	return fmt.Sprintf("json: unsupported value: %s", e.Str)
}

func ErrSyntax(msg string, offset int64) *SyntaxError {
	return &SyntaxError{msg: msg, Offset: offset}
}

func ErrMarshaler(typ reflect.Type, err error, msg string) *MarshalerError {
	return &MarshalerError{
		Type:       typ,
		Err:        err,
		sourceFunc: msg,
	}
}

func ErrExceededMaxDepth(c byte, cursor int64) *SyntaxError {
	return &SyntaxError{
		msg:    fmt.Sprintf(`invalid character "%c" exceeded max depth`, c),
		Offset: cursor,
	}
}

func ErrNotAtBeginningOfValue(cursor int64) *SyntaxError {
	return &SyntaxError{msg: "not at beginning of value", Offset: cursor}
}

func ErrUnexpectedEndOfJSON(msg string, cursor int64) *SyntaxError {
	return &SyntaxError{
		msg:    fmt.Sprintf("json: %s unexpected end of JSON input", msg),
		Offset: cursor,
	}
}

func ErrExpected(msg string, cursor int64) *SyntaxError {
	return &SyntaxError{msg: fmt.Sprintf("expected %s", msg), Offset: cursor}
}

func ErrInvalidCharacter(c byte, context string, cursor int64) *SyntaxError {
	if c == 0 {
		return &SyntaxError{
			msg:    fmt.Sprintf("json: invalid character as %s", context),
			Offset: cursor,
		}
	}
	return &SyntaxError{
		msg:    fmt.Sprintf("json: invalid character %c as %s", c, context),
		Offset: cursor,
	}
}

func ErrInvalidBeginningOfValue(c byte, cursor int64) *SyntaxError {
	return &SyntaxError{
		msg:    fmt.Sprintf("invalid character '%c' looking for beginning of value", c),
		Offset: cursor,
	}
}
263
vendor/github.com/goccy/go-json/internal/runtime/rtype.go
generated
vendored
Normal file
@@ -0,0 +1,263 @@
package runtime

import (
	"reflect"
	"unsafe"
)

// Type representing reflect.rtype for noescape trick
type Type struct{}

//go:linkname rtype_Align reflect.(*rtype).Align
//go:noescape
func rtype_Align(*Type) int

func (t *Type) Align() int {
	return rtype_Align(t)
}

//go:linkname rtype_FieldAlign reflect.(*rtype).FieldAlign
//go:noescape
func rtype_FieldAlign(*Type) int

func (t *Type) FieldAlign() int {
	return rtype_FieldAlign(t)
}

//go:linkname rtype_Method reflect.(*rtype).Method
//go:noescape
func rtype_Method(*Type, int) reflect.Method

func (t *Type) Method(a0 int) reflect.Method {
	return rtype_Method(t, a0)
}

//go:linkname rtype_MethodByName reflect.(*rtype).MethodByName
//go:noescape
func rtype_MethodByName(*Type, string) (reflect.Method, bool)

func (t *Type) MethodByName(a0 string) (reflect.Method, bool) {
	return rtype_MethodByName(t, a0)
}

//go:linkname rtype_NumMethod reflect.(*rtype).NumMethod
//go:noescape
func rtype_NumMethod(*Type) int

func (t *Type) NumMethod() int {
	return rtype_NumMethod(t)
}

//go:linkname rtype_Name reflect.(*rtype).Name
//go:noescape
func rtype_Name(*Type) string

func (t *Type) Name() string {
	return rtype_Name(t)
}

//go:linkname rtype_PkgPath reflect.(*rtype).PkgPath
//go:noescape
func rtype_PkgPath(*Type) string

func (t *Type) PkgPath() string {
	return rtype_PkgPath(t)
}

//go:linkname rtype_Size reflect.(*rtype).Size
//go:noescape
func rtype_Size(*Type) uintptr

func (t *Type) Size() uintptr {
	return rtype_Size(t)
}

//go:linkname rtype_String reflect.(*rtype).String
//go:noescape
func rtype_String(*Type) string

func (t *Type) String() string {
	return rtype_String(t)
}

//go:linkname rtype_Kind reflect.(*rtype).Kind
//go:noescape
func rtype_Kind(*Type) reflect.Kind

func (t *Type) Kind() reflect.Kind {
	return rtype_Kind(t)
}

//go:linkname rtype_Implements reflect.(*rtype).Implements
//go:noescape
func rtype_Implements(*Type, reflect.Type) bool

func (t *Type) Implements(u reflect.Type) bool {
	return rtype_Implements(t, u)
}

//go:linkname rtype_AssignableTo reflect.(*rtype).AssignableTo
//go:noescape
func rtype_AssignableTo(*Type, reflect.Type) bool

func (t *Type) AssignableTo(u reflect.Type) bool {
	return rtype_AssignableTo(t, u)
}

//go:linkname rtype_ConvertibleTo reflect.(*rtype).ConvertibleTo
//go:noescape
func rtype_ConvertibleTo(*Type, reflect.Type) bool

func (t *Type) ConvertibleTo(u reflect.Type) bool {
	return rtype_ConvertibleTo(t, u)
}

//go:linkname rtype_Comparable reflect.(*rtype).Comparable
//go:noescape
func rtype_Comparable(*Type) bool

func (t *Type) Comparable() bool {
	return rtype_Comparable(t)
}

//go:linkname rtype_Bits reflect.(*rtype).Bits
//go:noescape
func rtype_Bits(*Type) int

func (t *Type) Bits() int {
	return rtype_Bits(t)
}

//go:linkname rtype_ChanDir reflect.(*rtype).ChanDir
//go:noescape
func rtype_ChanDir(*Type) reflect.ChanDir

func (t *Type) ChanDir() reflect.ChanDir {
	return rtype_ChanDir(t)
}

//go:linkname rtype_IsVariadic reflect.(*rtype).IsVariadic
//go:noescape
func rtype_IsVariadic(*Type) bool

func (t *Type) IsVariadic() bool {
	return rtype_IsVariadic(t)
}

//go:linkname rtype_Elem reflect.(*rtype).Elem
//go:noescape
func rtype_Elem(*Type) reflect.Type

func (t *Type) Elem() *Type {
	return Type2RType(rtype_Elem(t))
}

//go:linkname rtype_Field reflect.(*rtype).Field
//go:noescape
func rtype_Field(*Type, int) reflect.StructField

func (t *Type) Field(i int) reflect.StructField {
	return rtype_Field(t, i)
}

//go:linkname rtype_FieldByIndex reflect.(*rtype).FieldByIndex
//go:noescape
func rtype_FieldByIndex(*Type, []int) reflect.StructField

func (t *Type) FieldByIndex(index []int) reflect.StructField {
	return rtype_FieldByIndex(t, index)
}

//go:linkname rtype_FieldByName reflect.(*rtype).FieldByName
//go:noescape
func rtype_FieldByName(*Type, string) (reflect.StructField, bool)

func (t *Type) FieldByName(name string) (reflect.StructField, bool) {
	return rtype_FieldByName(t, name)
}

//go:linkname rtype_FieldByNameFunc reflect.(*rtype).FieldByNameFunc
//go:noescape
func rtype_FieldByNameFunc(*Type, func(string) bool) (reflect.StructField, bool)

func (t *Type) FieldByNameFunc(match func(string) bool) (reflect.StructField, bool) {
	return rtype_FieldByNameFunc(t, match)
}

//go:linkname rtype_In reflect.(*rtype).In
//go:noescape
func rtype_In(*Type, int) reflect.Type

func (t *Type) In(i int) reflect.Type {
	return rtype_In(t, i)
}

//go:linkname rtype_Key reflect.(*rtype).Key
//go:noescape
func rtype_Key(*Type) reflect.Type

func (t *Type) Key() *Type {
	return Type2RType(rtype_Key(t))
}

//go:linkname rtype_Len reflect.(*rtype).Len
//go:noescape
func rtype_Len(*Type) int

func (t *Type) Len() int {
	return rtype_Len(t)
}

//go:linkname rtype_NumField reflect.(*rtype).NumField
//go:noescape
func rtype_NumField(*Type) int

func (t *Type) NumField() int {
	return rtype_NumField(t)
}

//go:linkname rtype_NumIn reflect.(*rtype).NumIn
//go:noescape
func rtype_NumIn(*Type) int

func (t *Type) NumIn() int {
	return rtype_NumIn(t)
}

//go:linkname rtype_NumOut reflect.(*rtype).NumOut
//go:noescape
func rtype_NumOut(*Type) int

func (t *Type) NumOut() int {
	return rtype_NumOut(t)
}

//go:linkname rtype_Out reflect.(*rtype).Out
//go:noescape
func rtype_Out(*Type, int) reflect.Type

//go:linkname PtrTo reflect.(*rtype).ptrTo
//go:noescape
func PtrTo(*Type) *Type

func (t *Type) Out(i int) reflect.Type {
	return rtype_Out(t, i)
}

//go:linkname IfaceIndir reflect.ifaceIndir
//go:noescape
func IfaceIndir(*Type) bool

//go:linkname RType2Type reflect.toType
//go:noescape
func RType2Type(t *Type) reflect.Type

//go:nolint structcheck
type emptyInterface struct {
	_   *Type
	ptr unsafe.Pointer
}

func Type2RType(t reflect.Type) *Type {
	return (*Type)(((*emptyInterface)(unsafe.Pointer(&t))).ptr)
}
87
vendor/github.com/goccy/go-json/internal/runtime/struct_field.go
generated
vendored
Normal file
@@ -0,0 +1,87 @@
package runtime

import (
	"reflect"
	"strings"
	"unicode"
)

func getTag(field reflect.StructField) string {
	return field.Tag.Get("json")
}

func IsIgnoredStructField(field reflect.StructField) bool {
	if field.PkgPath != "" {
		if field.Anonymous {
			if !(field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct) && field.Type.Kind() != reflect.Struct {
				return true
			}
		} else {
			// private field
			return true
		}
	}
	tag := getTag(field)
	return tag == "-"
}

type StructTag struct {
	Key         string
	IsTaggedKey bool
	IsOmitEmpty bool
	IsString    bool
	Field       reflect.StructField
}

type StructTags []*StructTag

func (t StructTags) ExistsKey(key string) bool {
	for _, tt := range t {
		if tt.Key == key {
			return true
		}
	}
	return false
}

func isValidTag(s string) bool {
	if s == "" {
		return false
	}
	for _, c := range s {
		switch {
		case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
			// Backslash and quote chars are reserved, but
			// otherwise any punctuation chars are allowed
			// in a tag name.
		case !unicode.IsLetter(c) && !unicode.IsDigit(c):
			return false
		}
	}
	return true
}

func StructTagFromField(field reflect.StructField) *StructTag {
	keyName := field.Name
	tag := getTag(field)
	st := &StructTag{Field: field}
	opts := strings.Split(tag, ",")
	if len(opts) > 0 {
		if opts[0] != "" && isValidTag(opts[0]) {
			keyName = opts[0]
			st.IsTaggedKey = true
		}
	}
	st.Key = keyName
	if len(opts) > 1 {
		for _, opt := range opts[1:] {
			switch opt {
			case "omitempty":
				st.IsOmitEmpty = true
			case "string":
				st.IsString = true
			}
		}
	}
	return st
}
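The tag parsing in struct_field.go mirrors what the standard library does with `json` struct tags: the first comma-separated part is the key, the rest are options. As a quick illustration of that split, here is a standalone sketch that uses only the standard library (it is not part of the vendored code, and the Person type is made up for the example):

package main

import (
	"fmt"
	"reflect"
	"strings"
)

type Person struct {
	Name string `json:"name,omitempty,string"`
}

func main() {
	// Pull the raw tag off the field, then split it the same way the
	// vendored StructTagFromField does: key first, options after.
	field, _ := reflect.TypeOf(Person{}).FieldByName("Name")
	parts := strings.Split(field.Tag.Get("json"), ",")
	key, opts := parts[0], parts[1:]
	fmt.Println(key)  // name
	fmt.Println(opts) // [omitempty string]
}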
100
vendor/github.com/goccy/go-json/internal/runtime/type.go
generated
vendored
Normal file
@@ -0,0 +1,100 @@
package runtime

import (
	"reflect"
	"unsafe"
)

type SliceHeader struct {
	Data unsafe.Pointer
	Len  int
	Cap  int
}

const (
	maxAcceptableTypeAddrRange = 1024 * 1024 * 2 // 2 Mib
)

type TypeAddr struct {
	BaseTypeAddr uintptr
	MaxTypeAddr  uintptr
	AddrRange    uintptr
	AddrShift    uintptr
}

var (
	typeAddr        *TypeAddr
	alreadyAnalyzed bool
)

//go:linkname typelinks reflect.typelinks
func typelinks() ([]unsafe.Pointer, [][]int32)

//go:linkname rtypeOff reflect.rtypeOff
func rtypeOff(unsafe.Pointer, int32) unsafe.Pointer

func AnalyzeTypeAddr() *TypeAddr {
	defer func() {
		alreadyAnalyzed = true
	}()
	if alreadyAnalyzed {
		return typeAddr
	}
	sections, offsets := typelinks()
	if len(sections) != 1 {
		return nil
	}
	if len(offsets) != 1 {
		return nil
	}
	section := sections[0]
	offset := offsets[0]
	var (
		min         uintptr = uintptr(^uint(0))
		max         uintptr = 0
		isAligned64         = true
		isAligned32         = true
	)
	for i := 0; i < len(offset); i++ {
		typ := (*Type)(rtypeOff(section, offset[i]))
		addr := uintptr(unsafe.Pointer(typ))
		if min > addr {
			min = addr
		}
		if max < addr {
			max = addr
		}
		if typ.Kind() == reflect.Ptr {
			addr = uintptr(unsafe.Pointer(typ.Elem()))
			if min > addr {
				min = addr
			}
			if max < addr {
				max = addr
			}
		}
		isAligned64 = isAligned64 && (addr-min)&63 == 0
		isAligned32 = isAligned32 && (addr-min)&31 == 0
	}
	addrRange := max - min
	if addrRange == 0 {
		return nil
	}
	var addrShift uintptr
	if isAligned64 {
		addrShift = 6
	} else if isAligned32 {
		addrShift = 5
	}
	cacheSize := addrRange >> addrShift
	if cacheSize > maxAcceptableTypeAddrRange {
		return nil
	}
	typeAddr = &TypeAddr{
		BaseTypeAddr: min,
		MaxTypeAddr:  max,
		AddrRange:    addrRange,
		AddrShift:    addrShift,
	}
	return typeAddr
}
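A quick worked example of the arithmetic in AnalyzeTypeAddr, using made-up numbers: if every rtype address is 64-byte aligned relative to the lowest one, AddrShift is 6, so a 4 MiB span of addresses yields a cache of 4*1024*1024 >> 6 = 65,536 slots, comfortably under the maxAcceptableTypeAddrRange limit of 2*1024*1024; an unaligned span of the same size would shift by 0 and blow past the limit, in which case the function returns nil and the caller presumably falls back to a slower lookup path.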
366
vendor/github.com/goccy/go-json/json.go
generated
vendored
Normal file
@@ -0,0 +1,366 @@
package json

import (
	"bytes"
	"context"
	"encoding/json"

	"github.com/goccy/go-json/internal/encoder"
)

// Marshaler is the interface implemented by types that
// can marshal themselves into valid JSON.
type Marshaler interface {
	MarshalJSON() ([]byte, error)
}

// MarshalerContext is the interface implemented by types that
// can marshal themselves into valid JSON with context.Context.
type MarshalerContext interface {
	MarshalJSON(context.Context) ([]byte, error)
}

// Unmarshaler is the interface implemented by types
// that can unmarshal a JSON description of themselves.
// The input can be assumed to be a valid encoding of
// a JSON value. UnmarshalJSON must copy the JSON data
// if it wishes to retain the data after returning.
//
// By convention, to approximate the behavior of Unmarshal itself,
// Unmarshalers implement UnmarshalJSON([]byte("null")) as a no-op.
type Unmarshaler interface {
	UnmarshalJSON([]byte) error
}

// UnmarshalerContext is the interface implemented by types
// that can unmarshal with context.Context a JSON description of themselves.
type UnmarshalerContext interface {
	UnmarshalJSON(context.Context, []byte) error
}

// Marshal returns the JSON encoding of v.
//
// Marshal traverses the value v recursively.
// If an encountered value implements the Marshaler interface
// and is not a nil pointer, Marshal calls its MarshalJSON method
// to produce JSON. If no MarshalJSON method is present but the
// value implements encoding.TextMarshaler instead, Marshal calls
// its MarshalText method and encodes the result as a JSON string.
// The nil pointer exception is not strictly necessary
// but mimics a similar, necessary exception in the behavior of
// UnmarshalJSON.
//
// Otherwise, Marshal uses the following type-dependent default encodings:
//
// Boolean values encode as JSON booleans.
//
// Floating point, integer, and Number values encode as JSON numbers.
//
// String values encode as JSON strings coerced to valid UTF-8,
// replacing invalid bytes with the Unicode replacement rune.
// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e"
// to keep some browsers from misinterpreting JSON output as HTML.
// Ampersand "&" is also escaped to "\u0026" for the same reason.
// This escaping can be disabled using an Encoder that had SetEscapeHTML(false)
// called on it.
//
// Array and slice values encode as JSON arrays, except that
// []byte encodes as a base64-encoded string, and a nil slice
// encodes as the null JSON value.
//
// Struct values encode as JSON objects.
// Each exported struct field becomes a member of the object, using the
// field name as the object key, unless the field is omitted for one of the
// reasons given below.
//
// The encoding of each struct field can be customized by the format string
// stored under the "json" key in the struct field's tag.
// The format string gives the name of the field, possibly followed by a
// comma-separated list of options. The name may be empty in order to
// specify options without overriding the default field name.
//
// The "omitempty" option specifies that the field should be omitted
// from the encoding if the field has an empty value, defined as
// false, 0, a nil pointer, a nil interface value, and any empty array,
// slice, map, or string.
//
// As a special case, if the field tag is "-", the field is always omitted.
// Note that a field with name "-" can still be generated using the tag "-,".
//
// Examples of struct field tags and their meanings:
//
//	// Field appears in JSON as key "myName".
//	Field int `json:"myName"`
//
//	// Field appears in JSON as key "myName" and
//	// the field is omitted from the object if its value is empty,
//	// as defined above.
//	Field int `json:"myName,omitempty"`
//
//	// Field appears in JSON as key "Field" (the default), but
//	// the field is skipped if empty.
//	// Note the leading comma.
//	Field int `json:",omitempty"`
//
//	// Field is ignored by this package.
//	Field int `json:"-"`
//
//	// Field appears in JSON as key "-".
//	Field int `json:"-,"`
//
// The "string" option signals that a field is stored as JSON inside a
// JSON-encoded string. It applies only to fields of string, floating point,
// integer, or boolean types. This extra level of encoding is sometimes used
// when communicating with JavaScript programs:
//
//	Int64String int64 `json:",string"`
//
// The key name will be used if it's a non-empty string consisting of
// only Unicode letters, digits, and ASCII punctuation except quotation
// marks, backslash, and comma.
//
// Anonymous struct fields are usually marshaled as if their inner exported fields
// were fields in the outer struct, subject to the usual Go visibility rules amended
// as described in the next paragraph.
// An anonymous struct field with a name given in its JSON tag is treated as
// having that name, rather than being anonymous.
// An anonymous struct field of interface type is treated the same as having
// that type as its name, rather than being anonymous.
//
// The Go visibility rules for struct fields are amended for JSON when
// deciding which field to marshal or unmarshal. If there are
// multiple fields at the same level, and that level is the least
// nested (and would therefore be the nesting level selected by the
// usual Go rules), the following extra rules apply:
//
// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered,
// even if there are multiple untagged fields that would otherwise conflict.
//
// 2) If there is exactly one field (tagged or not according to the first rule), that is selected.
//
// 3) Otherwise there are multiple fields, and all are ignored; no error occurs.
//
// Handling of anonymous struct fields is new in Go 1.1.
// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of
// an anonymous struct field in both current and earlier versions, give the field
// a JSON tag of "-".
//
// Map values encode as JSON objects. The map's key type must either be a
// string, an integer type, or implement encoding.TextMarshaler. The map keys
// are sorted and used as JSON object keys by applying the following rules,
// subject to the UTF-8 coercion described for string values above:
//   - string keys are used directly
//   - encoding.TextMarshalers are marshaled
//   - integer keys are converted to strings
//
// Pointer values encode as the value pointed to.
// A nil pointer encodes as the null JSON value.
//
// Interface values encode as the value contained in the interface.
// A nil interface value encodes as the null JSON value.
//
// Channel, complex, and function values cannot be encoded in JSON.
// Attempting to encode such a value causes Marshal to return
// an UnsupportedTypeError.
//
// JSON cannot represent cyclic data structures and Marshal does not
// handle them. Passing cyclic structures to Marshal will result in
// an infinite recursion.
//
func Marshal(v interface{}) ([]byte, error) {
	return MarshalWithOption(v)
}

// MarshalNoEscape returns the JSON encoding of v and doesn't escape v.
func MarshalNoEscape(v interface{}) ([]byte, error) {
	return marshalNoEscape(v)
}

// MarshalContext returns the JSON encoding of v with context.Context and EncodeOption.
func MarshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
	return marshalContext(ctx, v, optFuncs...)
}

// MarshalWithOption returns the JSON encoding of v with EncodeOption.
func MarshalWithOption(v interface{}, optFuncs ...EncodeOptionFunc) ([]byte, error) {
	return marshal(v, optFuncs...)
}

// MarshalIndent is like Marshal but applies Indent to format the output.
// Each JSON element in the output will begin on a new line beginning with prefix
// followed by one or more copies of indent according to the indentation nesting.
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
	return MarshalIndentWithOption(v, prefix, indent)
}

// MarshalIndentWithOption is like Marshal but applies Indent to format the output with EncodeOption.
func MarshalIndentWithOption(v interface{}, prefix, indent string, optFuncs ...EncodeOptionFunc) ([]byte, error) {
	return marshalIndent(v, prefix, indent, optFuncs...)
}

// Unmarshal parses the JSON-encoded data and stores the result
// in the value pointed to by v. If v is nil or not a pointer,
// Unmarshal returns an InvalidUnmarshalError.
//
// Unmarshal uses the inverse of the encodings that
// Marshal uses, allocating maps, slices, and pointers as necessary,
// with the following additional rules:
//
// To unmarshal JSON into a pointer, Unmarshal first handles the case of
// the JSON being the JSON literal null. In that case, Unmarshal sets
// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into
// the value pointed at by the pointer. If the pointer is nil, Unmarshal
// allocates a new value for it to point to.
//
// To unmarshal JSON into a value implementing the Unmarshaler interface,
// Unmarshal calls that value's UnmarshalJSON method, including
// when the input is a JSON null.
// Otherwise, if the value implements encoding.TextUnmarshaler
// and the input is a JSON quoted string, Unmarshal calls that value's
// UnmarshalText method with the unquoted form of the string.
//
// To unmarshal JSON into a struct, Unmarshal matches incoming object
// keys to the keys used by Marshal (either the struct field name or its tag),
// preferring an exact match but also accepting a case-insensitive match. By
// default, object keys which don't have a corresponding struct field are
// ignored (see Decoder.DisallowUnknownFields for an alternative).
//
// To unmarshal JSON into an interface value,
// Unmarshal stores one of these in the interface value:
//
//	bool, for JSON booleans
//	float64, for JSON numbers
//	string, for JSON strings
//	[]interface{}, for JSON arrays
//	map[string]interface{}, for JSON objects
//	nil for JSON null
//
// To unmarshal a JSON array into a slice, Unmarshal resets the slice length
// to zero and then appends each element to the slice.
// As a special case, to unmarshal an empty JSON array into a slice,
// Unmarshal replaces the slice with a new empty slice.
//
// To unmarshal a JSON array into a Go array, Unmarshal decodes
// JSON array elements into corresponding Go array elements.
// If the Go array is smaller than the JSON array,
// the additional JSON array elements are discarded.
// If the JSON array is smaller than the Go array,
// the additional Go array elements are set to zero values.
//
// To unmarshal a JSON object into a map, Unmarshal first establishes a map to
// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal
// reuses the existing map, keeping existing entries. Unmarshal then stores
// key-value pairs from the JSON object into the map. The map's key type must
// either be any string type, an integer, implement json.Unmarshaler, or
// implement encoding.TextUnmarshaler.
//
// If a JSON value is not appropriate for a given target type,
// or if a JSON number overflows the target type, Unmarshal
// skips that field and completes the unmarshaling as best it can.
// If no more serious errors are encountered, Unmarshal returns
// an UnmarshalTypeError describing the earliest such error. In any
// case, it's not guaranteed that all the remaining fields following
// the problematic one will be unmarshaled into the target object.
//
// The JSON null value unmarshals into an interface, map, pointer, or slice
// by setting that Go value to nil. Because null is often used in JSON to mean
// ``not present,'' unmarshaling a JSON null into any other Go type has no effect
// on the value and produces no error.
//
// When unmarshaling quoted strings, invalid UTF-8 or
// invalid UTF-16 surrogate pairs are not treated as an error.
// Instead, they are replaced by the Unicode replacement
// character U+FFFD.
//
func Unmarshal(data []byte, v interface{}) error {
	return unmarshal(data, v)
}

// UnmarshalContext parses the JSON-encoded data and stores the result
// in the value pointed to by v. If you implement the UnmarshalerContext interface,
// call it with ctx as an argument.
func UnmarshalContext(ctx context.Context, data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
	return unmarshalContext(ctx, data, v)
}

func UnmarshalWithOption(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
	return unmarshal(data, v, optFuncs...)
}

func UnmarshalNoEscape(data []byte, v interface{}, optFuncs ...DecodeOptionFunc) error {
	return unmarshalNoEscape(data, v, optFuncs...)
}

// A Token holds a value of one of these types:
//
//	Delim, for the four JSON delimiters [ ] { }
//	bool, for JSON booleans
//	float64, for JSON numbers
//	Number, for JSON numbers
//	string, for JSON string literals
//	nil, for JSON null
//
type Token = json.Token

// A Number represents a JSON number literal.
type Number = json.Number

// RawMessage is a raw encoded JSON value.
// It implements Marshaler and Unmarshaler and can
// be used to delay JSON decoding or precompute a JSON encoding.
type RawMessage = json.RawMessage

// A Delim is a JSON array or object delimiter, one of [ ] { or }.
type Delim = json.Delim

// Compact appends to dst the JSON-encoded src with
// insignificant space characters elided.
func Compact(dst *bytes.Buffer, src []byte) error {
	return encoder.Compact(dst, src, false)
}

// Indent appends to dst an indented form of the JSON-encoded src.
// Each element in a JSON object or array begins on a new,
// indented line beginning with prefix followed by one or more
// copies of indent according to the indentation nesting.
// The data appended to dst does not begin with the prefix nor
// any indentation, to make it easier to embed inside other formatted JSON data.
// Although leading space characters (space, tab, carriage return, newline)
// at the beginning of src are dropped, trailing space characters
// at the end of src are preserved and copied to dst.
// For example, if src has no trailing spaces, neither will dst;
// if src ends in a trailing newline, so will dst.
func Indent(dst *bytes.Buffer, src []byte, prefix, indent string) error {
	return encoder.Indent(dst, src, prefix, indent)
}

// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
// so that the JSON will be safe to embed inside HTML <script> tags.
// For historical reasons, web browsers don't honor standard HTML
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst *bytes.Buffer, src []byte) {
	var v interface{}
	dec := NewDecoder(bytes.NewBuffer(src))
	dec.UseNumber()
	if err := dec.Decode(&v); err != nil {
		return
	}
	buf, _ := marshal(v)
	dst.Write(buf)
}

// Valid reports whether data is a valid JSON encoding.
func Valid(data []byte) bool {
	var v interface{}
	decoder := NewDecoder(bytes.NewReader(data))
	err := decoder.Decode(&v)
	if err != nil {
		return false
	}
	if !decoder.More() {
		return true
	}
	return decoder.InputOffset() >= int64(len(data))
}
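The exported API above mirrors encoding/json, so existing call sites only need to swap the import path. A minimal sketch of the struct-tag behavior documented in the Marshal comment (the User type and its fields are made up for illustration, not part of this commit):

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

type User struct {
	ID    int    `json:"id"`
	Name  string `json:"name"`
	Email string `json:"email,omitempty"` // dropped from the output when empty
	Token string `json:"-"`               // never serialized
}

func main() {
	out, err := json.Marshal(User{ID: 1, Name: "alice"})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"id":1,"name":"alice"}

	var u User
	if err := json.Unmarshal([]byte(`{"id":2,"name":"bob","email":"bob@example.com"}`), &u); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", u)
}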
46
vendor/github.com/goccy/go-json/option.go
generated
vendored
Normal file
@@ -0,0 +1,46 @@
package json

import (
	"github.com/goccy/go-json/internal/decoder"
	"github.com/goccy/go-json/internal/encoder"
)

type EncodeOption = encoder.Option
type EncodeOptionFunc func(*EncodeOption)

// UnorderedMap doesn't sort when encoding map type.
func UnorderedMap() EncodeOptionFunc {
	return func(opt *EncodeOption) {
		opt.Flag |= encoder.UnorderedMapOption
	}
}

// Debug outputs debug information when panic occurs during encoding.
func Debug() EncodeOptionFunc {
	return func(opt *EncodeOption) {
		opt.Flag |= encoder.DebugOption
	}
}

// Colorize add an identifier for coloring to the string of the encoded result.
func Colorize(scheme *ColorScheme) EncodeOptionFunc {
	return func(opt *EncodeOption) {
		opt.Flag |= encoder.ColorizeOption
		opt.ColorScheme = scheme
	}
}

type DecodeOption = decoder.Option
type DecodeOptionFunc func(*DecodeOption)

// DecodeFieldPriorityFirstWin
// in the default behavior, go-json, like encoding/json,
// will reflect the result of the last evaluation when a field with the same name exists.
// This option allow you to change this behavior.
// this option reflects the result of the first evaluation if a field with the same name exists.
// This behavior has a performance advantage as it allows the subsequent strings to be skipped if all fields have been evaluated.
func DecodeFieldPriorityFirstWin() DecodeOptionFunc {
	return func(opt *DecodeOption) {
		opt.Flags |= decoder.FirstWinOption
	}
}
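These option funcs plug into the *WithOption variants defined in json.go above. A short sketch of how they combine (the sample map, struct, and input data are illustrative only):

package main

import (
	"fmt"

	json "github.com/goccy/go-json"
)

func main() {
	// Skip the map-key sort that Marshal performs by default.
	out, err := json.MarshalWithOption(
		map[string]int{"b": 2, "a": 1},
		json.UnorderedMap(),
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // key order is unspecified with UnorderedMap

	// Keep the first of two duplicate keys instead of the last.
	var v struct {
		Name string `json:"name"`
	}
	data := []byte(`{"name":"first","name":"second"}`)
	if err := json.UnmarshalWithOption(data, &v, json.DecodeFieldPriorityFirstWin()); err != nil {
		panic(err)
	}
	fmt.Println(v.Name) // first
}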
14
vendor/github.com/lestrrat-go/backoff/v2/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,14 @@
# Binaries for programs and plugins
*.exe
*.dll
*.so
*.dylib

# Test binary, build with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
.glide/
7
vendor/github.com/lestrrat-go/backoff/v2/.golangci.yml
generated
vendored
Normal file
@@ -0,0 +1,7 @@
issues:
  exclude-rules:
    - path: /*_example_test.go
      linters:
        - errcheck
        - forbidigo
8
vendor/github.com/lestrrat-go/backoff/v2/Changes
generated
vendored
Normal file
@@ -0,0 +1,8 @@
v2.0.8 - 28 Feb 2021
 * Fix possible goroutine leak (#30)

v2.0.7 - 26 Jan 2021
 * Cosmetic go.mod / go.sum changes

v2.0.6 - 25 Jan 2021
 * Add jitter to constant backoff
Some files were not shown because too many files have changed in this diff.