用户登录和接口鉴权(User login and API authentication)

2025-09-07 21:13:15 +08:00
parent c4522b974b
commit 565cf3fa6a
380 changed files with 18330 additions and 16854 deletions

go.mod (52 changed lines)

@@ -1,47 +1,49 @@
 module git.huangwc.com/pig/pig-farm-controller
 
-go 1.24.0
+go 1.23
 
 require (
-	github.com/gin-gonic/gin v1.10.1
+	github.com/gin-gonic/gin v1.10.0
+	github.com/golang-jwt/jwt/v5 v5.2.1
+	golang.org/x/crypto v0.27.0
 	gopkg.in/yaml.v2 v2.4.0
-	gorm.io/driver/postgres v1.5.7
-	gorm.io/gorm v1.25.7
+	gorm.io/driver/postgres v1.5.9
+	gorm.io/gorm v1.25.10
 )
 
 require (
-	github.com/bytedance/gopkg v0.1.3 // indirect
-	github.com/bytedance/sonic v1.14.1 // indirect
-	github.com/bytedance/sonic/loader v0.3.0 // indirect
-	github.com/cloudwego/base64x v0.1.6 // indirect
-	github.com/gabriel-vasile/mimetype v1.4.10 // indirect
-	github.com/gin-contrib/sse v1.1.0 // indirect
+	github.com/bytedance/sonic v1.11.6 // indirect
+	github.com/bytedance/sonic/loader v0.1.1 // indirect
+	github.com/cloudwego/base64x v0.1.4 // indirect
+	github.com/cloudwego/iasm v0.2.0 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.3 // indirect
+	github.com/gin-contrib/sse v0.1.0 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
-	github.com/go-playground/validator/v10 v10.27.0 // indirect
-	github.com/goccy/go-json v0.10.5 // indirect
+	github.com/go-playground/validator/v10 v10.20.0 // indirect
+	github.com/goccy/go-json v0.10.2 // indirect
 	github.com/jackc/pgpassfile v1.0.0 // indirect
-	github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
-	github.com/jackc/pgx/v5 v5.4.3 // indirect
+	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
+	github.com/jackc/pgx/v5 v5.7.1 // indirect
+	github.com/jackc/puddle/v2 v2.2.2 // indirect
 	github.com/jinzhu/inflection v1.0.0 // indirect
 	github.com/jinzhu/now v1.1.5 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
-	github.com/klauspost/cpuid/v2 v2.3.0 // indirect
-	github.com/kr/pretty v0.3.1 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.7 // indirect
+	github.com/kr/text v0.2.0 // indirect
 	github.com/leodido/go-urn v1.4.0 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
 	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
 	github.com/modern-go/reflect2 v1.0.2 // indirect
-	github.com/pelletier/go-toml/v2 v2.2.4 // indirect
+	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
 	github.com/rogpeppe/go-internal v1.14.1 // indirect
-	github.com/stretchr/testify v1.11.1 // indirect
 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
-	github.com/ugorji/go/codec v1.3.0 // indirect
-	golang.org/x/arch v0.21.0 // indirect
-	golang.org/x/crypto v0.41.0 // indirect
-	golang.org/x/net v0.43.0 // indirect
-	golang.org/x/sys v0.36.0 // indirect
-	golang.org/x/text v0.28.0 // indirect
-	google.golang.org/protobuf v1.36.8 // indirect
+	github.com/ugorji/go/codec v1.2.12 // indirect
+	golang.org/x/arch v0.8.0 // indirect
+	golang.org/x/net v0.25.0 // indirect
+	golang.org/x/sync v0.8.0 // indirect
+	golang.org/x/sys v0.26.0 // indirect
+	golang.org/x/text v0.18.0 // indirect
+	google.golang.org/protobuf v1.34.1 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )

go.sum (118 changed lines)

@@ -1,50 +1,56 @@
github.com/bytedance/gopkg v0.1.3 h1:TPBSwH8RsouGCBcMBktLt1AymVo2TVsBVCY4b6TnZ/M= github.com/bytedance/sonic v1.11.6 h1:oUp34TzMlL+OY1OUWxHqsdkgC/Zfc85zGqw9siXjrc0=
github.com/bytedance/gopkg v0.1.3/go.mod h1:576VvJ+eJgyCzdjS+c4+77QF3p7ubbtiKARP3TxducM= github.com/bytedance/sonic v1.11.6/go.mod h1:LysEHSvpvDySVdC2f87zGWf6CIKJcAvqab1ZaiQtds4=
github.com/bytedance/sonic v1.14.1 h1:FBMC0zVz5XUmE4z9wF4Jey0An5FueFvOsTKKKtwIl7w= github.com/bytedance/sonic/loader v0.1.1 h1:c+e5Pt1k/cy5wMveRDyk2X4B9hF4g7an8N3zCYjJFNM=
github.com/bytedance/sonic v1.14.1/go.mod h1:gi6uhQLMbTdeP0muCnrjHLeCUPyb70ujhnNlhOylAFc= github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.3.0 h1:dskwH8edlzNMctoruo8FPTJDF3vLtDT0sXZwvZJyqeA= github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/bytedance/sonic/loader v0.3.0/go.mod h1:N8A3vUdtUebEY2/VQC0MyhYeKUFosQU6FxH2JmUe6VI= github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/base64x v0.1.6 h1:t11wG9AECkCDk5fMSoxmufanudBtJ+/HemLstXDLI2M= github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/base64x v0.1.6/go.mod h1:OFcloc187FXDaYHvrNIjxSe8ncn0OOM8gEHfghB2IPU= github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gabriel-vasile/mimetype v1.4.10 h1:zyueNbySn/z8mJZHLt6IPw0KoZsiQNszIpU+bX4+ZK0= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0=
github.com/gabriel-vasile/mimetype v1.4.10/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s= github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk=
github.com/gin-contrib/sse v1.1.0 h1:n0w2GMuUpWDVp7qSpvze6fAu9iRxJY4Hmj6AmBOU05w= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v1.1.0/go.mod h1:hxRZ5gVpWMT7Z0B0gSNYqqsSCNIJMjzvm6fqCz9vjwM= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.10.1 h1:T0ujvqyCSqRopADpgPgiTT63DUQVSfojyME59Ei63pQ= github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.1/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y= github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4= github.com/go-playground/validator/v10 v10.20.0 h1:K9ISHbSaI0lyB2eWMPJo+kOS/FBExVwjEviJTixqxL8=
github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo= github.com/go-playground/validator/v10 v10.20.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.4.3 h1:cxFyXhxlvAifxnkKKdlxv8XqUf59tDlYjnV5YYfsJJY= github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs=
github.com/jackc/pgx/v5 v5.4.3/go.mod h1:Ig06C2Vu0t5qXC60W8sqIthScaEnFvojjj9dSljmHRA= github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/klauspost/cpuid/v2 v2.3.0 h1:S4CRMLnYUhGeDFDqkGriYKdfoFlDnMtqTiI/sFzhA9Y= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/klauspost/cpuid/v2 v2.3.0/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= github.com/klauspost/cpuid/v2 v2.2.7 h1:ZWSB3igEs+d0qvnxR/ZBzXVmxkgt8DdzP6m9pfuVLDM=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/klauspost/cpuid/v2 v2.2.7/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
@@ -56,41 +62,47 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4= github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY= github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ= github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc= github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/ugorji/go/codec v1.3.0 h1:Qd2W2sQawAfG8XSvzwhBeoGq71zXOC/Q1E9y/wUcsUA= github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
github.com/ugorji/go/codec v1.3.0/go.mod h1:pRBVtBSKl77K30Bv8R2P+cLSGaTtex6fsA2Wjqmfxj4= github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
golang.org/x/arch v0.21.0 h1:iTC9o7+wP6cPWpDWkivCvQFGAHDQ59SrSxsLPcnkArw= golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.21.0/go.mod h1:dNHoOeKiyja7GTvF9NJS1l3Z2yntpQNzgrjh1cU103A= golang.org/x/arch v0.8.0 h1:3wRIsP3pM4yUptoR96otTUOXI367OS0+c9eeRi9doIc=
golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= golang.org/x/arch v0.8.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.36.0 h1:KVRy2GtZBrk1cBYA7MKu5bEZFxQk4NIDV6RLVcC8o0k= golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
golang.org/x/sys v0.36.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
google.golang.org/protobuf v1.36.8/go.mod h1:fuxRtAxBytpl4zzqUh6/eyUujkJdNiuEkXntxiD/uRU= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
@@ -99,7 +111,9 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.5.7 h1:8ptbNJTDbEmhdr62uReG5BGkdQyeasu/FZHxI0IMGnM= gorm.io/driver/postgres v1.5.9 h1:DkegyItji119OlcaLjqN11kHoUgZ/j13E0jkJZgD6A8=
gorm.io/driver/postgres v1.5.7/go.mod h1:3e019WlBaYI5o5LIdNV+LyxCMNtLOQETBXL2h4chKpA= gorm.io/driver/postgres v1.5.9/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/gorm v1.25.7 h1:VsD6acwRjz2zFxGO50gPO6AkNs7KKnvfzUjHQhZDz/A= gorm.io/gorm v1.25.10 h1:dQpO+33KalOA+aFYGlK+EfxcI5MbO7EP2yYygwh9h+s=
gorm.io/gorm v1.25.7/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8= gorm.io/gorm v1.25.10/go.mod h1:hbnx/Oo0ChWMn1BIhpy1oYozzpM15i4YPuHDmfYtwg8=
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@@ -10,7 +10,9 @@ import (
 	"time"
 
 	"git.huangwc.com/pig/pig-farm-controller/internal/config"
+	"git.huangwc.com/pig/pig-farm-controller/internal/controller/user"
 	"git.huangwc.com/pig/pig-farm-controller/internal/logs"
+	"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"
 
 	"github.com/gin-gonic/gin"
 )
@@ -26,13 +28,16 @@ type API struct {
 	// config 应用配置
 	config *config.Config
 
+	// userController 用户控制器
+	userController *user.Controller
+
 	// logger 日志记录器
 	logger *logs.Logger
 }
 
 // NewAPI 创建并返回一个新的API实例
 // 初始化Gin引擎和相关配置
-func NewAPI(cfg *config.Config) *API {
+func NewAPI(cfg *config.Config, userRepo repository.UserRepo) *API {
 	// 设置Gin为发布模式
 	gin.SetMode(gin.ReleaseMode)
@@ -56,9 +61,13 @@ func NewAPI(cfg *config.Config) *API {
 	engine.Use(gin.Recovery())
 
+	// 创建用户控制器
+	userController := user.NewController(userRepo)
+
 	return &API{
 		engine: engine,
 		config: cfg,
+		userController: userController,
 		logger: logs.NewLogger(),
 	}
 }
@@ -79,11 +88,11 @@ func (a *API) Start() error {
 	}
 
 	// 启动HTTP服务器
-	a.logger.Info(fmt.Sprintf("Starting HTTP server on %s:%d", a.config.Server.Host, a.config.Server.Port))
+	a.logger.Info(fmt.Sprintf("正在启动HTTP服务器 %s:%d", a.config.Server.Host, a.config.Server.Port))
 	go func() {
 		if err := a.server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
-			a.logger.Error(fmt.Sprintf("HTTP server startup failed: %v", err))
+			a.logger.Error(fmt.Sprintf("HTTP服务器启动失败: %v", err))
 		}
 	}()
@@ -92,7 +101,7 @@ func (a *API) Start() error {
 // Stop 停止HTTP服务器
 func (a *API) Stop() error {
-	a.logger.Info("Stopping HTTP server")
+	a.logger.Info("正在停止HTTP服务器")
 
 	// 创建一个5秒的超时上下文
 	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
@@ -100,11 +109,11 @@ func (a *API) Stop() error {
 	// 优雅地关闭服务器
 	if err := a.server.Shutdown(ctx); err != nil {
-		a.logger.Error(fmt.Sprintf("HTTP server shutdown error: %v", err))
+		a.logger.Error(fmt.Sprintf("HTTP服务器关闭错误: %v", err))
 		return err
 	}
 
-	a.logger.Info("HTTP server stopped")
+	a.logger.Info("HTTP服务器已停止")
 	return nil
 }
@@ -113,6 +122,13 @@ func (a *API) setupRoutes() {
 	// 基础路由示例
 	a.engine.GET("/health", a.healthHandler)
 
+	// 用户相关路由
+	userGroup := a.engine.Group("/api/v1/user")
+	{
+		userGroup.POST("/register", a.userController.Register)
+		userGroup.POST("/login", a.userController.Login)
+	}
+
 	// TODO: 添加更多路由
 }
@@ -127,6 +143,6 @@ func (a *API) setupRoutes() {
 func (a *API) healthHandler(c *gin.Context) {
 	c.JSON(http.StatusOK, gin.H{
 		"status": "ok",
-		"message": "Pig Farm Controller API is running",
+		"message": "猪场控制器API正在运行",
 	})
 }

View File

@@ -0,0 +1,148 @@
// Package middleware 提供HTTP中间件功能
// 包含鉴权、日志、恢复等中间件实现
package middleware
import (
"net/http"
"os"
"strings"
"time"
"git.huangwc.com/pig/pig-farm-controller/internal/logs"
"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"
"github.com/gin-gonic/gin"
"github.com/golang-jwt/jwt/v5"
"gorm.io/gorm"
)
// AuthMiddleware 鉴权中间件结构
type AuthMiddleware struct {
userRepo repository.UserRepo
logger *logs.Logger
}
// AuthUser 用于在上下文中存储的用户信息
type AuthUser struct {
ID uint `json:"id"`
Username string `json:"username"`
}
// JWTClaims 自定义JWT声明
type JWTClaims struct {
UserID uint `json:"user_id"`
Username string `json:"username"`
jwt.RegisteredClaims
}
// NewAuthMiddleware 创建鉴权中间件实例
func NewAuthMiddleware(userRepo repository.UserRepo) *AuthMiddleware {
return &AuthMiddleware{
userRepo: userRepo,
logger: logs.NewLogger(),
}
}
// getJWTSecret 获取JWT密钥
func (m *AuthMiddleware) getJWTSecret() []byte {
// 在实际项目中,应该从配置文件或环境变量中读取
secret := os.Getenv("JWT_SECRET")
if secret == "" {
secret = "pig-farm-controller-secret-key" // 默认密钥
}
return []byte(secret)
}
// GenerateToken 为用户生成JWT token
func (m *AuthMiddleware) GenerateToken(userID uint, username string) (string, error) {
claims := JWTClaims{
UserID: userID,
Username: username,
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)), // 24小时过期
IssuedAt: jwt.NewNumericDate(time.Now()),
NotBefore: jwt.NewNumericDate(time.Now()),
Issuer: "pig-farm-controller",
},
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
return token.SignedString(m.getJWTSecret())
}
// Handle 鉴权中间件处理函数
func (m *AuthMiddleware) Handle() gin.HandlerFunc {
return func(c *gin.Context) {
// 从请求头中获取认证信息
authHeader := c.GetHeader("Authorization")
if authHeader == "" {
c.JSON(http.StatusUnauthorized, gin.H{"error": "缺少认证信息"})
c.Abort()
return
}
// 检查Bearer token格式
if !strings.HasPrefix(authHeader, "Bearer ") {
c.JSON(http.StatusUnauthorized, gin.H{"error": "认证信息格式错误"})
c.Abort()
return
}
// 解析token
tokenString := strings.TrimPrefix(authHeader, "Bearer ")
// 验证token并获取用户信息
user, err := m.getUserFromJWT(tokenString)
if err != nil {
if err == gorm.ErrRecordNotFound {
c.JSON(http.StatusUnauthorized, gin.H{"error": "用户不存在"})
} else {
m.logger.Error("Token验证失败: " + err.Error())
c.JSON(http.StatusUnauthorized, gin.H{"error": "无效的认证令牌"})
}
c.Abort()
return
}
// 将用户信息保存到上下文中,供后续处理函数使用
c.Set("user", user)
// 继续处理请求
c.Next()
}
}
// getUserFromJWT 从JWT token中获取用户信息
func (m *AuthMiddleware) getUserFromJWT(tokenString string) (*AuthUser, error) {
// 解析token
token, err := jwt.ParseWithClaims(tokenString, &JWTClaims{}, func(token *jwt.Token) (interface{}, error) {
return m.getJWTSecret(), nil
})
if err != nil {
return nil, err
}
// 验证token
if !token.Valid {
return nil, gorm.ErrRecordNotFound
}
// 获取声明
claims, ok := token.Claims.(*JWTClaims)
if !ok {
return nil, gorm.ErrRecordNotFound
}
// 根据用户ID查找用户
userModel, err := m.userRepo.FindByID(claims.UserID)
if err != nil {
return nil, err
}
user := &AuthUser{
ID: userModel.ID,
Username: userModel.Username,
}
return user, nil
}
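
The middleware above only defines token issuance and verification; in this commit the only routes registered are /api/v1/user/register and /api/v1/user/login, and nothing mounts AuthMiddleware.Handle() on a route group yet. The sketch below shows one plausible way to wire it in. It is illustrative only: the package name api, the helper setupProtectedRoutes, and the /api/v1/profile route are assumptions, not part of the commit.

```go
// 示意性示例(非本次提交内容):为需要登录的接口挂载鉴权中间件。
package api // 假设:internal/api 包

import (
	"net/http"

	"git.huangwc.com/pig/pig-farm-controller/internal/api/middleware"
	"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"

	"github.com/gin-gonic/gin"
)

// setupProtectedRoutes 注册需要携带 Bearer 令牌才能访问的路由(假设的辅助函数)
func setupProtectedRoutes(engine *gin.Engine, userRepo repository.UserRepo) {
	authMW := middleware.NewAuthMiddleware(userRepo)

	protected := engine.Group("/api/v1")
	protected.Use(authMW.Handle())

	// 鉴权通过后,中间件已用 c.Set("user", ...) 注入 *middleware.AuthUser
	protected.GET("/profile", func(c *gin.Context) {
		u := c.MustGet("user").(*middleware.AuthUser)
		c.JSON(http.StatusOK, gin.H{"id": u.ID, "username": u.Username})
	})
}
```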

View File

@@ -77,12 +77,12 @@ func (c *Config) Load(path string) error {
 	// 读取配置文件
 	data, err := os.ReadFile(path)
 	if err != nil {
-		return fmt.Errorf("failed to read config file: %v", err)
+		return fmt.Errorf("配置文件读取失败: %v", err)
 	}
 
 	// 解析YAML配置
 	if err := yaml.Unmarshal(data, c); err != nil {
-		return fmt.Errorf("failed to parse config file: %v", err)
+		return fmt.Errorf("配置文件解析失败: %v", err)
 	}
 
 	return nil

View File

@@ -0,0 +1,126 @@
// Package user 提供用户相关功能的控制器
// 实现用户注册、登录等操作
package user
import (
"net/http"
"git.huangwc.com/pig/pig-farm-controller/internal/api/middleware"
"git.huangwc.com/pig/pig-farm-controller/internal/logs"
"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"
"github.com/gin-gonic/gin"
"golang.org/x/crypto/bcrypt"
)
// Controller 用户控制器
type Controller struct {
userRepo repository.UserRepo
logger *logs.Logger
}
// NewController 创建用户控制器实例
func NewController(userRepo repository.UserRepo) *Controller {
return &Controller{
userRepo: userRepo,
logger: logs.NewLogger(),
}
}
// RegisterRequest 注册请求结构体
type RegisterRequest struct {
Username string `json:"username" binding:"required"`
Password string `json:"password" binding:"required"`
}
// RegisterResponse 注册响应结构体
type RegisterResponse struct {
ID uint `json:"id"`
Username string `json:"username"`
CreatedAt string `json:"created_at"`
}
// Register 用户注册
func (c *Controller) Register(ctx *gin.Context) {
var req RegisterRequest
if err := ctx.ShouldBindJSON(&req); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": "请求参数错误"})
return
}
user, err := c.userRepo.CreateUser(req.Username, req.Password)
if err != nil {
c.logger.Error("创建用户失败: " + err.Error())
ctx.JSON(http.StatusInternalServerError, gin.H{"error": "创建用户失败"})
return
}
response := RegisterResponse{
ID: user.ID,
Username: user.Username,
CreatedAt: user.CreatedAt.Format("2006-01-02 15:04:05"),
}
ctx.JSON(http.StatusOK, gin.H{
"message": "用户创建成功",
"user": response,
})
}
// LoginRequest 登录请求结构体
type LoginRequest struct {
Username string `json:"username" binding:"required"`
Password string `json:"password" binding:"required"`
}
// LoginResponse 登录响应结构体
type LoginResponse struct {
ID uint `json:"id"`
Username string `json:"username"`
Token string `json:"token"`
CreatedAt string `json:"created_at"`
}
// Login 用户登录
func (c *Controller) Login(ctx *gin.Context) {
var req LoginRequest
if err := ctx.ShouldBindJSON(&req); err != nil {
ctx.JSON(http.StatusBadRequest, gin.H{"error": "请求参数错误"})
return
}
// 查找用户
user, err := c.userRepo.FindByUsername(req.Username)
if err != nil {
c.logger.Error("查找用户失败: " + err.Error())
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "用户名或密码错误"})
return
}
// 验证密码
err = bcrypt.CompareHashAndPassword([]byte(user.PasswordHash), []byte(req.Password))
if err != nil {
ctx.JSON(http.StatusUnauthorized, gin.H{"error": "用户名或密码错误"})
return
}
// 生成JWT访问令牌
authMiddleware := middleware.NewAuthMiddleware(c.userRepo)
token, err := authMiddleware.GenerateToken(user.ID, user.Username)
if err != nil {
c.logger.Error("生成令牌失败: " + err.Error())
ctx.JSON(http.StatusInternalServerError, gin.H{"error": "登录失败"})
return
}
response := LoginResponse{
ID: user.ID,
Username: user.Username,
Token: token,
CreatedAt: user.CreatedAt.Format("2006-01-02 15:04:05"),
}
ctx.JSON(http.StatusOK, gin.H{
"message": "登录成功",
"user": response,
})
}
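
For reference, a client-side sketch of the login flow these handlers expose: post credentials to /api/v1/user/login, read the token from the response, then send it as a Bearer token on later requests. The base URL and credentials are placeholder assumptions (the real host and port come from config.yml), and /api/v1/profile is only an example of a protected route such as the one sketched after the middleware above.

```go
// 示意性客户端示例(非本次提交内容)
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	baseURL := "http://localhost:8080" // 假设值,实际由 config.yml 决定

	// 登录,请求体对应 LoginRequest
	body, _ := json.Marshal(map[string]string{"username": "admin", "password": "secret"})
	resp, err := http.Post(baseURL+"/api/v1/user/login", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// 响应形如 {"message":"登录成功","user":{"id":1,"username":"admin","token":"...","created_at":"..."}}
	var result struct {
		Message string `json:"message"`
		User    struct {
			Token string `json:"token"`
		} `json:"user"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		panic(err)
	}

	// 携带 Bearer 令牌访问受保护接口(/api/v1/profile 仅为假设路由)
	req, _ := http.NewRequest(http.MethodGet, baseURL+"/api/v1/profile", nil)
	req.Header.Set("Authorization", "Bearer "+result.User.Token)
	protectedResp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer protectedResp.Body.Close()
	fmt.Println("protected status:", protectedResp.Status)
}
```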

View File

@@ -10,6 +10,7 @@ import (
 	"git.huangwc.com/pig/pig-farm-controller/internal/config"
 	"git.huangwc.com/pig/pig-farm-controller/internal/logs"
 	"git.huangwc.com/pig/pig-farm-controller/internal/storage/db"
+	"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"
 	"git.huangwc.com/pig/pig-farm-controller/internal/task"
 )
@@ -25,6 +26,9 @@ type Application struct {
 	// TaskExecutor 任务执行器组件实例
 	TaskExecutor *task.Executor
 
+	// UserRepo 用户仓库实例
+	UserRepo repository.UserRepo
+
 	// Config 应用配置
 	Config *config.Config
@@ -46,8 +50,11 @@ func NewApplication(cfg *config.Config) *Application {
 	// 初始化存储组件
 	store := db.NewStorage(connectionString, maxOpenConns, maxIdleConns, connMaxLifetime)
 
+	// 初始化用户仓库
+	userRepo := repository.NewUserRepo(store.GetDB())
+
 	// 初始化API组件
-	apiInstance := api.NewAPI(cfg)
+	apiInstance := api.NewAPI(cfg, userRepo)
 
 	// 初始化任务执行器组件(使用5个工作协程)
 	taskExecutor := task.NewExecutor(5)
@@ -56,6 +63,7 @@ func NewApplication(cfg *config.Config) *Application {
 		Storage: store,
 		API: apiInstance,
 		TaskExecutor: taskExecutor,
+		UserRepo: userRepo,
 		Config: cfg,
 		logger: logs.NewLogger(),
 	}
@@ -66,19 +74,19 @@ func NewApplication(cfg *config.Config) *Application {
 func (app *Application) Start() error {
 	// 启动存储组件
 	if err := app.Storage.Connect(); err != nil {
-		return fmt.Errorf("failed to connect to storage: %v", err)
+		return fmt.Errorf("存储连接失败: %v", err)
 	}
-	app.logger.Info("Storage connected successfully")
+	app.logger.Info("存储连接成功")
 
 	// 启动API组件
 	if err := app.API.Start(); err != nil {
-		return fmt.Errorf("failed to start API: %v", err)
+		return fmt.Errorf("API启动失败: %v", err)
 	}
-	app.logger.Info("API started successfully")
+	app.logger.Info("API启动成功")
 
 	// 启动任务执行器组件
 	app.TaskExecutor.Start()
-	app.logger.Info("Task executor started successfully")
+	app.logger.Info("任务执行器启动成功")
 
 	return nil
 }
@@ -88,18 +96,18 @@ func (app *Application) Start() error {
 func (app *Application) Stop() error {
 	// 停止API组件
 	if err := app.API.Stop(); err != nil {
-		app.logger.Error(fmt.Sprintf("Failed to stop API: %v", err))
+		app.logger.Error(fmt.Sprintf("API停止失败: %v", err))
 	}
 
 	// 停止任务执行器组件
 	app.TaskExecutor.Stop()
-	app.logger.Info("Task executor stopped successfully")
+	app.logger.Info("任务执行器已停止")
 
 	// 停止存储组件
 	if err := app.Storage.Disconnect(); err != nil {
-		return fmt.Errorf("failed to disconnect from storage: %v", err)
+		return fmt.Errorf("存储断开连接失败: %v", err)
 	}
-	app.logger.Info("Storage disconnected successfully")
+	app.logger.Info("存储断开连接成功")
 
 	return nil
 }

View File

@@ -23,20 +23,20 @@ func NewLogger() *Logger {
 // Info 记录信息级别日志
 func (l *Logger) Info(message string) {
-	l.logger.Printf("[INFO] %s %s", time.Now().Format(time.RFC3339), message)
+	l.logger.Printf("[信息] %s %s", time.Now().Format(time.RFC3339), message)
 }
 
 // Error 记录错误级别日志
 func (l *Logger) Error(message string) {
-	l.logger.Printf("[ERROR] %s %s", time.Now().Format(time.RFC3339), message)
+	l.logger.Printf("[错误] %s %s", time.Now().Format(time.RFC3339), message)
 }
 
 // Debug 记录调试级别日志
 func (l *Logger) Debug(message string) {
-	l.logger.Printf("[DEBUG] %s %s", time.Now().Format(time.RFC3339), message)
+	l.logger.Printf("[调试] %s %s", time.Now().Format(time.RFC3339), message)
 }
 
 // Warn 记录警告级别日志
 func (l *Logger) Warn(message string) {
-	l.logger.Printf("[WARN] %s %s", time.Now().Format(time.RFC3339), message)
+	l.logger.Printf("[警告] %s %s", time.Now().Format(time.RFC3339), message)
 }

internal/model/user.go (new file, 35 lines)

@@ -0,0 +1,35 @@
// Package model 提供数据模型定义
// 包含用户、猪舍、饲料等相关数据结构
package model
import (
"time"
"gorm.io/gorm"
)
// User 代表系统用户
type User struct {
// ID 用户ID
ID uint `gorm:"primaryKey;column:id" json:"id"`
// Username 用户名
Username string `gorm:"uniqueIndex;not null;column:username" json:"username"`
// PasswordHash 密码哈希值
PasswordHash string `gorm:"not null;column:password_hash" json:"-"`
// CreatedAt 创建时间
CreatedAt time.Time `gorm:"column:created_at" json:"created_at"`
// UpdatedAt 更新时间
UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at"`
// DeletedAt 删除时间(用于软删除)
DeletedAt gorm.DeletedAt `gorm:"index;column:deleted_at" json:"-"`
}
// TableName 指定User模型对应的数据库表名
func (User) TableName() string {
return "users"
}
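
The diff adds the User model but does not show where the users table itself is created, so a migration presumably exists elsewhere or is still to come. If it does not, GORM can derive the table from this model via AutoMigrate. A minimal sketch, assuming a helper in the storage layer (the package name db and the function name migrateUsers are hypothetical):

```go
package db // 假设:internal/storage/db 包

import (
	"fmt"

	"git.huangwc.com/pig/pig-farm-controller/internal/model"
	"gorm.io/gorm"
)

// migrateUsers 根据 User 模型自动创建/更新 users 表(示意用的假设函数)
func migrateUsers(gdb *gorm.DB) error {
	if err := gdb.AutoMigrate(&model.User{}); err != nil {
		return fmt.Errorf("users 表迁移失败: %v", err)
	}
	return nil
}
```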

View File

@@ -49,25 +49,25 @@ func NewPostgresStorage(connectionString string, maxOpenConns, maxIdleConns, con
 // Connect 建立与PostgreSQL数据库的连接
 // 使用GORM建立数据库连接
 func (ps *PostgresStorage) Connect() error {
-	ps.logger.Info("Connecting to PostgreSQL database")
+	ps.logger.Info("正在连接PostgreSQL数据库")
 
 	var err error
 	ps.db, err = gorm.Open(postgres.Open(ps.connectionString), &gorm.Config{})
 	if err != nil {
-		ps.logger.Error(fmt.Sprintf("Failed to connect to database: %v", err))
-		return fmt.Errorf("failed to connect to database: %v", err)
+		ps.logger.Error(fmt.Sprintf("数据库连接失败: %v", err))
+		return fmt.Errorf("数据库连接失败: %v", err)
 	}
 
 	// 测试连接
 	sqlDB, err := ps.db.DB()
 	if err != nil {
-		ps.logger.Error(fmt.Sprintf("Failed to get database instance: %v", err))
-		return fmt.Errorf("failed to get database instance: %v", err)
+		ps.logger.Error(fmt.Sprintf("获取数据库实例失败: %v", err))
+		return fmt.Errorf("获取数据库实例失败: %v", err)
 	}
 
 	if err = sqlDB.Ping(); err != nil {
-		ps.logger.Error(fmt.Sprintf("Failed to ping database: %v", err))
-		return fmt.Errorf("failed to ping database: %v", err)
+		ps.logger.Error(fmt.Sprintf("数据库连接测试失败: %v", err))
+		return fmt.Errorf("数据库连接测试失败: %v", err)
 	}
 
 	// 设置连接池参数
@@ -75,7 +75,7 @@ func (ps *PostgresStorage) Connect() error {
 	sqlDB.SetMaxIdleConns(ps.maxIdleConns)
 	sqlDB.SetConnMaxLifetime(time.Duration(ps.connMaxLifetime) * time.Second)
 
-	ps.logger.Info("Successfully connected to PostgreSQL database")
+	ps.logger.Info("PostgreSQL数据库连接成功")
 	return nil
 }
@@ -83,19 +83,19 @@ func (ps *PostgresStorage) Connect() error {
 // 安全地关闭所有数据库连接
 func (ps *PostgresStorage) Disconnect() error {
 	if ps.db != nil {
-		ps.logger.Info("Disconnecting from PostgreSQL database")
+		ps.logger.Info("正在断开PostgreSQL数据库连接")
 
 		sqlDB, err := ps.db.DB()
 		if err != nil {
-			ps.logger.Error(fmt.Sprintf("Failed to get database instance: %v", err))
-			return fmt.Errorf("failed to get database instance: %v", err)
+			ps.logger.Error(fmt.Sprintf("获取数据库实例失败: %v", err))
+			return fmt.Errorf("获取数据库实例失败: %v", err)
 		}
 
 		if err := sqlDB.Close(); err != nil {
-			ps.logger.Error(fmt.Sprintf("Failed to close database connection: %v", err))
-			return fmt.Errorf("failed to close database connection: %v", err)
+			ps.logger.Error(fmt.Sprintf("关闭数据库连接失败: %v", err))
+			return fmt.Errorf("关闭数据库连接失败: %v", err)
 		}
 
-		ps.logger.Info("Successfully disconnected from PostgreSQL database")
+		ps.logger.Info("PostgreSQL数据库连接已断开")
 	}
 
 	return nil
 }

View File

@@ -0,0 +1,84 @@
// Package repository 提供数据访问层实现
// 包含各种数据实体的仓库接口和实现
package repository
import (
"fmt"
"git.huangwc.com/pig/pig-farm-controller/internal/model"
"golang.org/x/crypto/bcrypt"
"gorm.io/gorm"
)
// UserRepo 用户仓库接口
type UserRepo interface {
// CreateUser 创建新用户
CreateUser(username, password string) (*model.User, error)
// FindByUsername 根据用户名查找用户
FindByUsername(username string) (*model.User, error)
// FindByID 根据ID查找用户
FindByID(id uint) (*model.User, error)
}
// userRepo 用户仓库实现
type userRepo struct {
db *gorm.DB
}
// NewUserRepo 创建用户仓库实例
func NewUserRepo(db *gorm.DB) UserRepo {
return &userRepo{
db: db,
}
}
// CreateUser 创建新用户
func (r *userRepo) CreateUser(username, password string) (*model.User, error) {
// 检查用户是否已存在
var existingUser model.User
result := r.db.Where("username = ?", username).First(&existingUser)
if result.Error == nil {
return nil, fmt.Errorf("用户已存在")
}
// 对密码进行哈希处理
hashedPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
if err != nil {
return nil, fmt.Errorf("密码加密失败: %v", err)
}
// 创建新用户
user := &model.User{
Username: username,
PasswordHash: string(hashedPassword),
}
result = r.db.Create(user)
if result.Error != nil {
return nil, fmt.Errorf("用户创建失败: %v", result.Error)
}
return user, nil
}
// FindByUsername 根据用户名查找用户
func (r *userRepo) FindByUsername(username string) (*model.User, error) {
var user model.User
result := r.db.Where("username = ?", username).First(&user)
if result.Error != nil {
return nil, result.Error
}
return &user, nil
}
// FindByID 根据ID查找用户
func (r *userRepo) FindByID(id uint) (*model.User, error) {
var user model.User
result := r.db.First(&user, id)
if result.Error != nil {
return nil, result.Error
}
return &user, nil
}
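
A short usage sketch tying the pieces together: CreateUser stores only the bcrypt hash in password_hash, and a login check is FindByUsername followed by bcrypt.CompareHashAndPassword, which is exactly what Controller.Login does. The DSN and the example credentials below are placeholder assumptions.

```go
package main

import (
	"fmt"

	"git.huangwc.com/pig/pig-farm-controller/internal/storage/repository"
	"golang.org/x/crypto/bcrypt"
	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

func main() {
	dsn := "host=localhost user=pig password=pig dbname=pig port=5432 sslmode=disable" // 假设值
	gdb, err := gorm.Open(postgres.Open(dsn), &gorm.Config{})
	if err != nil {
		panic(err)
	}

	repo := repository.NewUserRepo(gdb)

	// 创建用户:明文密码在仓库内部被 bcrypt 哈希后写入 password_hash
	u, err := repo.CreateUser("alice", "plain-password")
	if err != nil {
		panic(err)
	}
	fmt.Println("created user id:", u.ID)

	// 登录校验的核心逻辑:按用户名查找,再比较 bcrypt 哈希
	found, err := repo.FindByUsername("alice")
	if err != nil {
		panic(err)
	}
	if bcrypt.CompareHashAndPassword([]byte(found.PasswordHash), []byte("plain-password")) == nil {
		fmt.Println("password ok")
	}
}
```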

View File

@@ -65,7 +65,7 @@ func (tq *TaskQueue) AddTask(task Task) {
 		priority: task.GetPriority(),
 	}
 	heap.Push(tq.queue, item)
-	tq.logger.Info("Task added to queue: " + task.GetID())
+	tq.logger.Info("任务已添加到队列: " + task.GetID())
 }
 
 // GetNextTask 获取下一个要执行的任务(优先级最高的任务)
@@ -79,7 +79,7 @@ func (tq *TaskQueue) GetNextTask() Task {
 	// 获取优先级最高的任务
 	item := heap.Pop(tq.queue).(*taskItem)
-	tq.logger.Info("Task retrieved from queue: " + item.task.GetID())
+	tq.logger.Info("从队列中获取任务: " + item.task.GetID())
 
 	return item.task
 }
@@ -160,7 +160,7 @@ func NewExecutor(workers int) *Executor {
 // Start 启动任务执行器
 func (e *Executor) Start() {
-	e.logger.Info(fmt.Sprintf("Starting task executor with %d workers", e.workers))
+	e.logger.Info(fmt.Sprintf("正在启动任务执行器,工作协程数: %d", e.workers))
 
 	// 启动工作协程
 	for i := 0; i < e.workers; i++ {
@@ -168,12 +168,12 @@ func (e *Executor) Start() {
 		go e.worker(i)
 	}
 
-	e.logger.Info("Task executor started successfully")
+	e.logger.Info("任务执行器启动成功")
 }
 
 // Stop 停止任务执行器
 func (e *Executor) Stop() {
-	e.logger.Info("Stopping task executor")
+	e.logger.Info("正在停止任务执行器")
 
 	// 取消上下文
 	e.cancel()
@@ -181,37 +181,37 @@ func (e *Executor) Stop() {
 	// 等待所有工作协程结束
 	e.wg.Wait()
 
-	e.logger.Info("Task executor stopped successfully")
+	e.logger.Info("任务执行器已停止")
 }
 
 // SubmitTask 提交任务到执行器
 func (e *Executor) SubmitTask(task Task) {
 	e.taskQueue.AddTask(task)
-	e.logger.Info("Task submitted: " + task.GetID())
+	e.logger.Info("任务已提交: " + task.GetID())
 }
 
 // worker 工作协程
 func (e *Executor) worker(id int) {
 	defer e.wg.Done()
-	e.logger.Info(fmt.Sprintf("Worker (id = %d) started", id))
+	e.logger.Info(fmt.Sprintf("工作协程(id = %d)已启动", id))
 
 	for {
 		select {
 		case <-e.ctx.Done():
-			e.logger.Info(fmt.Sprintf("Worker %d stopped", id))
+			e.logger.Info(fmt.Sprintf("工作协程 %d 已停止", id))
 			return
 		default:
 			// 获取下一个任务
 			task := e.taskQueue.GetNextTask()
 			if task != nil {
-				e.logger.Info(fmt.Sprintf("Worker %d executing task: %s", id, task.GetID()))
+				e.logger.Info(fmt.Sprintf("工作协程 %d 正在执行任务: %s", id, task.GetID()))
 
 				// 执行任务
 				if err := task.Execute(); err != nil {
-					e.logger.Error("Task execution failed: " + task.GetID() + ", error: " + err.Error())
+					e.logger.Error("任务执行失败: " + task.GetID() + ", 错误: " + err.Error())
 				} else {
-					e.logger.Info("Task executed successfully: " + task.GetID())
+					e.logger.Info("任务执行成功: " + task.GetID())
 				}
 			} else {
 				// 没有任务时短暂休眠

main.go (10 changed lines)

@@ -24,7 +24,7 @@ func main() {
 	// 加载配置
 	cfg := config.NewConfig()
 	if err := cfg.Load("config.yml"); err != nil {
-		logger.Error("Failed to load config: " + err.Error())
+		logger.Error("配置加载失败: " + err.Error())
 		os.Exit(1)
 	}
@@ -33,12 +33,12 @@ func main() {
 	// 启动核心应用
 	if err := app.Start(); err != nil {
-		logger.Error("Failed to start application: " + err.Error())
+		logger.Error("应用启动失败: " + err.Error())
 		os.Exit(1)
 	}
 
 	// 记录应用启动成功
-	logger.Info("Application started successfully")
+	logger.Info("应用启动成功")
 
 	// 等待中断信号以优雅地关闭应用
 	sigChan := make(chan os.Signal, 1)
@@ -47,10 +47,10 @@ func main() {
 	// 停止核心应用
 	if err := app.Stop(); err != nil {
-		logger.Error("Failed to stop application: " + err.Error())
+		logger.Error("应用停止失败: " + err.Error())
 		os.Exit(1)
 	}
 
 	// 记录应用停止成功
-	logger.Info("Application stopped successfully")
+	logger.Info("应用停止成功")
 }

View File

@@ -49,7 +49,4 @@ ast/bench.sh
 !testdata/*.json.gz
 fuzz/testdata
-*__debug_bin*
-*pprof
-*coverage.txt
-tools/venv/*
+*__debug_bin

View File

@@ -4,6 +4,3 @@
 [submodule "tools/simde"]
 	path = tools/simde
 	url = https://github.com/simd-everywhere/simde.git
-[submodule "fuzz/go-fuzz-corpus"]
-	path = fuzz/go-fuzz-corpus
-	url = https://github.com/dvyukov/go-fuzz-corpus.git

View File

@@ -6,10 +6,9 @@ A blazingly fast JSON serializing & deserializing library, accelerated by JI
 ## Requirement
 
-- Go: 1.18~1.25
-- Notice: Go1.24.0 is not supported due to the [issue](https://github.com/golang/go/issues/71672), please use higher go version or add build tag `--ldflags="-checklinkname=0"`
-- OS: Linux / MacOS / Windows
-- CPU: AMD64 / (ARM64, need go1.20 above)
+- Go 1.16~1.22
+- Linux / MacOS / Windows(need go1.17 above)
+- Amd64 ARCH
 
 ## Features
@@ -212,7 +211,7 @@ ret, err := Encode(v, EscapeHTML) // ret == `{"\u0026\u0026":{"X":"\u003c\u003e"
 ### Compact Format
 
-Sonic encodes primitive objects (struct/map...) as compact-format JSON by default, except marshaling `json.RawMessage` or `json.Marshaler`: sonic ensures validating their output JSON but **DO NOT** compacting them for performance concerns. We provide the option `encoder.CompactMarshaler` to add compacting process.
+Sonic encodes primitive objects (struct/map...) as compact-format JSON by default, except marshaling `json.RawMessage` or `json.Marshaler`: sonic ensures validating their output JSON but **DONOT** compacting them for performance concerns. We provide the option `encoder.CompactMarshaler` to add compacting process.
 
 ### Print Error
@@ -283,22 +282,6 @@ sub := root.Get("key3").Index(2).Int64() // == 3
 **Tip**: since `Index()` uses offset to locate data, which is much faster than scanning like `Get()`, we suggest you use it as much as possible. And sonic also provides another API `IndexOrGet()` to underlying use offset as well as ensure the key is matched.
 
-#### SearchOption
-`Searcher` provides some options for user to meet different needs:
-```go
-opts := ast.SearchOption{ CopyReturn: true ... }
-val, err := sonic.GetWithOptions(JSON, opts, "key")
-```
-- CopyReturn
-Indicate the searcher to copy the result JSON string instead of refer from the input. This can help to reduce memory usage if you cache the results
-- ConcurentRead
-Since `ast.Node` use `Lazy-Load` design, it doesn't support Concurrently-Read by default. If you want to read it concurrently, please specify it.
-- ValidateJSON
-Indicate the searcher to validate the entire JSON. This option is enabled by default, which slow down the search speed a little.
-
 #### Set/Unset
 
 Modify the json content by Set()/Unset()
@@ -385,12 +368,16 @@ See [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go
 ## Compatibility
 
-For developers who want to use sonic to meet different scenarios, we provide some integrated configs as `sonic.API`
-- `ConfigDefault`: the sonic's default config (`EscapeHTML=false`,`SortKeys=false`...) to run sonic fast meanwhile ensure security.
-- `ConfigStd`: the std-compatible config (`EscapeHTML=true`,`SortKeys=true`...)
-- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) to run on sonic as fast as possible.
-Sonic **DOES NOT** ensure to support all environments, due to the difficulty of developing high-performance codes. On non-sonic-supporting environment, the implementation will fall back to `encoding/json`. Thus below configs will all equal to `ConfigStd`.
+Sonic **DOES NOT** ensure to support all environments, due to the difficulty of developing high-performance codes. For developers who use sonic to build their applications in different environments, we have the following suggestions:
+
+- Developing on **Mac M1**: Make sure you have Rosetta 2 installed on your machine, and set `GOARCH=amd64` when building your application. Rosetta 2 can automatically translate x86 binaries to arm64 binaries and run x86 applications on Mac M1.
+- Developing on **Linux arm64**: You can install qemu and use the `qemu-x86_64 -cpu max` command to convert x86 binaries to amr64 binaries for applications built with sonic. The qemu can achieve a similar transfer effect to Rosetta 2 on Mac M1.
+
+For developers who want to use sonic on Linux arm64 without qemu, or those who want to handle JSON strictly consistent with `encoding/json`, we provide some compatible APIs as `sonic.API`
+- `ConfigDefault`: the sonic's default config (`EscapeHTML=false`,`SortKeys=false`...) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options like `SortKeys=false` will be invalid.
+- `ConfigStd`: the std-compatible config (`EscapeHTML=true`,`SortKeys=true`...) to run on sonic-supporting environment. It will fall back to `encoding/json`.
+- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) to run on sonic-supporting environment. It will fall back to `encoding/json` with the corresponding config, and some options will be invalid.
 
 ## Tips
@@ -479,23 +466,6 @@ For better performance, in previous case the `ast.Visitor` will be the better ch
 But `ast.Visitor` is not a very handy API. You might need to write a lot of code to implement your visitor and carefully maintain the tree hierarchy during decoding. Please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully if you decide to use this API.
 
-### Buffer Size
-Sonic use memory pool in many places like `encoder.Encode`, `ast.Node.MarshalJSON` to improve performance, which may produce more memory usage (in-use) when server's load is high. See [issue 614](https://github.com/bytedance/sonic/issues/614). Therefore, we introduce some options to let user control the behavior of memory pool. See [option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables) package.
-
-### Faster JSON Skip
-For security, sonic use [FSM](native/skip_one.c) algorithm to validate JSON when decoding raw JSON or encoding `json.Marshaler`, which is much slower (1~10x) than [SIMD-searching-pair](native/skip_one_fast.c) algorithm. If user has many redundant JSON value and DO NOT NEED to strictly validate JSON correctness, you can enable below options:
-
-- `Config.NoValidateSkipJSON`: for faster skipping JSON when decoding, such as unknown fields, json.Unmarshaler(json.RawMessage), mismatched values, and redundant array elements
-- `Config.NoValidateJSONMarshaler`: avoid validating JSON when encoding `json.Marshaler`
-- `SearchOption.ValidateJSON`: indicates if validate located JSON value when `Get`
-
-## JSON-Path Support (GJSON)
-[tidwall/gjson](https://github.com/tidwall/gjson) has provided a comprehensive and popular JSON-Path API, and
-a lot of older codes heavily relies on it. Therefore, we provides a wrapper library, which combines gjson's API with sonic's SIMD algorithm to boost up the performance. See [cloudwego/gjson](https://github.com/cloudwego/gjson).
-
 ## Community
 
 Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud native ecosystem.

View File

@@ -6,10 +6,9 @@
## 依赖 ## 依赖
- Go: 1.18~1.25 - Go 1.16~1.22
- 注意Go1.24.0 由于 [issue](https://github.com/golang/go/issues/71672) 不可用,请升级到更高 Go 版本,或添加编译选项 `--ldflags="-checklinkname=0"` - Linux / MacOS / Windows需要 Go1.17 以上)
- OS: Linux / MacOS / Windows - Amd64 架构
- CPU: AMD64 / (ARM64, 需要 Go1.20 以上)
## 接口 ## 接口
@@ -261,7 +260,7 @@ fmt.Printf("%+v", data) // {A:0 B:1}
### `Ast.Node` ### `Ast.Node`
Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化,并提供了获取和修改JSON数据的鲁棒的 API。 Sonic/ast.Node 是完全独立的 JSON 抽象语法树库。它实现了序列化和反序列化,并提供了获取和修改通用数据的鲁棒的 API。
#### 查找/索引 #### 查找/索引
@@ -283,22 +282,6 @@ sub := root.Get("key3").Index(2).Int64() // == 3
**注意**:由于 `Index()` 使用偏移量来定位数据,比使用扫描的 `Get()` 要快的多,建议尽可能的使用 `Index` 。 Sonic 也提供了另一个 API `IndexOrGet()` ,以偏移量为基础并且也确保键的匹配。 **注意**:由于 `Index()` 使用偏移量来定位数据,比使用扫描的 `Get()` 要快的多,建议尽可能的使用 `Index` 。 Sonic 也提供了另一个 API `IndexOrGet()` ,以偏移量为基础并且也确保键的匹配。
#### 查找选项
`ast.Searcher`提供了一些选项,以满足用户的不同需求:
```go
opts := ast.SearchOption{CopyReturn: true…}
val, err := sonic.GetWithOptions(JSON, opts, "key")
```
- CopyReturn
指示搜索器复制结果JSON字符串而不是从输入引用。如果用户缓存结果这有助于减少内存使用
- ConcurentRead
因为`ast.Node`使用`Lazy-Load`设计,默认不支持并发读取。如果您想同时读取,请指定它。
- ValidateJSON
指示搜索器来验证整个JSON。默认情况下启用该选项, 但是对于查找速度有一定影响。
#### 修改 #### 修改
使用 `Set()` / `Unset()` 修改 json 的内容 使用 `Set()` / `Unset()` 修改 json 的内容
@@ -385,12 +368,16 @@ type Visitor interface {
## Compatibility ## Compatibility
For developers who want to use sonic in different scenarios, we provide some integrated configs: Because of the difficulty of developing high-performance code, Sonic does **not** guarantee support for all environments. For developers building applications with Sonic in different environments, we have the following suggestions:
- `ConfigDefault`: sonic's default config (`EscapeHTML=false`, `SortKeys=false`…), which keeps performance while taking safety into account - Developing on **Mac M1**: make sure Rosetta 2 is installed on your machine and set `GOARCH=amd64` when building. Rosetta 2 can automatically translate x86 binaries and run x86 applications on Mac M1
- `ConfigStd`: a config fully compatible with `encoding/json` - Developing on **Linux arm64**: you can install qemu and use the `qemu-x86_64 -cpu max` command to run x86 binaries on arm64. qemu achieves a translation effect similar to Rosetta 2 on Mac M1.
- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`...), which gives the best performance but skips some safety checks (UTF-8 validation, etc.)
Sonic does **not** guarantee support for all environments, because of the difficulty of developing high-performance code. In environments where sonic is not supported, the implementation falls back to `encoding/json`, so all of the configs above become equivalent to `ConfigStd`. For developers who want to use sonic without qemu, or who need JSON handling strictly consistent with `encoding/json`, we provide some compatibility APIs in `sonic.API` (see the sketch after this list)
- `ConfigDefault`: sonic's default config (`EscapeHTML=false`, `SortKeys=false`, etc.) in environments where sonic is supported. Its behavior matches `encoding/json` with the corresponding options; some options, such as `SortKeys=false`, will be ineffective.
- `ConfigStd`: a standard-library-compatible config (`EscapeHTML=true`, `SortKeys=true`, etc.) in environments where sonic is supported. Its behavior is consistent with `encoding/json`.
- `ConfigFastest`: the fastest config (`NoQuoteTextMarshaler=true`) in environments where sonic is supported. Its behavior matches `encoding/json` with the corresponding options; some options will be ineffective.
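A small sketch of picking one of these frozen configs; the `User` type is made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

type User struct {
	Name string `json:"name"`
}

func main() {
	u := User{Name: "<pig>"}

	std, _ := sonic.ConfigStd.MarshalToString(u)      // escapes HTML and sorts keys, like encoding/json
	fast, _ := sonic.ConfigFastest.MarshalToString(u) // fewer safety checks, best throughput

	fmt.Println(std)
	fmt.Println(fast)
}
```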
## Notes ## Notes
@@ -477,18 +464,6 @@ go someFunc(user)
However, `ast.Visitor` is not an easy-to-use API. You may need to write a lot of code to implement your own `ast.Visitor`, and you must carefully maintain the tree hierarchy during parsing. If you decide to use this API, please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully first. However, `ast.Visitor` is not an easy-to-use API. You may need to write a lot of code to implement your own `ast.Visitor`, and you must carefully maintain the tree hierarchy during parsing. If you decide to use this API, please read the comments in [ast/visitor.go](https://github.com/bytedance/sonic/blob/main/ast/visitor.go) carefully first.
### Buffer Size
Sonic uses memory pools in many places, such as `encoder.Encode` and `ast.Node.MarshalJSON`, to improve performance, which may increase in-use memory when the server's load is high. See [issue 614](https://github.com/bytedance/sonic/issues/614). Therefore, we introduce some options to let users configure the behavior of the memory pool. See the [option](https://pkg.go.dev/github.com/bytedance/sonic@v1.11.9/option#pkg-variables) package.
### Faster JSON Skipping
For safety, when skipping raw JSON the sonic decoder by default uses the [FSM](native/skip_one.c) algorithm, which validates the JSON while skipping it. It is much slower (1~10x) than the [SIMD-searching-pair](native/skip_one_fast.c) algorithm. If you have many redundant JSON values and do not need to strictly validate JSON correctness, you can enable the following options:
- `Config.NoValidateJSONSkip`: skip JSON faster when decoding, e.g. unknown fields, `json.RawMessage`, mismatched values, redundant array elements, etc.
- `Config.NoValidateJSONMarshaler`: avoid validating the JSON produced when encoding a `json.Marshaler`
- `SearchOption.ValidateJSON`: indicates whether to validate the located JSON value during `Get`
## Community ## Community
Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud-native ecosystem. Sonic is a subproject of [CloudWeGo](https://www.cloudwego.io/). We are committed to building a cloud-native ecosystem.
View File
@@ -23,16 +23,6 @@ import (
`github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/internal/rt`
) )
const (
// UseStdJSON indicates you are using fallback implementation (encoding/json)
UseStdJSON = iota
// UseSonicJSON indicates you are using real sonic implementation
UseSonicJSON
)
// APIKind is the kind of API, 0 is std json, 1 is sonic.
const APIKind = apiKind
// Config is a combination of sonic/encoder.Options and sonic/decoder.Options // Config is a combination of sonic/encoder.Options and sonic/decoder.Options
type Config struct { type Config struct {
// EscapeHTML indicates encoder to escape all HTML characters // EscapeHTML indicates encoder to escape all HTML characters
@@ -77,7 +67,7 @@ type Config struct {
// CopyString indicates decoder to decode string values by copying instead of referring. // CopyString indicates decoder to decode string values by copying instead of referring.
CopyString bool CopyString bool
// ValidateString indicates decoder and encoder to validate string values: decoder will return errors // ValidateString indicates decoder and encoder to valid string values: decoder will return errors
// when unescaped control chars(\u0000-\u001f) in the string value of JSON. // when unescaped control chars(\u0000-\u001f) in the string value of JSON.
ValidateString bool ValidateString bool
@@ -85,18 +75,8 @@ type Config struct {
// after encoding the JSONMarshaler to JSON. // after encoding the JSONMarshaler to JSON.
NoValidateJSONMarshaler bool NoValidateJSONMarshaler bool
// NoValidateJSONSkip indicates the decoder should not validate the JSON value when skipping it,
// such as unknown-fields, mismatched-type, redundant elements..
NoValidateJSONSkip bool
// NoEncoderNewline indicates that the encoder should not add a newline after every message // NoEncoderNewline indicates that the encoder should not add a newline after every message
NoEncoderNewline bool NoEncoderNewline bool
// Encode Infinity or Nan float into `null`, instead of returning an error.
EncodeNullForInfOrNan bool
// CaseSensitive indicates that the decoder should not ignore the case of object keys.
CaseSensitive bool
} }
var ( var (
@@ -114,15 +94,15 @@ var (
// ConfigFastest is the fastest config of APIs, aiming at speed. // ConfigFastest is the fastest config of APIs, aiming at speed.
ConfigFastest = Config{ ConfigFastest = Config{
NoQuoteTextMarshaler: true,
NoValidateJSONMarshaler: true, NoValidateJSONMarshaler: true,
NoValidateJSONSkip: true,
}.Froze() }.Froze()
) )
// API is a binding of specific config. // API is a binding of specific config.
// This interface is inspired by github.com/json-iterator/go, // This interface is inspired by github.com/json-iterator/go,
// and has same behaviors under equivalent config. // and has same behaviors under equavilent config.
type API interface { type API interface {
// MarshalToString returns the JSON encoding string of v // MarshalToString returns the JSON encoding string of v
MarshalToString(v interface{}) (string, error) MarshalToString(v interface{}) (string, error)
@@ -177,13 +157,6 @@ func Marshal(val interface{}) ([]byte, error) {
return ConfigDefault.Marshal(val) return ConfigDefault.Marshal(val)
} }
// MarshalIndent is like Marshal but applies Indent to format the output.
// Each JSON element in the output will begin on a new line beginning with prefix
// followed by one or more copies of indent according to the indentation nesting.
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
return ConfigDefault.MarshalIndent(v, prefix, indent)
}
// MarshalString returns the JSON encoding string of v. // MarshalString returns the JSON encoding string of v.
func MarshalString(val interface{}) (string, error) { func MarshalString(val interface{}) (string, error) {
return ConfigDefault.MarshalToString(val) return ConfigDefault.MarshalToString(val)
@@ -216,14 +189,6 @@ func Get(src []byte, path ...interface{}) (ast.Node, error) {
return GetCopyFromString(rt.Mem2Str(src), path...) return GetCopyFromString(rt.Mem2Str(src), path...)
} }
//GetWithOptions searches and locates the given path from src json,
// with specific options of ast.Searcher
func GetWithOptions(src []byte, opts ast.SearchOptions, path ...interface{}) (ast.Node, error) {
s := ast.NewSearcher(rt.Mem2Str(src))
s.SearchOptions = opts
return s.GetByPath(path...)
}
// GetFromString is same with Get except src is string. // GetFromString is same with Get except src is string.
// //
// WARNING: The returned JSON is **Referenced** from the input. // WARNING: The returned JSON is **Referenced** from the input.
View File
@@ -1,4 +1,4 @@
// +build !amd64,!arm64 go1.26 !go1.17 arm64,!go1.20 // +build !amd64,!arm64 go1.23 !go1.16 arm64,!go1.20
/* /*
* Copyright 2022 ByteDance Inc. * Copyright 2022 ByteDance Inc.
@@ -23,17 +23,28 @@ import (
`unicode/utf8` `unicode/utf8`
`github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/compat` `github.com/bytedance/sonic/internal/rt`
) )
func init() { func init() {
compat.Warn("sonic/ast") println("WARNING:(ast) sonic only supports Go1.16~1.22, but your environment is not suitable")
} }
func quote(buf *[]byte, val string) { func quote(buf *[]byte, val string) {
quoteString(buf, val) quoteString(buf, val)
} }
// unquote unescapes an internal JSON string (it doesn't count the quotes at the beginning and end)
func unquote(src string) (string, types.ParsingError) {
sp := rt.IndexChar(src, -1)
out, ok := unquoteBytes(rt.BytesFrom(sp, len(src)+2, len(src)+2))
if !ok {
return "", types.ERR_INVALID_ESCAPE
}
return rt.Mem2Str(out), 0
}
func (self *Parser) decodeValue() (val types.JsonState) { func (self *Parser) decodeValue() (val types.JsonState) {
e, v := decodeValue(self.s, self.p, self.dbuf == nil) e, v := decodeValue(self.s, self.p, self.dbuf == nil)
if e < 0 { if e < 0 {
View File
@@ -17,35 +17,36 @@
package ast package ast
import ( import (
"encoding/base64" `encoding/base64`
"runtime" `runtime`
"strconv" `strconv`
"unsafe" `unsafe`
"github.com/bytedance/sonic/internal/native/types" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/rt" `github.com/bytedance/sonic/internal/rt`
"github.com/bytedance/sonic/internal/utils"
"github.com/bytedance/sonic/unquote"
) )
const _blankCharsMask = (1 << ' ') | (1 << '\t') | (1 << '\r') | (1 << '\n')
var bytesNull = []byte("null")
const ( const (
strNull = "null" bytesNull = "null"
bytesTrue = "true" bytesTrue = "true"
bytesFalse = "false" bytesFalse = "false"
bytesObject = "{}" bytesObject = "{}"
bytesArray = "[]" bytesArray = "[]"
) )
func isSpace(c byte) bool {
return (int(1<<c) & _blankCharsMask) != 0
}
//go:nocheckptr //go:nocheckptr
func skipBlank(src string, pos int) int { func skipBlank(src string, pos int) int {
se := uintptr(rt.IndexChar(src, len(src))) se := uintptr(rt.IndexChar(src, len(src)))
sp := uintptr(rt.IndexChar(src, pos)) sp := uintptr(rt.IndexChar(src, pos))
for sp < se { for sp < se {
if !utils.IsSpace(*(*byte)(unsafe.Pointer(sp))) { if !isSpace(*(*byte)(unsafe.Pointer(sp))) {
break break
} }
sp += 1 sp += 1
@@ -62,7 +63,7 @@ func decodeNull(src string, pos int) (ret int) {
if ret > len(src) { if ret > len(src) {
return -int(types.ERR_EOF) return -int(types.ERR_EOF)
} }
if src[pos:ret] == strNull { if src[pos:ret] == bytesNull {
return ret return ret
} else { } else {
return -int(types.ERR_INVALID_CHAR) return -int(types.ERR_INVALID_CHAR)
@@ -102,13 +103,13 @@ func decodeString(src string, pos int) (ret int, v string) {
return ret, v return ret, v
} }
result, err := unquote.String(src[pos:ret]) vv, ok := unquoteBytes(rt.Str2Mem(src[pos:ret]))
if err != 0 { if !ok {
return -int(types.ERR_INVALID_CHAR), "" return -int(types.ERR_INVALID_CHAR), ""
} }
runtime.KeepAlive(src) runtime.KeepAlive(src)
return ret, result return ret, rt.Mem2Str(vv)
} }
func decodeBinary(src string, pos int) (ret int, v []byte) { func decodeBinary(src string, pos int) (ret int, v []byte) {
@@ -286,7 +287,67 @@ func decodeValue(src string, pos int, skipnum bool) (ret int, v types.JsonState)
//go:nocheckptr //go:nocheckptr
func skipNumber(src string, pos int) (ret int) { func skipNumber(src string, pos int) (ret int) {
return utils.SkipNumber(src, pos) sp := uintptr(rt.IndexChar(src, pos))
se := uintptr(rt.IndexChar(src, len(src)))
if uintptr(sp) >= se {
return -int(types.ERR_EOF)
}
if c := *(*byte)(unsafe.Pointer(sp)); c == '-' {
sp += 1
}
ss := sp
var pointer bool
var exponent bool
var lastIsDigit bool
var nextNeedDigit = true
for ; sp < se; sp += uintptr(1) {
c := *(*byte)(unsafe.Pointer(sp))
if isDigit(c) {
lastIsDigit = true
nextNeedDigit = false
continue
} else if nextNeedDigit {
return -int(types.ERR_INVALID_CHAR)
} else if c == '.' {
if !lastIsDigit || pointer || exponent || sp == ss {
return -int(types.ERR_INVALID_CHAR)
}
pointer = true
lastIsDigit = false
nextNeedDigit = true
continue
} else if c == 'e' || c == 'E' {
if !lastIsDigit || exponent {
return -int(types.ERR_INVALID_CHAR)
}
if sp == se-1 {
return -int(types.ERR_EOF)
}
exponent = true
lastIsDigit = false
nextNeedDigit = false
continue
} else if c == '-' || c == '+' {
if prev := *(*byte)(unsafe.Pointer(sp - 1)); prev != 'e' && prev != 'E' {
return -int(types.ERR_INVALID_CHAR)
}
lastIsDigit = false
nextNeedDigit = true
continue
} else {
break
}
}
if nextNeedDigit {
return -int(types.ERR_EOF)
}
runtime.KeepAlive(src)
return int(uintptr(sp) - uintptr((*rt.GoString)(unsafe.Pointer(&src)).Ptr))
} }
//go:nocheckptr //go:nocheckptr
@@ -544,7 +605,7 @@ func _DecodeString(src string, pos int, needEsc bool, validStr bool) (v string,
return str, p.p, true return str, p.p, true
} }
/* unquote the string */ /* unquote the string */
out, err := unquote.String(str) out, err := unquote(str)
/* check for errors */ /* check for errors */
if err != 0 { if err != 0 {
return "", -int(err), true return "", -int(err), true
View File
@@ -17,12 +17,12 @@
package ast package ast
import ( import (
"sync" `sync`
"unicode/utf8" `unicode/utf8`
)
"github.com/bytedance/gopkg/lang/dirtmake" const (
"github.com/bytedance/sonic/internal/rt" _MaxBuffer = 1024 // 1KB buffer size
"github.com/bytedance/sonic/option"
) )
func quoteString(e *[]byte, s string) { func quoteString(e *[]byte, s string) {
@@ -30,7 +30,7 @@ func quoteString(e *[]byte, s string) {
start := 0 start := 0
for i := 0; i < len(s); { for i := 0; i < len(s); {
if b := s[i]; b < utf8.RuneSelf { if b := s[i]; b < utf8.RuneSelf {
if rt.SafeSet[b] { if safeSet[b] {
i++ i++
continue continue
} }
@@ -54,8 +54,8 @@ func quoteString(e *[]byte, s string) {
// user-controlled strings are rendered into JSON // user-controlled strings are rendered into JSON
// and served to some browsers. // and served to some browsers.
*e = append(*e, `u00`...) *e = append(*e, `u00`...)
*e = append(*e, rt.Hex[b>>4]) *e = append(*e, hex[b>>4])
*e = append(*e, rt.Hex[b&0xF]) *e = append(*e, hex[b&0xF])
} }
i++ i++
start = i start = i
@@ -76,7 +76,7 @@ func quoteString(e *[]byte, s string) {
*e = append(*e, s[start:i]...) *e = append(*e, s[start:i]...)
} }
*e = append(*e, `\u202`...) *e = append(*e, `\u202`...)
*e = append(*e, rt.Hex[c&0xF]) *e = append(*e, hex[c&0xF])
i += size i += size
start = i start = i
continue continue
@@ -92,29 +92,16 @@ func quoteString(e *[]byte, s string) {
var bytesPool = sync.Pool{} var bytesPool = sync.Pool{}
func (self *Node) MarshalJSON() ([]byte, error) { func (self *Node) MarshalJSON() ([]byte, error) {
if self == nil {
return bytesNull, nil
}
// fast path for raw node
if self.isRaw() {
return rt.Str2Mem(self.toString()), nil
}
buf := newBuffer() buf := newBuffer()
err := self.encode(buf) err := self.encode(buf)
if err != nil { if err != nil {
freeBuffer(buf) freeBuffer(buf)
return nil, err return nil, err
} }
var ret []byte
if !rt.CanSizeResue(cap(*buf)) { ret := make([]byte, len(*buf))
ret = *buf
} else {
ret = dirtmake.Bytes(len(*buf), len(*buf))
copy(ret, *buf) copy(ret, *buf)
freeBuffer(buf) freeBuffer(buf)
}
return ret, err return ret, err
} }
@@ -122,24 +109,21 @@ func newBuffer() *[]byte {
if ret := bytesPool.Get(); ret != nil { if ret := bytesPool.Get(); ret != nil {
return ret.(*[]byte) return ret.(*[]byte)
} else { } else {
buf := make([]byte, 0, option.DefaultAstBufferSize) buf := make([]byte, 0, _MaxBuffer)
return &buf return &buf
} }
} }
func freeBuffer(buf *[]byte) { func freeBuffer(buf *[]byte) {
if !rt.CanSizeResue(cap(*buf)) {
return
}
*buf = (*buf)[:0] *buf = (*buf)[:0]
bytesPool.Put(buf) bytesPool.Put(buf)
} }
func (self *Node) encode(buf *[]byte) error { func (self *Node) encode(buf *[]byte) error {
if self.isRaw() { if self.IsRaw() {
return self.encodeRaw(buf) return self.encodeRaw(buf)
} }
switch int(self.itype()) { switch self.Type() {
case V_NONE : return ErrNotExist case V_NONE : return ErrNotExist
case V_ERROR : return self.Check() case V_ERROR : return self.Check()
case V_NULL : return self.encodeNull(buf) case V_NULL : return self.encodeNull(buf)
@@ -155,21 +139,16 @@ func (self *Node) encode(buf *[]byte) error {
} }
func (self *Node) encodeRaw(buf *[]byte) error { func (self *Node) encodeRaw(buf *[]byte) error {
lock := self.rlock() raw, err := self.Raw()
if !self.isRaw() { if err != nil {
self.runlock() return err
return self.encode(buf)
}
raw := self.toString()
if lock {
self.runlock()
} }
*buf = append(*buf, raw...) *buf = append(*buf, raw...)
return nil return nil
} }
func (self *Node) encodeNull(buf *[]byte) error { func (self *Node) encodeNull(buf *[]byte) error {
*buf = append(*buf, strNull...) *buf = append(*buf, bytesNull...)
return nil return nil
} }
View File
@@ -17,10 +17,6 @@ func newError(err types.ParsingError, msg string) *Node {
} }
} }
func newErrorPair(err SyntaxError) *Pair {
return &Pair{0, "", *newSyntaxError(err)}
}
// Error returns error message if the node is invalid // Error returns error message if the node is invalid
func (self Node) Error() string { func (self Node) Error() string {
if self.t != V_ERROR { if self.t != V_ERROR {
@@ -83,7 +79,7 @@ func (self SyntaxError) description() string {
/* check for empty source */ /* check for empty source */
if self.Src == "" { if self.Src == "" {
return fmt.Sprintf("no sources available, the input json is empty: %#v", self) return fmt.Sprintf("no sources available: %#v", self)
} }
/* prevent slicing before the beginning */ /* prevent slicing before the beginning */
View File
@@ -17,29 +17,19 @@
package ast package ast
import ( import (
"fmt" `fmt`
"github.com/bytedance/sonic/internal/caching" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/native/types"
) )
type Pair struct { type Pair struct {
hash uint64
Key string Key string
Value Node Value Node
} }
func NewPair(key string, val Node) Pair {
return Pair{
hash: caching.StrHash(key),
Key: key,
Value: val,
}
}
// Values returns iterator for array's children traversal // Values returns iterator for array's children traversal
func (self *Node) Values() (ListIterator, error) { func (self *Node) Values() (ListIterator, error) {
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return ListIterator{}, err return ListIterator{}, err
} }
return self.values(), nil return self.values(), nil
@@ -51,7 +41,7 @@ func (self *Node) values() ListIterator {
// Properties returns iterator for object's children traversal // Properties returns iterator for object's children traversal
func (self *Node) Properties() (ObjectIterator, error) { func (self *Node) Properties() (ObjectIterator, error) {
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return ObjectIterator{}, err return ObjectIterator{}, err
} }
return self.properties(), nil return self.properties(), nil
@@ -173,14 +163,11 @@ type Scanner func(path Sequence, node *Node) bool
// ForEach scans one V_OBJECT node's children from JSON head to tail, // ForEach scans one V_OBJECT node's children from JSON head to tail,
// and pass the Sequence and Node of corresponding JSON value. // and pass the Sequence and Node of corresponding JSON value.
// //
// Especially, if the node is not V_ARRAY or V_OBJECT, // Especailly, if the node is not V_ARRAY or V_OBJECT,
// the node itself will be returned and Sequence.Index == -1. // the node itself will be returned and Sequence.Index == -1.
// //
// NOTICE: An unset node WON'T trigger sc, but its index still counts into Path.Index // NOTICE: A unsetted node WON'T trigger sc, but its index still counts into Path.Index
func (self *Node) ForEach(sc Scanner) error { func (self *Node) ForEach(sc Scanner) error {
if err := self.checkRaw(); err != nil {
return err
}
switch self.itype() { switch self.itype() {
case types.V_ARRAY: case types.V_ARRAY:
iter, err := self.Values() iter, err := self.Values()
View File
@@ -17,15 +17,13 @@
package ast package ast
import ( import (
"encoding/json" `encoding/json`
"fmt" `fmt`
"strconv" `strconv`
"sync" `unsafe`
"sync/atomic"
"unsafe"
"github.com/bytedance/sonic/internal/native/types" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/rt" `github.com/bytedance/sonic/internal/rt`
) )
const ( const (
@@ -58,20 +56,19 @@ type Node struct {
t types.ValueType t types.ValueType
l uint l uint
p unsafe.Pointer p unsafe.Pointer
m *sync.RWMutex
} }
// UnmarshalJSON is just an adapter to json.Unmarshaler. // UnmarshalJSON is just an adapter to json.Unmarshaler.
// If you want better performance, use Searcher.GetByPath() directly // If you want better performance, use Searcher.GetByPath() directly
func (self *Node) UnmarshalJSON(data []byte) (err error) { func (self *Node) UnmarshalJSON(data []byte) (err error) {
*self = newRawNode(rt.Mem2Str(data), switchRawType(data[0]), false) *self = NewRaw(string(data))
return nil return self.Check()
} }
/** Node Type Accessor **/ /** Node Type Accessor **/
// Type returns json type represented by the node // Type returns json type represented by the node
// It will be one of bellows: // It will be one of belows:
// V_NONE = 0 (empty node, key not exists) // V_NONE = 0 (empty node, key not exists)
// V_ERROR = 1 (error node) // V_ERROR = 1 (error node)
// V_NULL = 2 (json value `null`, key exists) // V_NULL = 2 (json value `null`, key exists)
@@ -82,39 +79,17 @@ func (self *Node) UnmarshalJSON(data []byte) (err error) {
// V_STRING = 7 (json value string) // V_STRING = 7 (json value string)
// V_NUMBER = 33 (json value number ) // V_NUMBER = 33 (json value number )
// V_ANY = 34 (golang interface{}) // V_ANY = 34 (golang interface{})
//
// Deprecated: not concurrent safe. Use TypeSafe instead
func (self Node) Type() int { func (self Node) Type() int {
return int(self.t & _MASK_LAZY & _MASK_RAW) return int(self.t & _MASK_LAZY & _MASK_RAW)
} }
// Type concurrently-safe returns json type represented by the node func (self Node) itype() types.ValueType {
// It will be one of bellows:
// V_NONE = 0 (empty node, key not exists)
// V_ERROR = 1 (error node)
// V_NULL = 2 (json value `null`, key exists)
// V_TRUE = 3 (json value `true`)
// V_FALSE = 4 (json value `false`)
// V_ARRAY = 5 (json value array)
// V_OBJECT = 6 (json value object)
// V_STRING = 7 (json value string)
// V_NUMBER = 33 (json value number )
// V_ANY = 34 (golang interface{})
func (self *Node) TypeSafe() int {
return int(self.loadt() & _MASK_LAZY & _MASK_RAW)
}
func (self *Node) itype() types.ValueType {
return self.t & _MASK_LAZY & _MASK_RAW return self.t & _MASK_LAZY & _MASK_RAW
} }
// Exists returns false only if the self is nil or empty node V_NONE // Exists returns false only if the self is nil or empty node V_NONE
func (self *Node) Exists() bool { func (self *Node) Exists() bool {
if self == nil { return self.Valid() && self.t != _V_NONE
return false
}
t := self.loadt()
return t != V_ERROR && t != _V_NONE
} }
// Valid reports if self is NOT V_ERROR or nil // Valid reports if self is NOT V_ERROR or nil
@@ -122,7 +97,7 @@ func (self *Node) Valid() bool {
if self == nil { if self == nil {
return false return false
} }
return self.loadt() != V_ERROR return self.t != V_ERROR
} }
// Check checks if the node itself is valid, and return: // Check checks if the node itself is valid, and return:
@@ -131,31 +106,24 @@ func (self *Node) Valid() bool {
func (self *Node) Check() error { func (self *Node) Check() error {
if self == nil { if self == nil {
return ErrNotExist return ErrNotExist
} else if self.loadt() != V_ERROR { } else if self.t != V_ERROR {
return nil return nil
} else { } else {
return self return self
} }
} }
// isRaw returns true if node's underlying value is raw json
//
// Deprecated: not concurrent safe
func (self Node) IsRaw() bool {
return self.t & _V_RAW != 0
}
// IsRaw returns true if node's underlying value is raw json // IsRaw returns true if node's underlying value is raw json
func (self *Node) isRaw() bool { func (self Node) IsRaw() bool {
return self.loadt() & _V_RAW != 0 return self.t&_V_RAW != 0
} }
func (self *Node) isLazy() bool { func (self *Node) isLazy() bool {
return self != nil && self.t & _V_LAZY != 0 return self != nil && self.t&_V_LAZY != 0
} }
func (self *Node) isAny() bool { func (self *Node) isAny() bool {
return self != nil && self.loadt() == _V_ANY return self != nil && self.t == _V_ANY
} }
/** Simple Value Methods **/ /** Simple Value Methods **/
@@ -165,26 +133,18 @@ func (self *Node) Raw() (string, error) {
if self == nil { if self == nil {
return "", ErrNotExist return "", ErrNotExist
} }
lock := self.rlock() if !self.IsRaw() {
if !self.isRaw() {
if lock {
self.runlock()
}
buf, err := self.MarshalJSON() buf, err := self.MarshalJSON()
return rt.Mem2Str(buf), err return rt.Mem2Str(buf), err
} }
ret := self.toString() return self.toString(), nil
if lock {
self.runlock()
}
return ret, nil
} }
func (self *Node) checkRaw() error { func (self *Node) checkRaw() error {
if err := self.Check(); err != nil { if err := self.Check(); err != nil {
return err return err
} }
if self.isRaw() { if self.IsRaw() {
self.parseRaw(false) self.parseRaw(false)
} }
return self.Check() return self.Check()
@@ -440,7 +400,7 @@ func (self *Node) String() (string, error) {
} }
} }
// StrictString returns string value (unescaped), including V_STRING, V_ANY of string. // StrictString returns string value (unescaped), includeing V_STRING, V_ANY of string.
// In other cases, it will return empty string. // In other cases, it will return empty string.
func (self *Node) StrictString() (string, error) { func (self *Node) StrictString() (string, error) {
if err := self.checkRaw(); err != nil { if err := self.checkRaw(); err != nil {
@@ -509,24 +469,7 @@ func (self *Node) Float64() (float64, error) {
} }
} }
func (self *Node) StrictBool() (bool, error) { // Float64 exports underlying float64 value, includeing V_NUMBER, V_ANY
if err := self.checkRaw(); err!= nil {
return false, err
}
switch self.t {
case types.V_TRUE : return true, nil
case types.V_FALSE : return false, nil
case _V_ANY :
any := self.packAny()
switch v := any.(type) {
case bool : return v, nil
default : return false, ErrUnsupportType
}
default : return false, ErrUnsupportType
}
}
// Float64 exports underlying float64 value, including V_NUMBER, V_ANY
func (self *Node) StrictFloat64() (float64, error) { func (self *Node) StrictFloat64() (float64, error) {
if err := self.checkRaw(); err != nil { if err := self.checkRaw(); err != nil {
return 0.0, err return 0.0, err
@@ -544,7 +487,7 @@ func (self *Node) StrictFloat64() (float64, error) {
} }
} }
/** Sequential Value Methods **/ /** Sequencial Value Methods **/
// Len returns children count of a array|object|string node // Len returns children count of a array|object|string node
// WARN: For partially loaded node, it also works but only counts the parsed children // WARN: For partially loaded node, it also works but only counts the parsed children
@@ -561,7 +504,7 @@ func (self *Node) Len() (int, error) {
} }
} }
func (self *Node) len() int { func (self Node) len() int {
return int(self.l) return int(self.l)
} }
@@ -584,7 +527,7 @@ func (self *Node) Cap() (int, error) {
// //
// If self is V_NONE or V_NULL, it becomes V_OBJECT and sets the node at the key. // If self is V_NONE or V_NULL, it becomes V_OBJECT and sets the node at the key.
func (self *Node) Set(key string, node Node) (bool, error) { func (self *Node) Set(key string, node Node) (bool, error) {
if err := self.checkRaw(); err != nil { if err := self.Check(); err != nil {
return false, err return false, err
} }
if err := node.Check(); err != nil { if err := node.Check(); err != nil {
@@ -592,7 +535,7 @@ func (self *Node) Set(key string, node Node) (bool, error) {
} }
if self.t == _V_NONE || self.t == types.V_NULL { if self.t == _V_NONE || self.t == types.V_NULL {
*self = NewObject([]Pair{NewPair(key, node)}) *self = NewObject([]Pair{{key, node}})
return false, nil return false, nil
} else if self.itype() != types.V_OBJECT { } else if self.itype() != types.V_OBJECT {
return false, ErrUnsupportType return false, ErrUnsupportType
@@ -606,7 +549,7 @@ func (self *Node) Set(key string, node Node) (bool, error) {
*self = newObject(new(linkedPairs)) *self = newObject(new(linkedPairs))
} }
s := (*linkedPairs)(self.p) s := (*linkedPairs)(self.p)
s.Push(NewPair(key, node)) s.Push(Pair{key, node})
self.l++ self.l++
return false, nil return false, nil
@@ -625,10 +568,10 @@ func (self *Node) SetAny(key string, val interface{}) (bool, error) {
// Unset REMOVE (soft) the node of given key under object parent, and reports if the key has existed. // Unset REMOVE (soft) the node of given key under object parent, and reports if the key has existed.
func (self *Node) Unset(key string) (bool, error) { func (self *Node) Unset(key string) (bool, error) {
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return false, err return false, err
} }
// NOTICE: must get accurate length before deduct // NOTICE: must get acurate length before deduct
if err := self.skipAllKey(); err != nil { if err := self.skipAllKey(); err != nil {
return false, err return false, err
} }
@@ -646,7 +589,7 @@ func (self *Node) Unset(key string) (bool, error) {
// //
// The index must be within self's children. // The index must be within self's children.
func (self *Node) SetByIndex(index int, node Node) (bool, error) { func (self *Node) SetByIndex(index int, node Node) (bool, error) {
if err := self.checkRaw(); err != nil { if err := self.Check(); err != nil {
return false, err return false, err
} }
if err := node.Check(); err != nil { if err := node.Check(); err != nil {
@@ -674,7 +617,7 @@ func (self *Node) SetAnyByIndex(index int, val interface{}) (bool, error) {
return self.SetByIndex(index, NewAny(val)) return self.SetByIndex(index, NewAny(val))
} }
// UnsetByIndex REMOVE (softly) the node of given index. // UnsetByIndex REOMVE (softly) the node of given index.
// //
// WARN: this will change address of elements, which is a dangerous action. // WARN: this will change address of elements, which is a dangerous action.
// Use Unset() for object or Pop() for array instead. // Use Unset() for object or Pop() for array instead.
@@ -726,7 +669,7 @@ func (self *Node) UnsetByIndex(index int) (bool, error) {
// //
// If self is V_NONE or V_NULL, it becomes V_ARRAY and sets the node at index 0. // If self is V_NONE or V_NULL, it becomes V_ARRAY and sets the node at index 0.
func (self *Node) Add(node Node) error { func (self *Node) Add(node Node) error {
if err := self.checkRaw(); err != nil { if err := self.Check(); err != nil {
return err return err
} }
@@ -734,7 +677,7 @@ func (self *Node) Add(node Node) error {
*self = NewArray([]Node{node}) *self = NewArray([]Node{node})
return nil return nil
} }
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return err return err
} }
@@ -793,11 +736,11 @@ func (self *Node) Pop() error {
} }
// Move moves the child at src index to dst index, // Move moves the child at src index to dst index,
// meanwhile slides siblings from src+1 to dst. // meanwhile slides sliblings from src+1 to dst.
// //
// WARN: this will change address of elements, which is a dangerous action. // WARN: this will change address of elements, which is a dangerous action.
func (self *Node) Move(dst, src int) error { func (self *Node) Move(dst, src int) error {
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return err return err
} }
@@ -833,7 +776,7 @@ func (self *Node) Move(dst, src int) error {
return nil return nil
} }
// AddAny wraps val with V_ANY node, and Add() the node. // SetAny wraps val with V_ANY node, and Add() the node.
func (self *Node) AddAny(val interface{}) error { func (self *Node) AddAny(val interface{}) error {
return self.Add(NewAny(val)) return self.Add(NewAny(val))
} }
@@ -869,7 +812,7 @@ func (self *Node) GetByPath(path ...interface{}) *Node {
// Get loads given key of an object node on demands // Get loads given key of an object node on demands
func (self *Node) Get(key string) *Node { func (self *Node) Get(key string) *Node {
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return unwrapError(err) return unwrapError(err)
} }
n, _ := self.skipKey(key) n, _ := self.skipKey(key)
@@ -902,14 +845,14 @@ func (self *Node) Index(idx int) *Node {
// IndexPair indexies pair at given idx, // IndexPair indexies pair at given idx,
// node type MUST be either V_OBJECT // node type MUST be either V_OBJECT
func (self *Node) IndexPair(idx int) *Pair { func (self *Node) IndexPair(idx int) *Pair {
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return nil return nil
} }
return self.skipIndexPair(idx) return self.skipIndexPair(idx)
} }
func (self *Node) indexOrGet(idx int, key string) (*Node, int) { func (self *Node) indexOrGet(idx int, key string) (*Node, int) {
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return unwrapError(err), idx return unwrapError(err), idx
} }
@@ -946,16 +889,16 @@ func (self *Node) Map() (map[string]interface{}, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return nil, err return nil, err
} }
if err := self.loadAllKey(false); err != nil { if err := self.loadAllKey(); err != nil {
return nil, err return nil, err
} }
return self.toGenericObject() return self.toGenericObject()
} }
// MapUseNumber loads all keys of an object node, with numeric nodes cast to json.Number // MapUseNumber loads all keys of an object node, with numeric nodes casted to json.Number
func (self *Node) MapUseNumber() (map[string]interface{}, error) { func (self *Node) MapUseNumber() (map[string]interface{}, error) {
if self.isAny() { if self.isAny() {
any := self.packAny() any := self.packAny()
@@ -965,16 +908,16 @@ func (self *Node) MapUseNumber() (map[string]interface{}, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return nil, err return nil, err
} }
if err := self.loadAllKey(false); err != nil { if err := self.loadAllKey(); err != nil {
return nil, err return nil, err
} }
return self.toGenericObjectUseNumber() return self.toGenericObjectUseNumber()
} }
// MapUseNode scans both parsed and non-parsed children nodes, // MapUseNode scans both parsed and non-parsed chidren nodes,
// and map them by their keys // and map them by their keys
func (self *Node) MapUseNode() (map[string]Node, error) { func (self *Node) MapUseNode() (map[string]Node, error) {
if self.isAny() { if self.isAny() {
@@ -985,7 +928,7 @@ func (self *Node) MapUseNode() (map[string]Node, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_OBJECT); err != nil { if err := self.should(types.V_OBJECT, "an object"); err != nil {
return nil, err return nil, err
} }
if err := self.skipAllKey(); err != nil { if err := self.skipAllKey(); err != nil {
@@ -1091,16 +1034,16 @@ func (self *Node) Array() ([]interface{}, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return nil, err return nil, err
} }
if err := self.loadAllIndex(false); err != nil { if err := self.loadAllIndex(); err != nil {
return nil, err return nil, err
} }
return self.toGenericArray() return self.toGenericArray()
} }
// ArrayUseNumber loads all indexes of an array node, with numeric nodes cast to json.Number // ArrayUseNumber loads all indexes of an array node, with numeric nodes casted to json.Number
func (self *Node) ArrayUseNumber() ([]interface{}, error) { func (self *Node) ArrayUseNumber() ([]interface{}, error) {
if self.isAny() { if self.isAny() {
any := self.packAny() any := self.packAny()
@@ -1110,16 +1053,16 @@ func (self *Node) ArrayUseNumber() ([]interface{}, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return nil, err return nil, err
} }
if err := self.loadAllIndex(false); err != nil { if err := self.loadAllIndex(); err != nil {
return nil, err return nil, err
} }
return self.toGenericArrayUseNumber() return self.toGenericArrayUseNumber()
} }
// ArrayUseNode copies both parsed and non-parsed children nodes, // ArrayUseNode copys both parsed and non-parsed chidren nodes,
// and indexes them by original order // and indexes them by original order
func (self *Node) ArrayUseNode() ([]Node, error) { func (self *Node) ArrayUseNode() ([]Node, error) {
if self.isAny() { if self.isAny() {
@@ -1130,7 +1073,7 @@ func (self *Node) ArrayUseNode() ([]Node, error) {
return nil, ErrUnsupportType return nil, ErrUnsupportType
} }
} }
if err := self.should(types.V_ARRAY); err != nil { if err := self.should(types.V_ARRAY, "an array"); err != nil {
return nil, err return nil, err
} }
if err := self.skipAllIndex(); err != nil { if err := self.skipAllIndex(); err != nil {
@@ -1164,9 +1107,9 @@ func (self *Node) unsafeArray() (*linkedNodes, error) {
return (*linkedNodes)(self.p), nil return (*linkedNodes)(self.p), nil
} }
// Interface loads all children under all paths from this node, // Interface loads all children under all pathes from this node,
// and converts itself as generic type. // and converts itself as generic type.
// WARN: all numeric nodes are cast to float64 // WARN: all numberic nodes are casted to float64
func (self *Node) Interface() (interface{}, error) { func (self *Node) Interface() (interface{}, error) {
if err := self.checkRaw(); err != nil { if err := self.checkRaw(); err != nil {
return nil, err return nil, err
@@ -1186,12 +1129,12 @@ func (self *Node) Interface() (interface{}, error) {
} }
return v, nil return v, nil
case _V_ARRAY_LAZY : case _V_ARRAY_LAZY :
if err := self.loadAllIndex(false); err != nil { if err := self.loadAllIndex(); err != nil {
return nil, err return nil, err
} }
return self.toGenericArray() return self.toGenericArray()
case _V_OBJECT_LAZY : case _V_OBJECT_LAZY :
if err := self.loadAllKey(false); err != nil { if err := self.loadAllKey(); err != nil {
return nil, err return nil, err
} }
return self.toGenericObject() return self.toGenericObject()
@@ -1210,7 +1153,7 @@ func (self *Node) packAny() interface{} {
} }
// InterfaceUseNumber works same with Interface() // InterfaceUseNumber works same with Interface()
// except numeric nodes are cast to json.Number // except numberic nodes are casted to json.Number
func (self *Node) InterfaceUseNumber() (interface{}, error) { func (self *Node) InterfaceUseNumber() (interface{}, error) {
if err := self.checkRaw(); err != nil { if err := self.checkRaw(); err != nil {
return nil, err return nil, err
@@ -1225,12 +1168,12 @@ func (self *Node) InterfaceUseNumber() (interface{}, error) {
case types.V_STRING : return self.toString(), nil case types.V_STRING : return self.toString(), nil
case _V_NUMBER : return self.toNumber(), nil case _V_NUMBER : return self.toNumber(), nil
case _V_ARRAY_LAZY : case _V_ARRAY_LAZY :
if err := self.loadAllIndex(false); err != nil { if err := self.loadAllIndex(); err != nil {
return nil, err return nil, err
} }
return self.toGenericArrayUseNumber() return self.toGenericArrayUseNumber()
case _V_OBJECT_LAZY : case _V_OBJECT_LAZY :
if err := self.loadAllKey(false); err != nil { if err := self.loadAllKey(); err != nil {
return nil, err return nil, err
} }
return self.toGenericObjectUseNumber() return self.toGenericObjectUseNumber()
@@ -1262,30 +1205,70 @@ func (self *Node) InterfaceUseNode() (interface{}, error) {
} }
} }
// LoadAll loads the node's children // LoadAll loads all the node's children and children's children as parsed.
// and ensure all its children can be READ concurrently (include its children's children) // After calling it, the node can be safely used on concurrency
func (self *Node) LoadAll() error { func (self *Node) LoadAll() error {
return self.Load() if self.IsRaw() {
self.parseRaw(true)
return self.Check()
}
switch self.itype() {
case types.V_ARRAY:
e := self.len()
if err := self.loadAllIndex(); err != nil {
return err
}
for i := 0; i < e; i++ {
n := self.nodeAt(i)
if n.IsRaw() {
n.parseRaw(true)
}
if err := n.Check(); err != nil {
return err
}
}
return nil
case types.V_OBJECT:
e := self.len()
if err := self.loadAllKey(); err != nil {
return err
}
for i := 0; i < e; i++ {
n := self.pairAt(i)
if n.Value.IsRaw() {
n.Value.parseRaw(true)
}
if err := n.Value.Check(); err != nil {
return err
}
}
return nil
default:
return self.Check()
}
} }
// Load loads the node's children as parsed. // Load loads the node's children as parsed.
// and ensure all its children can be READ concurrently (include its children's children) // After calling it, only the node itself can be used on concurrency (not include its children)
func (self *Node) Load() error { func (self *Node) Load() error {
if err := self.checkRaw(); err != nil {
return err
}
switch self.t { switch self.t {
case _V_ARRAY_LAZY: self.loadAllIndex(true) case _V_ARRAY_LAZY:
case _V_OBJECT_LAZY: self.loadAllKey(true) return self.skipAllIndex()
case V_ERROR: return self case _V_OBJECT_LAZY:
case V_NONE: return nil return self.skipAllKey()
default:
return self.Check()
} }
if self.m == nil {
self.m = new(sync.RWMutex)
}
return self.checkRaw()
} }
/**---------------------------------- Internal Helper Methods ----------------------------------**/ /**---------------------------------- Internal Helper Methods ----------------------------------**/
func (self *Node) should(t types.ValueType) error { func (self *Node) should(t types.ValueType, s string) error {
if err := self.checkRaw(); err != nil { if err := self.checkRaw(); err != nil {
return err return err
} }
@@ -1456,17 +1439,13 @@ func (self *Node) skipIndexPair(index int) *Pair {
return nil return nil
} }
func (self *Node) loadAllIndex(loadOnce bool) error { func (self *Node) loadAllIndex() error {
if !self.isLazy() { if !self.isLazy() {
return nil return nil
} }
var err types.ParsingError var err types.ParsingError
parser, stack := self.getParserAndArrayStack() parser, stack := self.getParserAndArrayStack()
if !loadOnce {
parser.noLazy = true parser.noLazy = true
} else {
parser.loadOnce = true
}
*self, err = parser.decodeArray(&stack.v) *self, err = parser.decodeArray(&stack.v)
if err != 0 { if err != 0 {
return parser.ExportError(err) return parser.ExportError(err)
@@ -1474,19 +1453,14 @@ func (self *Node) loadAllIndex(loadOnce bool) error {
return nil return nil
} }
func (self *Node) loadAllKey(loadOnce bool) error { func (self *Node) loadAllKey() error {
if !self.isLazy() { if !self.isLazy() {
return nil return nil
} }
var err types.ParsingError var err types.ParsingError
parser, stack := self.getParserAndObjectStack() parser, stack := self.getParserAndObjectStack()
if !loadOnce {
parser.noLazy = true parser.noLazy = true
*self, err = parser.decodeObject(&stack.v) *self, err = parser.decodeObject(&stack.v)
} else {
parser.loadOnce = true
*self, err = parser.decodeObject(&stack.v)
}
if err != 0 { if err != 0 {
return parser.ExportError(err) return parser.ExportError(err)
} }
@@ -1655,23 +1629,7 @@ func NewRaw(json string) Node {
if it == _V_NONE { if it == _V_NONE {
return Node{} return Node{}
} }
return newRawNode(parser.s[start:parser.p], it, false) return newRawNode(parser.s[start:parser.p], it)
}
// NewRawConcurrentRead creates a node of raw json, which can be READ
// (GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON) concurrently.
// If the input json is invalid, NewRaw returns a error Node.
func NewRawConcurrentRead(json string) Node {
parser := NewParserObj(json)
start, err := parser.skip()
if err != 0 {
return *newError(err, err.Message())
}
it := switchRawType(parser.s[start])
if it == _V_NONE {
return Node{}
}
return newRawNode(parser.s[start:parser.p], it, true)
} }
// NewAny creates a node of type V_ANY if any's type isn't Node or *Node, // NewAny creates a node of type V_ANY if any's type isn't Node or *Node,
@@ -1695,7 +1653,7 @@ func NewBytes(src []byte) Node {
if len(src) == 0 { if len(src) == 0 {
panic("empty src bytes") panic("empty src bytes")
} }
out := rt.EncodeBase64ToString(src) out := encodeBase64(src)
return NewString(out) return NewString(out)
} }
@@ -1731,15 +1689,15 @@ func NewNumber(v string) Node {
} }
} }
func (node *Node) toNumber() json.Number { func (node Node) toNumber() json.Number {
return json.Number(rt.StrFrom(node.p, int64(node.l))) return json.Number(rt.StrFrom(node.p, int64(node.l)))
} }
func (self *Node) toString() string { func (self Node) toString() string {
return rt.StrFrom(self.p, int64(self.l)) return rt.StrFrom(self.p, int64(self.l))
} }
func (node *Node) toFloat64() (float64, error) { func (node Node) toFloat64() (float64, error) {
ret, err := node.toNumber().Float64() ret, err := node.toNumber().Float64()
if err != nil { if err != nil {
return 0, err return 0, err
@@ -1747,7 +1705,7 @@ func (node *Node) toFloat64() (float64, error) {
return ret, nil return ret, nil
} }
func (node *Node) toInt64() (int64, error) { func (node Node) toInt64() (int64, error) {
ret,err := node.toNumber().Int64() ret,err := node.toNumber().Int64()
if err != nil { if err != nil {
return 0, err return 0, err
@@ -1783,8 +1741,6 @@ func NewArray(v []Node) Node {
return newArray(s) return newArray(s)
} }
const _Threshold_Index = 16
func newArray(v *linkedNodes) Node { func newArray(v *linkedNodes) Node {
return Node{ return Node{
t: types.V_ARRAY, t: types.V_ARRAY,
@@ -1808,9 +1764,6 @@ func NewObject(v []Pair) Node {
} }
func newObject(v *linkedPairs) Node { func newObject(v *linkedPairs) Node {
if v.size > _Threshold_Index {
v.BuildIndex()
}
return Node{ return Node{
t: types.V_OBJECT, t: types.V_OBJECT,
l: uint(v.Len()), l: uint(v.Len()),
@@ -1819,42 +1772,53 @@ func newObject(v *linkedPairs) Node {
} }
func (self *Node) setObject(v *linkedPairs) { func (self *Node) setObject(v *linkedPairs) {
if v.size > _Threshold_Index {
v.BuildIndex()
}
self.t = types.V_OBJECT self.t = types.V_OBJECT
self.l = uint(v.Len()) self.l = uint(v.Len())
self.p = unsafe.Pointer(v) self.p = unsafe.Pointer(v)
} }
func (self *Node) parseRaw(full bool) { func newRawNode(str string, typ types.ValueType) Node {
lock := self.lock() return Node{
defer self.unlock() t: _V_RAW | typ,
if !self.isRaw() { p: rt.StrPtr(str),
return l: uint(len(str)),
} }
}
func (self *Node) parseRaw(full bool) {
raw := self.toString() raw := self.toString()
parser := NewParserObj(raw) parser := NewParserObj(raw)
var e types.ParsingError
if full { if full {
parser.noLazy = true parser.noLazy = true
*self, e = parser.Parse() parser.skipValue = false
} else if lock {
var n Node
parser.noLazy = true
parser.loadOnce = true
n, e = parser.Parse()
self.assign(n)
} else {
*self, e = parser.Parse()
} }
var e types.ParsingError
*self, e = parser.Parse()
if e != 0 { if e != 0 {
*self = *newSyntaxError(parser.syntaxError(e)) *self = *newSyntaxError(parser.syntaxError(e))
} }
} }
func (self *Node) assign(n Node) { var typeJumpTable = [256]types.ValueType{
self.l = n.l '"' : types.V_STRING,
self.p = n.p '-' : _V_NUMBER,
atomic.StoreInt64(&self.t, n.t) '0' : _V_NUMBER,
'1' : _V_NUMBER,
'2' : _V_NUMBER,
'3' : _V_NUMBER,
'4' : _V_NUMBER,
'5' : _V_NUMBER,
'6' : _V_NUMBER,
'7' : _V_NUMBER,
'8' : _V_NUMBER,
'9' : _V_NUMBER,
'[' : types.V_ARRAY,
'f' : types.V_FALSE,
'n' : types.V_NULL,
't' : types.V_TRUE,
'{' : types.V_OBJECT,
}
func switchRawType(c byte) types.ValueType {
return typeJumpTable[c]
} }
View File
@@ -17,18 +17,14 @@
package ast package ast
import ( import (
"fmt" `fmt`
"sync"
"sync/atomic"
"github.com/bytedance/sonic/internal/native/types" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/rt" `github.com/bytedance/sonic/internal/rt`
"github.com/bytedance/sonic/internal/utils"
"github.com/bytedance/sonic/unquote"
) )
const ( const (
_DEFAULT_NODE_CAP int = 16 _DEFAULT_NODE_CAP int = 8
_APPEND_GROW_SHIFT = 1 _APPEND_GROW_SHIFT = 1
) )
@@ -49,7 +45,6 @@ type Parser struct {
p int p int
s string s string
noLazy bool noLazy bool
loadOnce bool
skipValue bool skipValue bool
dbuf *byte dbuf *byte
} }
@@ -65,7 +60,7 @@ func (self *Parser) delim() types.ParsingError {
return types.ERR_EOF return types.ERR_EOF
} }
/* check for the delimiter */ /* check for the delimtier */
if self.s[p] != ':' { if self.s[p] != ':' {
return types.ERR_INVALID_CHAR return types.ERR_INVALID_CHAR
} }
@@ -84,7 +79,7 @@ func (self *Parser) object() types.ParsingError {
return types.ERR_EOF return types.ERR_EOF
} }
/* check for the delimiter */ /* check for the delimtier */
if self.s[p] != '{' { if self.s[p] != '{' {
return types.ERR_INVALID_CHAR return types.ERR_INVALID_CHAR
} }
@@ -103,7 +98,7 @@ func (self *Parser) array() types.ParsingError {
return types.ERR_EOF return types.ERR_EOF
} }
/* check for the delimiter */ /* check for the delimtier */
if self.s[p] != '[' { if self.s[p] != '[' {
return types.ERR_INVALID_CHAR return types.ERR_INVALID_CHAR
} }
@@ -115,15 +110,11 @@ func (self *Parser) array() types.ParsingError {
func (self *Parser) lspace(sp int) int { func (self *Parser) lspace(sp int) int {
ns := len(self.s) ns := len(self.s)
for ; sp<ns && utils.IsSpace(self.s[sp]); sp+=1 {} for ; sp<ns && isSpace(self.s[sp]); sp+=1 {}
return sp return sp
} }
func (self *Parser) backward() {
for ; self.p >= 0 && utils.IsSpace(self.s[self.p]); self.p-=1 {}
}
func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) { func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) {
sp := self.p sp := self.p
ns := len(self.s) ns := len(self.s)
@@ -157,7 +148,7 @@ func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) {
if t == _V_NONE { if t == _V_NONE {
return Node{}, types.ERR_INVALID_CHAR return Node{}, types.ERR_INVALID_CHAR
} }
val = newRawNode(self.s[start:self.p], t, false) val = newRawNode(self.s[start:self.p], t)
}else{ }else{
/* decode the value */ /* decode the value */
if val, err = self.Parse(); err != 0 { if val, err = self.Parse(); err != 0 {
@@ -219,7 +210,7 @@ func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
/* check for escape sequence */ /* check for escape sequence */
if njs.Ep != -1 { if njs.Ep != -1 {
if key, err = unquote.String(key); err != 0 { if key, err = unquote(key); err != 0 {
return Node{}, err return Node{}, err
} }
} }
@@ -243,7 +234,7 @@ func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
if t == _V_NONE { if t == _V_NONE {
return Node{}, types.ERR_INVALID_CHAR return Node{}, types.ERR_INVALID_CHAR
} }
val = newRawNode(self.s[start:self.p], t, false) val = newRawNode(self.s[start:self.p], t)
} else { } else {
/* decode the value */ /* decode the value */
if val, err = self.Parse(); err != 0 { if val, err = self.Parse(); err != 0 {
@@ -253,7 +244,7 @@ func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
/* add the value to result */ /* add the value to result */
// FIXME: ret's address may change here, thus previous referred node in ret may be invalid !! // FIXME: ret's address may change here, thus previous referred node in ret may be invalid !!
ret.Push(NewPair(key, val)) ret.Push(Pair{Key: key, Value: val})
self.p = self.lspace(self.p) self.p = self.lspace(self.p)
/* check for EOF */ /* check for EOF */
@@ -284,7 +275,7 @@ func (self *Parser) decodeString(iv int64, ep int) (Node, types.ParsingError) {
} }
/* unquote the string */ /* unquote the string */
out, err := unquote.String(s) out, err := unquote(s)
/* check for errors */ /* check for errors */
if err != 0 { if err != 0 {
@@ -300,10 +291,6 @@ func (self *Parser) Pos() int {
return self.p return self.p
} }
// Parse returns an ast.Node representing the parser's JSON.
// NOTICE: how lazily it parses depends on the parser's options.
// By default it only parses the first layer, and the first child for an Object or Array.
func (self *Parser) Parse() (Node, types.ParsingError) { func (self *Parser) Parse() (Node, types.ParsingError) {
switch val := self.decodeValue(); val.Vt { switch val := self.decodeValue(); val.Vt {
case types.V_EOF : return Node{}, types.ERR_EOF case types.V_EOF : return Node{}, types.ERR_EOF
@@ -312,48 +299,22 @@ func (self *Parser) Parse() (Node, types.ParsingError) {
case types.V_FALSE : return falseNode, 0 case types.V_FALSE : return falseNode, 0
case types.V_STRING : return self.decodeString(val.Iv, val.Ep) case types.V_STRING : return self.decodeString(val.Iv, val.Ep)
case types.V_ARRAY: case types.V_ARRAY:
s := self.p - 1;
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' { if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' {
self.p = p + 1 self.p = p + 1
return Node{t: types.V_ARRAY}, 0 return Node{t: types.V_ARRAY}, 0
} }
if self.noLazy { if self.noLazy {
if self.loadOnce {
self.noLazy = false
}
return self.decodeArray(new(linkedNodes)) return self.decodeArray(new(linkedNodes))
} }
// NOTICE: loadOnce always keep raw json for object or array
if self.loadOnce {
self.p = s
s, e := self.skipFast()
if e != 0 {
return Node{}, e
}
return newRawNode(self.s[s:self.p], types.V_ARRAY, true), 0
}
return newLazyArray(self), 0 return newLazyArray(self), 0
case types.V_OBJECT: case types.V_OBJECT:
s := self.p - 1;
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' { if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' {
self.p = p + 1 self.p = p + 1
return Node{t: types.V_OBJECT}, 0 return Node{t: types.V_OBJECT}, 0
} }
// NOTICE: loadOnce always keep raw json for object or array
if self.noLazy { if self.noLazy {
if self.loadOnce {
self.noLazy = false
}
return self.decodeObject(new(linkedPairs)) return self.decodeObject(new(linkedPairs))
} }
if self.loadOnce {
self.p = s
s, e := self.skipFast()
if e != 0 {
return Node{}, e
}
return newRawNode(self.s[s:self.p], types.V_OBJECT, true), 0
}
return newLazyObject(self), 0 return newLazyObject(self), 0
case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0 case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0
case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0 case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0
@@ -394,7 +355,7 @@ func (self *Parser) searchKey(match string) types.ParsingError {
/* check for escape sequence */ /* check for escape sequence */
if njs.Ep != -1 { if njs.Ep != -1 {
if key, err = unquote.String(key); err != 0 { if key, err = unquote(key); err != 0 {
return err return err
} }
} }
@@ -510,7 +471,7 @@ func (self *Node) skipNextNode() *Node {
if t == _V_NONE { if t == _V_NONE {
return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR)) return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
} }
val = newRawNode(parser.s[start:parser.p], t, false) val = newRawNode(parser.s[start:parser.p], t)
} }
/* add the value to result */ /* add the value to result */
@@ -549,7 +510,7 @@ func (self *Node) skipNextPair() (*Pair) {
/* check for EOF */ /* check for EOF */
if parser.p = parser.lspace(sp); parser.p >= ns { if parser.p = parser.lspace(sp); parser.p >= ns {
return newErrorPair(parser.syntaxError(types.ERR_EOF)) return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_EOF))}
} }
/* check for empty object */ /* check for empty object */
@@ -566,7 +527,7 @@ func (self *Node) skipNextPair() (*Pair) {
/* decode the key */ /* decode the key */
if njs = parser.decodeValue(); njs.Vt != types.V_STRING { if njs = parser.decodeValue(); njs.Vt != types.V_STRING {
return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) return &Pair{"", *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
} }
/* extract the key */ /* extract the key */
@@ -575,35 +536,35 @@ func (self *Node) skipNextPair() (*Pair) {
/* check for escape sequence */ /* check for escape sequence */
if njs.Ep != -1 { if njs.Ep != -1 {
if key, err = unquote.String(key); err != 0 { if key, err = unquote(key); err != 0 {
return newErrorPair(parser.syntaxError(err)) return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
} }
} }
/* expect a ':' delimiter */ /* expect a ':' delimiter */
if err = parser.delim(); err != 0 { if err = parser.delim(); err != 0 {
return newErrorPair(parser.syntaxError(err)) return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
} }
/* skip the value */ /* skip the value */
if start, err := parser.skipFast(); err != 0 { if start, err := parser.skipFast(); err != 0 {
return newErrorPair(parser.syntaxError(err)) return &Pair{key, *newSyntaxError(parser.syntaxError(err))}
} else { } else {
t := switchRawType(parser.s[start]) t := switchRawType(parser.s[start])
if t == _V_NONE { if t == _V_NONE {
return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
} }
val = newRawNode(parser.s[start:parser.p], t, false) val = newRawNode(parser.s[start:parser.p], t)
} }
/* add the value to result */ /* add the value to result */
ret.Push(NewPair(key, val)) ret.Push(Pair{Key: key, Value: val})
self.l++ self.l++
parser.p = parser.lspace(parser.p) parser.p = parser.lspace(parser.p)
/* check for EOF */ /* check for EOF */
if parser.p >= ns { if parser.p >= ns {
return newErrorPair(parser.syntaxError(types.ERR_EOF)) return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_EOF))}
} }
/* check for the next character */ /* check for the next character */
@@ -616,7 +577,7 @@ func (self *Node) skipNextPair() (*Pair) {
self.setObject(ret) self.setObject(ret)
return ret.At(ret.Len()-1) return ret.At(ret.Len()-1)
default: default:
return newErrorPair(parser.syntaxError(types.ERR_INVALID_CHAR)) return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
} }
} }
@@ -640,7 +601,7 @@ func Loads(src string) (int, interface{}, error) {
} }
} }
// LoadsUseNumber parse all json into interface{}, with numeric nodes cast to json.Number // LoadsUseNumber parse all json into interface{}, with numeric nodes casted to json.Number
func LoadsUseNumber(src string) (int, interface{}, error) { func LoadsUseNumber(src string) (int, interface{}, error) {
ps := &Parser{s: src} ps := &Parser{s: src}
np, err := ps.Parse() np, err := ps.Parse()
@@ -694,75 +655,6 @@ func (self *Parser) ExportError(err types.ParsingError) error {
} }
func backward(src string, i int) int { func backward(src string, i int) int {
for ; i>=0 && utils.IsSpace(src[i]); i-- {} for ; i>=0 && isSpace(src[i]); i-- {}
return i return i
} }
func newRawNode(str string, typ types.ValueType, lock bool) Node {
ret := Node{
t: typ | _V_RAW,
p: rt.StrPtr(str),
l: uint(len(str)),
}
if lock {
ret.m = new(sync.RWMutex)
}
return ret
}
var typeJumpTable = [256]types.ValueType{
'"' : types.V_STRING,
'-' : _V_NUMBER,
'0' : _V_NUMBER,
'1' : _V_NUMBER,
'2' : _V_NUMBER,
'3' : _V_NUMBER,
'4' : _V_NUMBER,
'5' : _V_NUMBER,
'6' : _V_NUMBER,
'7' : _V_NUMBER,
'8' : _V_NUMBER,
'9' : _V_NUMBER,
'[' : types.V_ARRAY,
'f' : types.V_FALSE,
'n' : types.V_NULL,
't' : types.V_TRUE,
'{' : types.V_OBJECT,
}
func switchRawType(c byte) types.ValueType {
return typeJumpTable[c]
}
func (self *Node) loadt() types.ValueType {
return (types.ValueType)(atomic.LoadInt64(&self.t))
}
func (self *Node) lock() bool {
if m := self.m; m != nil {
m.Lock()
return true
}
return false
}
func (self *Node) unlock() {
if m := self.m; m != nil {
m.Unlock()
}
}
func (self *Node) rlock() bool {
if m := self.m; m != nil {
m.RLock()
return true
}
return false
}
func (self *Node) runlock() {
if m := self.m; m != nil {
m.RUnlock()
}
}
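The removed helpers above include typeJumpTable, a 256-entry lookup that classifies a JSON value by its first byte. A standalone sketch of that dispatch idea follows; the constants are local stand-ins, not sonic's types.ValueType values.

```go
package main

import "fmt"

type valueType int

const (
	vNone valueType = iota // zero value: byte does not start a JSON value
	vNull
	vTrue
	vFalse
	vNumber
	vString
	vArray
	vObject
)

// jumpTable mirrors the first-byte dispatch of typeJumpTable above.
var jumpTable = [256]valueType{
	'"': vString, '{': vObject, '[': vArray,
	't': vTrue, 'f': vFalse, 'n': vNull,
	'-': vNumber, '0': vNumber, '1': vNumber, '2': vNumber, '3': vNumber,
	'4': vNumber, '5': vNumber, '6': vNumber, '7': vNumber, '8': vNumber, '9': vNumber,
}

func classify(c byte) valueType { return jumpTable[c] }

func main() {
	fmt.Println(classify('{') == vObject, classify('7') == vNumber, classify('x') == vNone) // true true true
}
```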

View File

@@ -21,23 +21,8 @@ import (
`github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/native/types`
) )
// SearchOptions controls Searcher's behavior
type SearchOptions struct {
// ValidateJSON indicates the searcher to validate the entire JSON
ValidateJSON bool
// CopyReturn indicates the searcher to copy the result JSON instead of refer from the input
// This can help to reduce memory usage if you cache the results
CopyReturn bool
// ConcurrentRead indicates the searcher to return a concurrently-READ-safe node,
// including: GetByPath/Get/Index/GetOrIndex/Int64/Bool/Float64/String/Number/Interface/Array/Map/Raw/MarshalJSON
ConcurrentRead bool
}
type Searcher struct { type Searcher struct {
parser Parser parser Parser
SearchOptions
} }
func NewSearcher(str string) *Searcher { func NewSearcher(str string) *Searcher {
@@ -46,16 +31,12 @@ func NewSearcher(str string) *Searcher {
s: str, s: str,
noLazy: false, noLazy: false,
}, },
SearchOptions: SearchOptions{
ValidateJSON: true,
},
} }
} }
// GetByPathCopy searches in depth from the top json and returns a **Copied** json node at the path location // GetByPathCopy searches in depth from the top json and returns a **Copied** json node at the path location
func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) { func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) {
self.CopyReturn = true return self.getByPath(true, true, path...)
return self.getByPath(path...)
} }
// GetByPathNoCopy searches in depth from the top json and returns a **Referenced** json node at the path location // GetByPathNoCopy searches in depth from the top json and returns a **Referenced** json node at the path location
@@ -63,15 +44,15 @@ func (self *Searcher) GetByPathCopy(path ...interface{}) (Node, error) {
// WARN: this search directly refers to partial json from the top json, which is faster, // WARN: this search directly refers to partial json from the top json, which is faster,
// but may consume more memory. // but may consume more memory.
func (self *Searcher) GetByPath(path ...interface{}) (Node, error) { func (self *Searcher) GetByPath(path ...interface{}) (Node, error) {
return self.getByPath(path...) return self.getByPath(false, true, path...)
} }
func (self *Searcher) getByPath(path ...interface{}) (Node, error) { func (self *Searcher) getByPath(copystring bool, validate bool, path ...interface{}) (Node, error) {
var err types.ParsingError var err types.ParsingError
var start int var start int
self.parser.p = 0 self.parser.p = 0
start, err = self.parser.getByPath(self.ValidateJSON, path...) start, err = self.parser.getByPath(validate, path...)
if err != 0 { if err != 0 {
// for compatibility with old version // for compatibility with old version
if err == types.ERR_NOT_FOUND { if err == types.ERR_NOT_FOUND {
@@ -90,12 +71,12 @@ func (self *Searcher) getByPath(path ...interface{}) (Node, error) {
// copy string to reduce memory usage // copy string to reduce memory usage
var raw string var raw string
if self.CopyReturn { if copystring {
raw = rt.Mem2Str([]byte(self.parser.s[start:self.parser.p])) raw = rt.Mem2Str([]byte(self.parser.s[start:self.parser.p]))
} else { } else {
raw = self.parser.s[start:self.parser.p] raw = self.parser.s[start:self.parser.p]
} }
return newRawNode(raw, t, self.ConcurrentRead), nil return newRawNode(raw, t), nil
} }
// GetByPath searches a path and returns the relation and type of the target // GetByPath searches a path and returns the relation and type of the target
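For context, the Searcher changed in this hunk is part of sonic's public ast package. A minimal usage sketch of the path-search API shown here; the JSON payload and path are made up.

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic/ast"
)

func main() {
	src := `{"user":{"id":42,"tags":["a","b"]}}` // illustrative payload
	s := ast.NewSearcher(src)

	// Object keys and array indexes can be mixed in one path.
	node, err := s.GetByPath("user", "tags", 1)
	if err != nil {
		panic(err)
	}
	v, _ := node.String()
	fmt.Println(v) // b
}
```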

View File

@@ -1,4 +1,4 @@
// +build !amd64,!arm64 go1.26 !go1.17 arm64,!go1.20 // +build !amd64 !go1.16 go1.23
/* /*
* Copyright 2021 ByteDance Inc. * Copyright 2021 ByteDance Inc.
@@ -27,8 +27,6 @@ import (
`github.com/bytedance/sonic/option` `github.com/bytedance/sonic/option`
) )
const apiKind = UseStdJSON
type frozenConfig struct { type frozenConfig struct {
Config Config
} }
@@ -87,17 +85,7 @@ func (cfg frozenConfig) UnmarshalFromString(buf string, val interface{}) error {
if cfg.DisallowUnknownFields { if cfg.DisallowUnknownFields {
dec.DisallowUnknownFields() dec.DisallowUnknownFields()
} }
err := dec.Decode(val) return dec.Decode(val)
if err != nil {
return err
}
// check the trailing chars
offset := dec.InputOffset()
if t, err := dec.Token(); !(t == nil && err == io.EOF) {
return &json.SyntaxError{ Offset: offset}
}
return nil
} }
// Unmarshal is implemented by sonic // Unmarshal is implemented by sonic
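The left-hand (removed) fallback above decodes with encoding/json and then rejects trailing bytes. A standalone sketch of that trailing-data check using only the standard library; decodeStrict is a hypothetical helper name.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
)

// decodeStrict decodes one JSON value and rejects anything after it,
// which is what the removed lines above were doing.
func decodeStrict(buf []byte, val interface{}) error {
	dec := json.NewDecoder(bytes.NewReader(buf))
	if err := dec.Decode(val); err != nil {
		return err
	}
	// After a full value, the only acceptable outcome is a clean EOF.
	if tok, err := dec.Token(); !(tok == nil && err == io.EOF) {
		return fmt.Errorf("trailing data after top-level value at offset %d", dec.InputOffset())
	}
	return nil
}

func main() {
	var v map[string]int
	fmt.Println(decodeStrict([]byte(`{"a":1} rest`), &v)) // non-nil error
}
```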

View File

@@ -1,5 +1,4 @@
//go:build (!amd64 && !arm64) || go1.26 || !go1.17 || (arm64 && !go1.20) // +build !amd64 !go1.16 go1.23
// +build !amd64,!arm64 go1.26 !go1.17 arm64,!go1.20
/* /*
* Copyright 2023 ByteDance Inc. * Copyright 2023 ByteDance Inc.
@@ -20,33 +19,29 @@
package decoder package decoder
import ( import (
"bytes" `bytes`
"encoding/json" `encoding/json`
"io" `io`
"reflect" `reflect`
"unsafe" `unsafe`
"github.com/bytedance/sonic/internal/decoder/consts" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/native/types" `github.com/bytedance/sonic/option`
"github.com/bytedance/sonic/option"
"github.com/bytedance/sonic/internal/compat"
) )
func init() { func init() {
compat.Warn("sonic/decoder") println("WARNING: sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable")
} }
const ( const (
_F_use_int64 = consts.F_use_int64 _F_use_int64 = 0
_F_disable_urc = consts.F_disable_unknown _F_disable_urc = 2
_F_disable_unknown = consts.F_disable_unknown _F_disable_unknown = 3
_F_copy_string = consts.F_copy_string _F_copy_string = 4
_F_use_number = consts.F_use_number _F_use_number = types.B_USE_NUMBER
_F_validate_string = consts.F_validate_string _F_validate_string = types.B_VALIDATE_STRING
_F_allow_control = consts.F_allow_control _F_allow_control = types.B_ALLOW_CONTROL
_F_no_validate_json = consts.F_no_validate_json
_F_case_sensitive = consts.F_case_sensitive
) )
type Options uint64 type Options uint64
@@ -58,8 +53,6 @@ const (
OptionDisableUnknown Options = 1 << _F_disable_unknown OptionDisableUnknown Options = 1 << _F_disable_unknown
OptionCopyString Options = 1 << _F_copy_string OptionCopyString Options = 1 << _F_copy_string
OptionValidateString Options = 1 << _F_validate_string OptionValidateString Options = 1 << _F_validate_string
OptionNoValidateJSON Options = 1 << _F_no_validate_json
OptionCaseSensitive Options = 1 << _F_case_sensitive
) )
func (self *Decoder) SetOptions(opts Options) { func (self *Decoder) SetOptions(opts Options) {
@@ -197,5 +190,5 @@ func (s SyntaxError) Error() string {
return (*json.SyntaxError)(unsafe.Pointer(&s)).Error() return (*json.SyntaxError)(unsafe.Pointer(&s)).Error()
} }
// MismatchTypeError represents mismatching between json and object // MismatchTypeError represents dismatching between json and object
type MismatchTypeError json.UnmarshalTypeError type MismatchTypeError json.UnmarshalTypeError

View File

@@ -1,4 +1,4 @@
// +build !amd64,!arm64 go1.26 !go1.17 arm64,!go1.20 // +build !amd64 !go1.16 go1.23
/* /*
* Copyright 2023 ByteDance Inc. * Copyright 2023 ByteDance Inc.
@@ -25,11 +25,10 @@ import (
`reflect` `reflect`
`github.com/bytedance/sonic/option` `github.com/bytedance/sonic/option`
`github.com/bytedance/sonic/internal/compat`
) )
func init() { func init() {
compat.Warn("sonic/encoder") println("WARNING:(encoder) sonic only supports Go1.16~1.22 && CPU amd64, but your environment is not suitable")
} }
// EnableFallback indicates if encoder use fallback // EnableFallback indicates if encoder use fallback

View File

@@ -23,12 +23,16 @@ import (
) )
var ( var (
V_strhash = rt.UnpackEface(rt.Strhash) V_strhash = rt.UnpackEface(strhash)
S_strhash = *(*uintptr)(V_strhash.Value) S_strhash = *(*uintptr)(V_strhash.Value)
) )
//go:noescape
//go:linkname strhash runtime.strhash
func strhash(_ unsafe.Pointer, _ uintptr) uintptr
func StrHash(s string) uint64 { func StrHash(s string) uint64 {
if v := rt.Strhash(unsafe.Pointer(&s), 0); v == 0 { if v := strhash(unsafe.Pointer(&s), 0); v == 0 {
return 1 return 1
} else { } else {
return uint64(v) return uint64(v)

View File

@@ -24,6 +24,7 @@ import (
) )
var ( var (
HasAVX = cpuid.CPU.Has(cpuid.AVX)
HasAVX2 = cpuid.CPU.Has(cpuid.AVX2) HasAVX2 = cpuid.CPU.Has(cpuid.AVX2)
HasSSE = cpuid.CPU.Has(cpuid.SSE) HasSSE = cpuid.CPU.Has(cpuid.SSE)
) )
@@ -32,8 +33,7 @@ func init() {
switch v := os.Getenv("SONIC_MODE"); v { switch v := os.Getenv("SONIC_MODE"); v {
case "" : break case "" : break
case "auto" : break case "auto" : break
case "noavx" : HasAVX2 = false case "noavx" : HasAVX = false; fallthrough
// will also disable avx, act as `noavx`, we remain it to make sure forward compatibility
case "noavx2" : HasAVX2 = false case "noavx2" : HasAVX2 = false
default : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v)) default : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v))
} }

File diff suppressed because it is too large

View File

@@ -17,63 +17,72 @@
package encoder package encoder
import ( import (
"bytes" `bytes`
"encoding/json" `encoding/json`
"reflect" `reflect`
"runtime" `runtime`
"unsafe" `unsafe`
"github.com/bytedance/sonic/utf8" `github.com/bytedance/sonic/internal/native`
"github.com/bytedance/sonic/internal/encoder/alg" `github.com/bytedance/sonic/internal/native/types`
"github.com/bytedance/sonic/internal/encoder/vars" `github.com/bytedance/sonic/internal/rt`
"github.com/bytedance/sonic/internal/rt" `github.com/bytedance/sonic/utf8`
"github.com/bytedance/sonic/option" `github.com/bytedance/sonic/option`
"github.com/bytedance/gopkg/lang/dirtmake"
) )
// Options is a set of encoding options. // Options is a set of encoding options.
type Options uint64 type Options uint64
const (
bitSortMapKeys = iota
bitEscapeHTML
bitCompactMarshaler
bitNoQuoteTextMarshaler
bitNoNullSliceOrMap
bitValidateString
bitNoValidateJSONMarshaler
bitNoEncoderNewline
// used for recursive compile
bitPointerValue = 63
)
const ( const (
// SortMapKeys indicates that the keys of a map needs to be sorted // SortMapKeys indicates that the keys of a map needs to be sorted
// before serializing into JSON. // before serializing into JSON.
// WARNING: This hurts performance A LOT, USE WITH CARE. // WARNING: This hurts performance A LOT, USE WITH CARE.
SortMapKeys Options = 1 << alg.BitSortMapKeys SortMapKeys Options = 1 << bitSortMapKeys
// EscapeHTML tells the encoder to escape all HTML characters // EscapeHTML tells the encoder to escape all HTML characters
// after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape). // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
// WARNING: This hurts performance A LOT, USE WITH CARE. // WARNING: This hurts performance A LOT, USE WITH CARE.
EscapeHTML Options = 1 << alg.BitEscapeHTML EscapeHTML Options = 1 << bitEscapeHTML
// CompactMarshaler indicates that the output JSON from json.Marshaler // CompactMarshaler indicates that the output JSON from json.Marshaler
// is always compact and needs no validation // is always compact and needs no validation
CompactMarshaler Options = 1 << alg.BitCompactMarshaler CompactMarshaler Options = 1 << bitCompactMarshaler
// NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
// is always escaped string and needs no quoting // is always escaped string and needs no quoting
NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
// NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}', // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
// instead of 'null'. // instead of 'null'
// NOTE: The priority of this option is lower than json tag `omitempty`. NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
// ValidateString indicates that the encoder should validate the input string // ValidateString indicates that the encoder should validate the input string
// before encoding it into JSON. // before encoding it into JSON.
ValidateString Options = 1 << alg.BitValidateString ValidateString Options = 1 << bitValidateString
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
// after encoding the JSONMarshaler to JSON. // after encoding the JSONMarshaler to JSON.
NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler
// NoEncoderNewline indicates that the encoder should not add a newline after every message // NoEncoderNewline indicates that the encoder should not add a newline after every message
NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline NoEncoderNewline Options = 1 << bitNoEncoderNewline
// CompatibleWithStd is used to be compatible with std encoder. // CompatibleWithStd is used to be compatible with std encoder.
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
// Encode Infinity or Nan float into `null`, instead of returning an error.
EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
) )
// Encoder represents a specific set of encoder configurations. // Encoder represents a specific set of encoder configurations.
@@ -162,45 +171,53 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
// Quote returns the JSON-quoted version of s. // Quote returns the JSON-quoted version of s.
func Quote(s string) string { func Quote(s string) string {
buf := make([]byte, 0, len(s)+2) var n int
buf = alg.Quote(buf, s, false) var p []byte
return rt.Mem2Str(buf)
/* check for empty string */
if s == "" {
return `""`
}
/* allocate space for result */
n = len(s) + 2
p = make([]byte, 0, n)
/* call the encoder */
_ = encodeString(&p, s)
return rt.Mem2Str(p)
} }
// Encode returns the JSON encoding of val, encoded with opts. // Encode returns the JSON encoding of val, encoded with opts.
func Encode(val interface{}, opts Options) ([]byte, error) { func Encode(val interface{}, opts Options) ([]byte, error) {
var ret []byte var ret []byte
buf := vars.NewBytes() buf := newBytes()
err := encodeIntoCheckRace(buf, val, opts) err := encodeInto(&buf, val, opts)
/* check for errors */ /* check for errors */
if err != nil { if err != nil {
vars.FreeBytes(buf) freeBytes(buf)
return nil, err return nil, err
} }
/* htmlescape or correct UTF-8 if opts enable */ /* htmlescape or correct UTF-8 if opts enable */
old := buf old := buf
*buf = encodeFinish(*old, opts) buf = encodeFinish(old, opts)
pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
/* return when allocated a new buffer */ /* return when allocated a new buffer */
if pbuf != pold { if pbuf != pold {
vars.FreeBytes(old) freeBytes(old)
return *buf, nil return buf, nil
} }
/* make a copy of the result */ /* make a copy of the result */
if rt.CanSizeResue(cap(*buf)) { ret = make([]byte, len(buf))
ret = dirtmake.Bytes(len(*buf), len(*buf)) copy(ret, buf)
copy(ret, *buf)
vars.FreeBytes(buf)
} else {
ret = *buf
}
freeBytes(buf)
/* return the buffer into pool */ /* return the buffer into pool */
return ret, nil return ret, nil
} }
@@ -208,7 +225,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating // EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
// a new one. // a new one.
func EncodeInto(buf *[]byte, val interface{}, opts Options) error { func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
err := encodeIntoCheckRace(buf, val, opts) err := encodeInto(buf, val, opts)
if err != nil { if err != nil {
return err return err
} }
@@ -217,15 +234,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
} }
func encodeInto(buf *[]byte, val interface{}, opts Options) error { func encodeInto(buf *[]byte, val interface{}, opts Options) error {
stk := vars.NewStack() stk := newStack()
efv := rt.UnpackEface(val) efv := rt.UnpackEface(val)
err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts)) err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
/* return the stack into pool */ /* return the stack into pool */
if err != nil { if err != nil {
vars.ResetStack(stk) resetStack(stk)
} }
vars.FreeStack(stk) freeStack(stk)
/* avoid GC ahead */ /* avoid GC ahead */
runtime.KeepAlive(buf) runtime.KeepAlive(buf)
@@ -237,12 +254,13 @@ func encodeFinish(buf []byte, opts Options) []byte {
if opts & EscapeHTML != 0 { if opts & EscapeHTML != 0 {
buf = HTMLEscape(nil, buf) buf = HTMLEscape(nil, buf)
} }
if (opts & ValidateString != 0) && !utf8.Validate(buf) { if opts & ValidateString != 0 && !utf8.Validate(buf) {
buf = utf8.CorrectWith(nil, buf, `\ufffd`) buf = utf8.CorrectWith(nil, buf, `\ufffd`)
} }
return buf return buf
} }
var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 // HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 // characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
@@ -251,7 +269,7 @@ func encodeFinish(buf []byte, opts Options) []byte {
// escaping within <script> tags, so an alternative JSON encoding must // escaping within <script> tags, so an alternative JSON encoding must
// be used. // be used.
func HTMLEscape(dst []byte, src []byte) []byte { func HTMLEscape(dst []byte, src []byte) []byte {
return alg.HtmlEscape(dst, src) return htmlEscape(dst, src)
} }
// EncodeIndented is like Encode but applies Indent to format the output. // EncodeIndented is like Encode but applies Indent to format the output.
@@ -259,40 +277,37 @@ func HTMLEscape(dst []byte, src []byte) []byte {
// followed by one or more copies of indent according to the indentation nesting. // followed by one or more copies of indent according to the indentation nesting.
func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) { func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
var err error var err error
var out []byte
var buf *bytes.Buffer var buf *bytes.Buffer
/* encode into the buffer */ /* encode into the buffer */
out := vars.NewBytes() out = newBytes()
err = EncodeInto(out, val, opts) err = EncodeInto(&out, val, opts)
/* check for errors */ /* check for errors */
if err != nil { if err != nil {
vars.FreeBytes(out) freeBytes(out)
return nil, err return nil, err
} }
/* indent the JSON */ /* indent the JSON */
buf = vars.NewBuffer() buf = newBuffer()
err = json.Indent(buf, *out, prefix, indent) err = json.Indent(buf, out, prefix, indent)
vars.FreeBytes(out)
/* check for errors */ /* check for errors */
if err != nil { if err != nil {
vars.FreeBuffer(buf) freeBytes(out)
freeBuffer(buf)
return nil, err return nil, err
} }
/* copy to the result buffer */ /* copy to the result buffer */
var ret []byte ret := make([]byte, buf.Len())
if rt.CanSizeResue(cap(buf.Bytes())) {
ret = make([]byte, buf.Len())
copy(ret, buf.Bytes()) copy(ret, buf.Bytes())
/* return the buffers into pool */
vars.FreeBuffer(buf)
} else {
ret = buf.Bytes()
}
/* return the buffers into pool */
freeBytes(out)
freeBuffer(buf)
return ret, nil return ret, nil
} }
@@ -315,5 +330,26 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
// //
// Note: it does not check for the invalid UTF-8 characters. // Note: it does not check for the invalid UTF-8 characters.
func Valid(data []byte) (ok bool, start int) { func Valid(data []byte) (ok bool, start int) {
return alg.Valid(data) n := len(data)
if n == 0 {
return false, -1
}
s := rt.Mem2Str(data)
p := 0
m := types.NewStateMachine()
ret := native.ValidateOne(&s, &p, m, types.F_VALIDATE_STRING)
types.FreeStateMachine(m)
if ret < 0 {
return false, p-1
}
/* check for trailing spaces */
for ;p < n; p++ {
if (types.SPACE_MASK & (1 << data[p])) == 0 {
return false, p
}
}
return true, ret
} }
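The Options constants above belong to sonic's public encoder package. A small usage sketch of Encode and Valid with the combined CompatibleWithStd flags; the value being encoded is illustrative.

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic/encoder"
)

func main() {
	val := map[string]interface{}{"b": 2, "a": "<tag>"}

	// CompatibleWithStd = SortMapKeys | EscapeHTML | CompactMarshaler, per the const block above.
	out, err := encoder.Encode(val, encoder.CompatibleWithStd)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"a":"\u003ctag\u003e","b":2}

	// Valid reports whether the bytes are syntactically valid JSON.
	ok, _ := encoder.Valid(out)
	fmt.Println(ok) // true
}
```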

View File

@@ -17,10 +17,8 @@
package encoder package encoder
import ( import (
"encoding/json" `encoding/json`
"io" `io`
"github.com/bytedance/sonic/internal/encoder/vars"
) )
// StreamEncoder writes its JSON output to an io.Writer. // StreamEncoder writes its JSON output to an io.Writer.
@@ -38,20 +36,21 @@ func NewStreamEncoder(w io.Writer) *StreamEncoder {
// Encode encodes interface{} as JSON to io.Writer // Encode encodes interface{} as JSON to io.Writer
func (enc *StreamEncoder) Encode(val interface{}) (err error) { func (enc *StreamEncoder) Encode(val interface{}) (err error) {
out := vars.NewBytes() buf := newBytes()
out := buf
/* encode into the buffer */ /* encode into the buffer */
err = EncodeInto(out, val, enc.Opts) err = EncodeInto(&out, val, enc.Opts)
if err != nil { if err != nil {
goto free_bytes goto free_bytes
} }
if enc.indent != "" || enc.prefix != "" { if enc.indent != "" || enc.prefix != "" {
/* indent the JSON */ /* indent the JSON */
buf := vars.NewBuffer() buf := newBuffer()
err = json.Indent(buf, *out, enc.prefix, enc.indent) err = json.Indent(buf, out, enc.prefix, enc.indent)
if err != nil { if err != nil {
vars.FreeBuffer(buf) freeBuffer(buf)
goto free_bytes goto free_bytes
} }
@@ -63,17 +62,16 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
/* copy into io.Writer */ /* copy into io.Writer */
_, err = io.Copy(enc.w, buf) _, err = io.Copy(enc.w, buf)
if err != nil { if err != nil {
vars.FreeBuffer(buf) freeBuffer(buf)
goto free_bytes goto free_bytes
} }
} else { } else {
/* copy into io.Writer */ /* copy into io.Writer */
var n int var n int
buf := *out for len(out) > 0 {
for len(buf) > 0 { n, err = enc.w.Write(out)
n, err = enc.w.Write(buf) out = out[n:]
buf = buf[n:]
if err != nil { if err != nil {
goto free_bytes goto free_bytes
} }
@@ -86,6 +84,6 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
} }
free_bytes: free_bytes:
vars.FreeBytes(out) freeBytes(buf)
return err return err
} }
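A brief usage sketch of the StreamEncoder shown above. SetIndent is assumed to be promoted from the embedded Encoder, which is how the enc.prefix/enc.indent fields used in this hunk get set.

```go
package main

import (
	"os"

	"github.com/bytedance/sonic/encoder"
)

func main() {
	enc := encoder.NewStreamEncoder(os.Stdout)
	enc.SetIndent("", "  ") // assumed promoted from the embedded Encoder
	_ = enc.Encode(map[string]int{"pigs": 128})
}
```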

View File

@@ -17,10 +17,8 @@
package jit package jit
import ( import (
"unsafe" `github.com/twitchyliquid64/golang-asm/asm/arch`
`github.com/twitchyliquid64/golang-asm/obj`
"github.com/twitchyliquid64/golang-asm/asm/arch"
"github.com/twitchyliquid64/golang-asm/obj"
) )
var ( var (
@@ -35,13 +33,6 @@ func As(op string) obj.As {
} }
} }
func ImmPtr(imm unsafe.Pointer) obj.Addr {
return obj.Addr {
Type : obj.TYPE_CONST,
Offset : int64(uintptr(imm)),
}
}
func Imm(imm int64) obj.Addr { func Imm(imm int64) obj.Addr {
return obj.Addr { return obj.Addr {
Type : obj.TYPE_CONST, Type : obj.TYPE_CONST,

View File

@@ -21,7 +21,6 @@ import (
`sync` `sync`
_ `unsafe` _ `unsafe`
`github.com/bytedance/sonic/internal/rt`
`github.com/twitchyliquid64/golang-asm/asm/arch` `github.com/twitchyliquid64/golang-asm/asm/arch`
`github.com/twitchyliquid64/golang-asm/obj` `github.com/twitchyliquid64/golang-asm/obj`
`github.com/twitchyliquid64/golang-asm/objabi` `github.com/twitchyliquid64/golang-asm/objabi`
@@ -39,6 +38,10 @@ var (
_progPool sync.Pool _progPool sync.Pool
) )
//go:nosplit
//go:linkname throw runtime.throw
func throw(_ string)
func newProg() *obj.Prog { func newProg() *obj.Prog {
if val := _progPool.Get(); val == nil { if val := _progPool.Get(); val == nil {
return new(obj.Prog) return new(obj.Prog)
@@ -68,7 +71,7 @@ func newLinkContext(arch *obj.LinkArch) (ret *obj.Link) {
} }
func diagLinkContext(str string, args ...interface{}) { func diagLinkContext(str string, args ...interface{}) {
rt.Throw(fmt.Sprintf(str, args...)) throw(fmt.Sprintf(str, args...))
} }
func (self *Backend) New() (ret *obj.Prog) { func (self *Backend) New() (ret *obj.Prog) {

View File

@@ -37,7 +37,7 @@ func Type(t reflect.Type) obj.Addr {
} }
func Itab(i *rt.GoType, t reflect.Type) obj.Addr { func Itab(i *rt.GoType, t reflect.Type) obj.Addr {
return Imm(int64(uintptr(unsafe.Pointer(rt.GetItab(rt.IfaceType(i), rt.UnpackType(t), false))))) return Imm(int64(uintptr(unsafe.Pointer(rt.Getitab(rt.IfaceType(i), rt.UnpackType(t), false)))))
} }
func Gitab(i *rt.GoItab) obj.Addr { func Gitab(i *rt.GoItab) obj.Addr {

View File

@@ -20,6 +20,7 @@ import (
`unsafe` `unsafe`
`github.com/bytedance/sonic/internal/cpu` `github.com/bytedance/sonic/internal/cpu`
`github.com/bytedance/sonic/internal/native/avx`
`github.com/bytedance/sonic/internal/native/avx2` `github.com/bytedance/sonic/internal/native/avx2`
`github.com/bytedance/sonic/internal/native/sse` `github.com/bytedance/sonic/internal/native/sse`
`github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/native/types`
@@ -86,10 +87,6 @@ var (
__ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int) __ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__ValidateUTF8Fast func(s unsafe.Pointer) (ret int) __ValidateUTF8Fast func(s unsafe.Pointer) (ret int)
__ParseWithPadding func(parser unsafe.Pointer) (ret int)
__LookupSmallKey func(key unsafe.Pointer, table unsafe.Pointer, lowerOff int) (index int)
) )
//go:nosplit //go:nosplit
@@ -162,22 +159,12 @@ func ValidateUTF8Fast(s *string) (ret int) {
return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s))) return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s)))
} }
//go:nosplit
func ParseWithPadding(parser unsafe.Pointer) (ret int) {
return __ParseWithPadding(rt.NoEscape(unsafe.Pointer(parser)))
}
//go:nosplit
func LookupSmallKey(key *string, table *[]byte, lowerOff int) (index int) {
return __LookupSmallKey(rt.NoEscape(unsafe.Pointer(key)), rt.NoEscape(unsafe.Pointer(table)), lowerOff)
}
func useSSE() { func useSSE() {
sse.Use() sse.Use()
S_f64toa = sse.S_f64toa S_f64toa = sse.S_f64toa
__F64toa = sse.F_f64toa __F64toa = sse.F_f64toa
S_f32toa = sse.S_f32toa S_f32toa = sse.S_f32toa
__F32toa = sse.F_f32toa __F64toa = sse.F_f64toa
S_i64toa = sse.S_i64toa S_i64toa = sse.S_i64toa
__I64toa = sse.F_i64toa __I64toa = sse.F_i64toa
S_u64toa = sse.S_u64toa S_u64toa = sse.S_u64toa
@@ -205,8 +192,42 @@ func useSSE() {
__ValidateOne = sse.F_validate_one __ValidateOne = sse.F_validate_one
__ValidateUTF8= sse.F_validate_utf8 __ValidateUTF8= sse.F_validate_utf8
__ValidateUTF8Fast = sse.F_validate_utf8_fast __ValidateUTF8Fast = sse.F_validate_utf8_fast
__ParseWithPadding = sse.F_parse_with_padding }
__LookupSmallKey = sse.F_lookup_small_key
func useAVX() {
avx.Use()
S_f64toa = avx.S_f64toa
__F64toa = avx.F_f64toa
S_f32toa = avx.S_f32toa
__F64toa = avx.F_f64toa
S_i64toa = avx.S_i64toa
__I64toa = avx.F_i64toa
S_u64toa = avx.S_u64toa
__U64toa = avx.F_u64toa
S_lspace = avx.S_lspace
S_quote = avx.S_quote
__Quote = avx.F_quote
S_unquote = avx.S_unquote
__Unquote = avx.F_unquote
S_value = avx.S_value
__Value = avx.F_value
S_vstring = avx.S_vstring
S_vnumber = avx.S_vnumber
S_vsigned = avx.S_vsigned
S_vunsigned = avx.S_vunsigned
S_skip_one = avx.S_skip_one
__SkipOne = avx.F_skip_one
__SkipOneFast = avx.F_skip_one_fast
S_skip_array = avx.S_skip_array
S_skip_object = avx.S_skip_object
S_skip_number = avx.S_skip_number
S_get_by_path = avx.S_get_by_path
__GetByPath = avx.F_get_by_path
__HTMLEscape = avx.F_html_escape
__ValidateOne = avx.F_validate_one
__ValidateUTF8= avx.F_validate_utf8
__ValidateUTF8Fast = avx.F_validate_utf8_fast
} }
func useAVX2() { func useAVX2() {
@@ -214,7 +235,7 @@ func useAVX2() {
S_f64toa = avx2.S_f64toa S_f64toa = avx2.S_f64toa
__F64toa = avx2.F_f64toa __F64toa = avx2.F_f64toa
S_f32toa = avx2.S_f32toa S_f32toa = avx2.S_f32toa
__F32toa = avx2.F_f32toa __F64toa = avx2.F_f64toa
S_i64toa = avx2.S_i64toa S_i64toa = avx2.S_i64toa
__I64toa = avx2.F_i64toa __I64toa = avx2.F_i64toa
S_u64toa = avx2.S_u64toa S_u64toa = avx2.S_u64toa
@@ -242,17 +263,17 @@ func useAVX2() {
__ValidateOne = avx2.F_validate_one __ValidateOne = avx2.F_validate_one
__ValidateUTF8= avx2.F_validate_utf8 __ValidateUTF8= avx2.F_validate_utf8
__ValidateUTF8Fast = avx2.F_validate_utf8_fast __ValidateUTF8Fast = avx2.F_validate_utf8_fast
__ParseWithPadding = avx2.F_parse_with_padding
__LookupSmallKey = avx2.F_lookup_small_key
} }
func init() { func init() {
if cpu.HasAVX2 { if cpu.HasAVX2 {
useAVX2() useAVX2()
} else if cpu.HasAVX {
useAVX()
} else if cpu.HasSSE { } else if cpu.HasSSE {
useSSE() useSSE()
} else { } else {
panic("Unsupported CPU, lacks of AVX2 or SSE CPUID Flag. maybe it's too old to run Sonic.") panic("Unsupported CPU, maybe it's too old to run Sonic.")
} }
} }

View File

@@ -22,7 +22,7 @@ import (
`unsafe` `unsafe`
) )
type ValueType = int64 type ValueType int
type ParsingError uint type ParsingError uint
type SearchingError uint type SearchingError uint
@@ -57,9 +57,6 @@ const (
B_USE_NUMBER = 1 B_USE_NUMBER = 1
B_VALIDATE_STRING = 5 B_VALIDATE_STRING = 5
B_ALLOW_CONTROL = 31 B_ALLOW_CONTROL = 31
// for native.SkipOne() flags
B_NO_VALIDATE_JSON= 6
) )
const ( const (

View File

@@ -17,11 +17,10 @@
package resolver package resolver
import ( import (
"fmt" `fmt`
"reflect" `reflect`
"strings" `strings`
"sync" `sync`
_ "unsafe"
) )
type FieldOpts int type FieldOpts int
@@ -30,7 +29,6 @@ type OffsetType int
const ( const (
F_omitempty FieldOpts = 1 << iota F_omitempty FieldOpts = 1 << iota
F_stringize F_stringize
F_omitzero
) )
const ( const (
@@ -49,7 +47,6 @@ type FieldMeta struct {
Path []Offset Path []Offset
Opts FieldOpts Opts FieldOpts
Type reflect.Type Type reflect.Type
IsZero func(reflect.Value) bool
} }
func (self *FieldMeta) String() string { func (self *FieldMeta) String() string {
@@ -120,26 +117,20 @@ func resolveFields(vt reflect.Type) []FieldMeta {
/* convert each field */ /* convert each field */
for _, fv := range tfv.list { for _, fv := range tfv.list {
/* add to result */
ret = append(ret, FieldMeta{})
fm := &ret[len(ret)-1]
item := vt item := vt
path := []Offset(nil) path := []Offset(nil)
opts := FieldOpts(0)
/* check for "string" */ /* check for "string" */
if fv.quoted { if fv.quoted {
fm.Opts |= F_stringize opts |= F_stringize
} }
/* check for "omitempty" */ /* check for "omitempty" */
if fv.omitEmpty { if fv.omitEmpty {
fm.Opts |= F_omitempty opts |= F_omitempty
} }
/* handle the "omitzero" */
handleOmitZero(fv, fm)
/* dump the field path */ /* dump the field path */
for _, i := range fv.index { for _, i := range fv.index {
kind := F_offset kind := F_offset
@@ -170,9 +161,13 @@ func resolveFields(vt reflect.Type) []FieldMeta {
path[idx].Kind = F_offset path[idx].Kind = F_offset
} }
fm.Type = fvt /* add to result */
fm.Path = path ret = append(ret, FieldMeta {
fm.Name = fv.name Type: fvt,
Opts: opts,
Path: path,
Name: fv.name,
})
} }
/* optimize the offsets */ /* optimize the offsets */
@@ -217,10 +212,3 @@ func ResolveStruct(vt reflect.Type) []FieldMeta {
fieldCache[vt] = fm fieldCache[vt] = fm
return fm return fm
} }
func handleOmitZero(fv StdField, fm *FieldMeta) {
if fv.omitZero {
fm.Opts |= F_omitzero
fm.IsZero = fv.isZero
}
}
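The resolver hunk above maps `json` struct-tag options (`string`, `omitempty`, and, in the newer version, `omitzero`) onto FieldOpts flags. A minimal sketch of the tag forms involved, using the standard encoding/json package whose semantics sonic mirrors; the struct and field names are made up.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Pen shows the tag options the resolver handles:
// ",string" maps to F_stringize and ",omitempty" to F_omitempty.
type Pen struct {
	ID    int64  `json:"id,string"`
	Note  string `json:"note,omitempty"`
	Count int    `json:"count"`
}

func main() {
	out, _ := json.Marshal(Pen{ID: 7, Count: 3})
	fmt.Println(string(out)) // {"id":"7","count":3}
}
```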

View File

@@ -1,5 +1,4 @@
// +build !noasm,amd64 !appengine,amd64 // +build !noasm,amd64 !appengine,amd64
// Code generated by asm2asm, DO NOT EDIT·
#include "go_asm.h" #include "go_asm.h"
#include "funcdata.h" #include "funcdata.h"
@@ -18,3 +17,43 @@ _entry:
_stack_grow: _stack_grow:
CALL runtime·morestack_noctxt<>(SB) CALL runtime·morestack_noctxt<>(SB)
JMP _entry JMP _entry
TEXT ·StopProf(SB), NOSPLIT, $0-0
NO_LOCAL_POINTERS
CMPB github·com∕bytedance∕sonic∕internal∕rt·StopProfiling(SB), $0
JEQ _ret_1
MOVL $1, AX
LEAQ github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), CX
LOCK
XADDL AX, (CX)
MOVL runtime·prof+4(SB), AX
TESTL AX, AX
JEQ _ret_1
MOVL AX, github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB)
MOVL $0, runtime·prof+4(SB)
_ret_1:
RET
TEXT ·StartProf(SB), NOSPLIT, $0-0
NO_LOCAL_POINTERS
CMPB github·com∕bytedance∕sonic∕internal∕rt·StopProfiling(SB), $0
JEQ _ret_2
MOVL $-1, AX
LEAQ github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), CX
LOCK
XADDL AX, (CX)
CMPL github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), $0
JNE _ret_2
CMPL runtime·prof+4(SB), $0
JNE _ret_2
CMPL github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB), $0
JNE _branch_1
MOVL $100, github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB)
_branch_1:
MOVL github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB), AX
MOVL AX, runtime·prof+4(SB)
_ret_2:
RET

View File

@@ -17,10 +17,8 @@
package rt package rt
import ( import (
"reflect" `unsafe`
"unsafe" `reflect`
"github.com/bytedance/sonic/option"
) )
//go:nosplit //go:nosplit
@@ -92,21 +90,6 @@ func GuardSlice(buf *[]byte, n int) {
} }
} }
func GuardSlice2(buf []byte, n int) []byte {
c := cap(buf)
l := len(buf)
if c-l < n {
c = c>>1 + n + l
if c < 32 {
c = 32
}
tmp := make([]byte, l, c)
copy(tmp, buf)
buf = tmp
}
return buf
}
//go:nosplit //go:nosplit
func Ptr2SlicePtr(s unsafe.Pointer, l int, c int) unsafe.Pointer { func Ptr2SlicePtr(s unsafe.Pointer, l int, c int) unsafe.Pointer {
slice := &GoSlice{ slice := &GoSlice{
@@ -140,16 +123,3 @@ func NoEscape(p unsafe.Pointer) unsafe.Pointer {
x := uintptr(p) x := uintptr(p)
return unsafe.Pointer(x ^ 0) return unsafe.Pointer(x ^ 0)
} }
//go:nosplit
func MoreStack(size uintptr)
//go:nosplit
func Add(ptr unsafe.Pointer, off uintptr) unsafe.Pointer {
return unsafe.Pointer(uintptr(ptr) + off)
}
// CanSizeResue
func CanSizeResue(cap int) bool {
return cap <= int(option.LimitBufferSize)
}

View File

@@ -17,8 +17,8 @@
package rt package rt
import ( import (
"reflect" `reflect`
"unsafe" `unsafe`
) )
var ( var (
@@ -72,7 +72,37 @@ func (self *GoType) String() string {
} }
func (self *GoType) Indirect() bool { func (self *GoType) Indirect() bool {
return self.KindFlags&F_direct == 0 return self.KindFlags & F_direct == 0
}
type GoMap struct {
Count int
Flags uint8
B uint8
Overflow uint16
Hash0 uint32
Buckets unsafe.Pointer
OldBuckets unsafe.Pointer
Evacuate uintptr
Extra unsafe.Pointer
}
type GoMapIterator struct {
K unsafe.Pointer
V unsafe.Pointer
T *GoMapType
H *GoMap
Buckets unsafe.Pointer
Bptr *unsafe.Pointer
Overflow *[]unsafe.Pointer
OldOverflow *[]unsafe.Pointer
StartBucket uintptr
Offset uint8
Wrapped bool
B uint8
I uint8
Bucket uintptr
CheckBucket uintptr
} }
type GoItab struct { type GoItab struct {
@@ -116,7 +146,7 @@ type GoMapType struct {
} }
func (self *GoMapType) IndirectElem() bool { func (self *GoMapType) IndirectElem() bool {
return self.Flags&2 != 0 return self.Flags & 2 != 0
} }
type GoStructType struct { type GoStructType struct {
@@ -189,7 +219,7 @@ func AssertI2I2(t *GoType, i GoIface) (r GoIface) {
return return
} }
if (*GoInterfaceType)(tab.it) != inter { if (*GoInterfaceType)(tab.it) != inter {
tab = GetItab(inter, tab.Vt, true) tab = Getitab(inter, tab.Vt, true)
if tab == nil { if tab == nil {
return return
} }
@@ -199,33 +229,15 @@ func AssertI2I2(t *GoType, i GoIface) (r GoIface) {
return return
} }
func (t *GoType) IsInt64() bool {
return t.Kind() == reflect.Int64 || (t.Kind() == reflect.Int && t.Size == 8)
}
func (t *GoType) IsInt32() bool {
return t.Kind() == reflect.Int32 || (t.Kind() == reflect.Int && t.Size == 4)
}
//go:nosplit
func (t *GoType) IsUint64() bool {
isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
return t.Kind() == reflect.Uint64 || (isUint && t.Size == 8)
}
//go:nosplit
func (t *GoType) IsUint32() bool {
isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
return t.Kind() == reflect.Uint32 || (isUint && t.Size == 4)
}
//go:nosplit
func PtrAdd(ptr unsafe.Pointer, offset uintptr) unsafe.Pointer {
return unsafe.Pointer(uintptr(ptr) + offset)
}
//go:noescape //go:noescape
//go:linkname GetItab runtime.getitab //go:linkname Getitab runtime.getitab
func GetItab(inter *GoInterfaceType, typ *GoType, canfail bool) *GoItab func Getitab(inter *GoInterfaceType, typ *GoType, canfail bool) *GoItab
func GetFuncPC(fn interface{}) uintptr {
ft := UnpackEface(fn)
if ft.Type.Kind() != reflect.Func {
panic("not a function")
}
return *(*uintptr)(ft.Value)
}

View File

@@ -1,5 +1,3 @@
// +build go1.21,!go1.26
/* /*
* Copyright 2021 ByteDance Inc. * Copyright 2021 ByteDance Inc.
* *
@@ -19,18 +17,13 @@
package rt package rt
import ( import (
`os`
`sync/atomic` `sync/atomic`
`unsafe` `unsafe`
`golang.org/x/arch/x86/x86asm` `golang.org/x/arch/x86/x86asm`
) )
//go:linkname GcWriteBarrier2 runtime.gcWriteBarrier2
func GcWriteBarrier2()
//go:linkname RuntimeWriteBarrier runtime.writeBarrier
var RuntimeWriteBarrier uintptr
const ( const (
_MaxInstr = 15 _MaxInstr = 15
) )
@@ -83,3 +76,49 @@ func GcwbAddr() uintptr {
} }
} }
// StopProfiling is used to stop traceback introduced by SIGPROF while native code is running.
// WARN: this option is only a workaround for traceback issue (https://github.com/bytedance/sonic/issues/310),
// and will be dropped when the issue is fixed.
var StopProfiling = os.Getenv("SONIC_STOP_PROFILING") != ""
// WARN: must be aligned with runtime.Prof
// type Prof struct {
// signalLock uint32
// hz int32
// }
var (
// // go:linkname runtimeProf runtime.prof
// runtimeProf Prof
// count of native-C calls
yieldCount uint32
// previous value of runtimeProf.hz
oldHz int32
)
//go:nosplit
func MoreStack(size uintptr)
func StopProf()
// func StopProf() {
// atomic.AddUint32(&yieldCount, 1)
// if runtimeProf.hz != 0 {
// oldHz = runtimeProf.hz
// runtimeProf.hz = 0
// }
// }
func StartProf()
// func StartProf() {
// atomic.AddUint32(&yieldCount, ^uint32(0))
// if yieldCount == 0 && runtimeProf.hz == 0 {
// if oldHz == 0 {
// oldHz = 100
// }
// runtimeProf.hz = oldHz
// }
// }

View File

@@ -17,6 +17,8 @@
package loader package loader
import ( import (
"sync/atomic"
"unsafe"
_ `unsafe` _ `unsafe`
) )
@@ -24,5 +26,35 @@ import (
//goland:noinspection GoUnusedGlobalVariable //goland:noinspection GoUnusedGlobalVariable
var lastmoduledatap *moduledata var lastmoduledatap *moduledata
func registerModule(mod *moduledata) {
registerModuleLockFree(&lastmoduledatap, mod)
}
//go:linkname moduledataverify1 runtime.moduledataverify1 //go:linkname moduledataverify1 runtime.moduledataverify1
func moduledataverify1(_ *moduledata) func moduledataverify1(_ *moduledata)
func registerModuleLockFree(tail **moduledata, mod *moduledata) {
for {
oldTail := loadModule(tail)
if casModule(tail, oldTail, mod) {
storeModule(&oldTail.next, mod)
break
}
}
}
func loadModule(p **moduledata) *moduledata {
return (*moduledata)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(p))))
}
func storeModule(p **moduledata, value *moduledata) {
atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(p)), unsafe.Pointer(value))
}
func casModule(p **moduledata, oldValue *moduledata, newValue *moduledata) bool {
return atomic.CompareAndSwapPointer(
(*unsafe.Pointer)(unsafe.Pointer(p)),
unsafe.Pointer(oldValue),
unsafe.Pointer(newValue),
)
}
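registerModuleLockFree above appends a moduledata to a shared tail with a CAS loop instead of a mutex. A self-contained sketch of the same pattern on a made-up node type (not the real moduledata):

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
	"unsafe"
)

// node stands in for moduledata: a singly linked list with a shared tail
// that many goroutines may append to without taking a lock.
type node struct {
	val  int
	next *node
}

// appendLockFree swaps the shared tail to the new node, then links the old
// tail to it, mirroring the CAS loop in registerModuleLockFree.
func appendLockFree(tail **node, n *node) {
	for {
		old := (*node)(atomic.LoadPointer((*unsafe.Pointer)(unsafe.Pointer(tail))))
		if atomic.CompareAndSwapPointer(
			(*unsafe.Pointer)(unsafe.Pointer(tail)),
			unsafe.Pointer(old), unsafe.Pointer(n),
		) {
			atomic.StorePointer((*unsafe.Pointer)(unsafe.Pointer(&old.next)), unsafe.Pointer(n))
			return
		}
	}
}

func main() {
	head := &node{val: 0}
	tail := head
	var wg sync.WaitGroup
	for i := 1; i <= 4; i++ {
		wg.Add(1)
		go func(i int) { defer wg.Done(); appendLockFree(&tail, &node{val: i}) }(i)
	}
	wg.Wait()

	count := 0
	for p := head; p != nil; p = p.next {
		count++
	}
	fmt.Println(count) // 5: head plus the four appended nodes
}
```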

View File

@@ -18,17 +18,10 @@ package option
var ( var (
// DefaultDecoderBufferSize is the initial buffer size of StreamDecoder // DefaultDecoderBufferSize is the initial buffer size of StreamDecoder
DefaultDecoderBufferSize uint = 4 * 1024 DefaultDecoderBufferSize uint = 128 * 1024
// DefaultEncoderBufferSize is the initial buffer size of Encoder // DefaultEncoderBufferSize is the initial buffer size of Encoder
DefaultEncoderBufferSize uint = 4 * 1024 DefaultEncoderBufferSize uint = 128 * 1024
// DefaultAstBufferSize is the initial buffer size of ast.Node.MarshalJSON()
DefaultAstBufferSize uint = 4 * 1024
// LimitBufferSize indicates the max pool buffer size, in case of OOM.
// See issue https://github.com/bytedance/sonic/issues/614
LimitBufferSize uint = 1024 * 1024
) )
// CompileOptions includes all options for encoder or decoder compiler. // CompileOptions includes all options for encoder or decoder compiler.
@@ -68,7 +61,7 @@ type CompileOption func(o *CompileOptions)
// //
// For deep nested struct (depth exceeds MaxInlineDepth), // For deep nested struct (depth exceeds MaxInlineDepth),
// try to set more loops to completely compile, // try to set more loops to completely compile,
// thus reduce JIT instability in the first hit. // thus reduce JIT unstability in the first hit.
func WithCompileRecursiveDepth(loop int) CompileOption { func WithCompileRecursiveDepth(loop int) CompileOption {
return func(o *CompileOptions) { return func(o *CompileOptions) {
if loop < 0 { if loop < 0 {
@@ -90,3 +83,4 @@ func WithCompileMaxInlineDepth(depth int) CompileOption {
o.MaxInlineDepth = depth o.MaxInlineDepth = depth
} }
} }
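The option package above exposes package-level buffer sizes and compile options. A hedged usage sketch: Barn is a made-up type, the buffer size is illustrative, and Pretouch is taken from sonic's public API as shown in this vendored version.

```go
package main

import (
	"reflect"

	"github.com/bytedance/sonic"
	"github.com/bytedance/sonic/option"
)

// Barn stands in for a hot request/response struct.
type Barn struct {
	Pens map[string][]int64
}

func main() {
	// Package-level knob from the file above; 64 KiB is only an example value.
	option.DefaultEncoderBufferSize = 64 * 1024

	// Pre-compile the codec so the first Marshal/Unmarshal does not pay JIT cost.
	_ = sonic.Pretouch(reflect.TypeOf(Barn{}),
		option.WithCompileRecursiveDepth(2),
		option.WithCompileMaxInlineDepth(2),
	)
}
```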

View File

@@ -1,5 +1,4 @@
//go:build (amd64 && go1.17 && !go1.26) || (arm64 && go1.20 && !go1.26) // +build amd64,go1.16,!go1.23
// +build amd64,go1.17,!go1.26 arm64,go1.20,!go1.26
/* /*
* Copyright 2021 ByteDance Inc. * Copyright 2021 ByteDance Inc.
@@ -17,6 +16,7 @@
* limitations under the License. * limitations under the License.
*/ */
//go:generate make
package sonic package sonic
import ( import (
@@ -29,8 +29,6 @@ import (
`github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/internal/rt`
) )
const apiKind = UseSonicJSON
type frozenConfig struct { type frozenConfig struct {
Config Config
encoderOpts encoder.Options encoderOpts encoder.Options
@@ -66,14 +64,8 @@ func (cfg Config) Froze() API {
if cfg.NoEncoderNewline { if cfg.NoEncoderNewline {
api.encoderOpts |= encoder.NoEncoderNewline api.encoderOpts |= encoder.NoEncoderNewline
} }
if cfg.EncodeNullForInfOrNan {
api.encoderOpts |= encoder.EncodeNullForInfOrNan
}
// configure decoder options: // configure decoder options:
if cfg.NoValidateJSONSkip {
api.decoderOpts |= decoder.OptionNoValidateJSON
}
if cfg.UseInt64 { if cfg.UseInt64 {
api.decoderOpts |= decoder.OptionUseInt64 api.decoderOpts |= decoder.OptionUseInt64
} }
@@ -89,9 +81,6 @@ func (cfg Config) Froze() API {
if cfg.ValidateString { if cfg.ValidateString {
api.decoderOpts |= decoder.OptionValidateString api.decoderOpts |= decoder.OptionValidateString
} }
if cfg.CaseSensitive {
api.decoderOpts |= decoder.OptionCaseSensitive
}
return api return api
} }
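The Froze hunk above wires Config fields onto encoder/decoder options. A minimal sketch using only fields that survive in this vendored version (UseInt64, ValidateString, SortMapKeys); the JSON payload is made up.

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

func main() {
	api := sonic.Config{
		UseInt64:       true, // maps to decoder.OptionUseInt64
		ValidateString: true, // maps to decoder.OptionValidateString
		SortMapKeys:    true, // maps to encoder.SortMapKeys
	}.Froze()

	var v map[string]interface{}
	if err := api.UnmarshalFromString(`{"weight": 9007199254740993}`, &v); err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", v["weight"]) // int64 rather than float64, because UseInt64 is set
}
```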

View File

@@ -1,7 +1,3 @@
//go:build (amd64 && go1.17 && !go1.26) || (arm64 && go1.20 && !go1.26)
// +build amd64,go1.17,!go1.26 arm64,go1.20,!go1.26
/* /*
* Copyright 2021 ByteDance Inc. * Copyright 2021 ByteDance Inc.
* *
@@ -29,7 +25,7 @@ import (
`github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/internal/rt`
) )
// String unescapes an escaped string (not including `"` at beginning and end) // String unescapes a escaped string (not including `"` at begining and end)
// It validates invalid UTF8 and replace with `\ufffd` // It validates invalid UTF8 and replace with `\ufffd`
func String(s string) (ret string, err types.ParsingError) { func String(s string) (ret string, err types.ParsingError) {
mm := make([]byte, 0, len(s)) mm := make([]byte, 0, len(s))
@@ -47,7 +43,7 @@ func IntoBytes(s string, m *[]byte) types.ParsingError {
} }
} }
// String unescapes an escaped string (not including `"` at beginning and end) // String unescapes a escaped string (not including `"` at begining and end)
// - replace enables replacing invalid utf8 escaped char with `\uffd` // - replace enables replacing invalid utf8 escaped char with `\uffd`
func _String(s string, replace bool) (ret string, err error) { func _String(s string, replace bool) (ret string, err error) {
mm := make([]byte, 0, len(s)) mm := make([]byte, 0, len(s))

View File

@@ -1,6 +1,3 @@
//go:build (amd64 && go1.17 && !go1.26) || (arm64 && go1.20 && !go1.26)
// +build amd64,go1.17,!go1.26 arm64,go1.20,!go1.26
/* /*
* Copyright 2022 ByteDance Inc. * Copyright 2022 ByteDance Inc.
* *
@@ -20,8 +17,6 @@
package utf8 package utf8
import ( import (
`runtime`
`github.com/bytedance/sonic/internal/rt` `github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/internal/native/types` `github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/native` `github.com/bytedance/sonic/internal/native`
@@ -32,7 +27,7 @@ func CorrectWith(dst []byte, src []byte, repl string) []byte {
sstr := rt.Mem2Str(src) sstr := rt.Mem2Str(src)
sidx := 0 sidx := 0
/* state machine records the invalid positions */ /* state machine records the invalid postions */
m := types.NewStateMachine() m := types.NewStateMachine()
m.Sp = 0 // invalid utf8 numbers m.Sp = 0 // invalid utf8 numbers
@@ -65,20 +60,12 @@ func CorrectWith(dst []byte, src []byte, repl string) []byte {
return dst return dst
} }
// Validate is a simd-accelerated drop-in replacement for the standard library's utf8.Valid. // Validate is a simd-accelereated drop-in replacement for the standard library's utf8.Valid.
func Validate(src []byte) bool { func Validate(src []byte) bool {
if src == nil {
return true
}
return ValidateString(rt.Mem2Str(src)) return ValidateString(rt.Mem2Str(src))
} }
// ValidateString as Validate, but for string. // ValidateString as Validate, but for string.
func ValidateString(src string) bool { func ValidateString(src string) bool {
if src == "" { return native.ValidateUTF8Fast(&src) == 0
return true
}
ret := native.ValidateUTF8Fast(&src) == 0
runtime.KeepAlive(src)
return ret
} }
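The utf8 package touched here is exported by sonic. A small usage sketch of Validate and CorrectWith as declared in this hunk; the input bytes are illustrative.

```go
package main

import (
	"fmt"

	"github.com/bytedance/sonic/utf8"
)

func main() {
	bad := []byte("pig\xfffarm") // 0xff is never valid UTF-8

	fmt.Println(utf8.Validate(bad)) // false

	// Replace invalid bytes, as the encoder's ValidateString path does.
	fixed := utf8.CorrectWith(nil, bad, `\ufffd`)
	fmt.Println(string(fixed))
}
```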

View File

@@ -1,6 +1,6 @@
MIT License MIT License
Copyright (c) 2018 Gabriel Vasile Copyright (c) 2018-2020 Gabriel Vasile
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

View File

@@ -16,6 +16,9 @@
<a href="https://goreportcard.com/report/github.com/gabriel-vasile/mimetype"> <a href="https://goreportcard.com/report/github.com/gabriel-vasile/mimetype">
<img alt="Go report card" src="https://goreportcard.com/badge/github.com/gabriel-vasile/mimetype"> <img alt="Go report card" src="https://goreportcard.com/badge/github.com/gabriel-vasile/mimetype">
</a> </a>
<a href="https://codecov.io/gh/gabriel-vasile/mimetype">
<img alt="Code coverage" src="https://codecov.io/gh/gabriel-vasile/mimetype/branch/master/graph/badge.svg?token=qcfJF1kkl2"/>
</a>
<a href="LICENSE"> <a href="LICENSE">
<img alt="License" src="https://img.shields.io/badge/License-MIT-green.svg"> <img alt="License" src="https://img.shields.io/badge/License-MIT-green.svg">
</a> </a>
@@ -27,7 +30,6 @@
- possibility to [extend](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#example-package-Extend) with other file formats - possibility to [extend](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#example-package-Extend) with other file formats
- common file formats are prioritized - common file formats are prioritized
- [text vs. binary files differentiation](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#example-package-TextVsBinary) - [text vs. binary files differentiation](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#example-package-TextVsBinary)
- no external dependencies
- safe for concurrent usage - safe for concurrent usage
## Install ## Install
@@ -46,7 +48,8 @@ fmt.Println(mtype.String(), mtype.Extension())
``` ```
See the [runnable Go Playground examples](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#pkg-overview). See the [runnable Go Playground examples](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#pkg-overview).
Caution: only use libraries like **mimetype** as a last resort. Content type detection ## Usage
Only use libraries like **mimetype** as a last resort. Content type detection
using magic numbers is slow, inaccurate, and non-standard. Most of the time using magic numbers is slow, inaccurate, and non-standard. Most of the time
protocols have methods for specifying such metadata; e.g., `Content-Type` header protocols have methods for specifying such metadata; e.g., `Content-Type` header
in HTTP and SMTP. in HTTP and SMTP.
@@ -67,18 +70,6 @@ mimetype.DetectFile("file.doc")
If increasing the limit does not help, please If increasing the limit does not help, please
[open an issue](https://github.com/gabriel-vasile/mimetype/issues/new?assignees=&labels=&template=mismatched-mime-type-detected.md&title=). [open an issue](https://github.com/gabriel-vasile/mimetype/issues/new?assignees=&labels=&template=mismatched-mime-type-detected.md&title=).
## Tests
In addition to unit tests,
[mimetype_tests](https://github.com/gabriel-vasile/mimetype_tests) compares the
library with the [Unix file utility](https://en.wikipedia.org/wiki/File_(command))
for around 50 000 sample files. Check the latest comparison results
[here](https://github.com/gabriel-vasile/mimetype_tests/actions).
## Benchmarks
Benchmarks for each file format are performed when a PR is open. The results can
be seen on the [workflows page](https://github.com/gabriel-vasile/mimetype/actions/workflows/benchmark.yml).
Performance improvements are welcome but correctness is prioritized.
## Structure ## Structure
**mimetype** uses a hierarchical structure to keep the MIME type detection logic. **mimetype** uses a hierarchical structure to keep the MIME type detection logic.
This reduces the number of calls needed for detecting the file type. The reason This reduces the number of calls needed for detecting the file type. The reason
@@ -93,11 +84,22 @@ To prevent loading entire files into memory, when detecting from a
or from a [file](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#DetectFile) or from a [file](https://pkg.go.dev/github.com/gabriel-vasile/mimetype#DetectFile)
**mimetype** limits itself to reading only the header of the input. **mimetype** limits itself to reading only the header of the input.
<div align="center"> <div align="center">
<img alt="how project is structured" src="https://raw.githubusercontent.com/gabriel-vasile/mimetype/master/testdata/gif.gif" width="88%"> <img alt="structure" src="https://github.com/gabriel-vasile/mimetype/blob/420a05228c6a6efbb6e6f080168a25663414ff36/mimetype.gif?raw=true" width="88%">
</div> </div>
## Performance
Thanks to the hierarchical structure, searching for common formats first,
and limiting itself to file headers, **mimetype** matches the performance of
stdlib `http.DetectContentType` while outperforming the alternative package.
```bash
mimetype http.DetectContentType filetype
BenchmarkMatchTar-24 250 ns/op 400 ns/op 3778 ns/op
BenchmarkMatchZip-24 524 ns/op 351 ns/op 4884 ns/op
BenchmarkMatchJpeg-24 103 ns/op 228 ns/op 839 ns/op
BenchmarkMatchGif-24 139 ns/op 202 ns/op 751 ns/op
BenchmarkMatchPng-24 165 ns/op 221 ns/op 1176 ns/op
```
## Contributing ## Contributing
Contributions are unexpected but welcome. When submitting a PR for detection of See [CONTRIBUTING.md](CONTRIBUTING.md).
a new file format, please make sure to add a record to the list of testcases
from [mimetype_test.go](mimetype_test.go). For complex files a record can be added
in the [testdata](testdata) directory.
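A short usage sketch of the mimetype public API referenced throughout this README diff (Detect, DetectFile, Extension, Is, SetLimit); the file name and byte content are placeholders.

```go
package main

import (
	"fmt"

	"github.com/gabriel-vasile/mimetype"
)

func main() {
	// Detection only looks at the header bytes, as the README section above notes.
	mtype := mimetype.Detect([]byte("%PDF-1.7 ..."))
	fmt.Println(mtype.String(), mtype.Extension()) // application/pdf .pdf

	if m, err := mimetype.DetectFile("upload.bin"); err == nil { // placeholder path
		fmt.Println(m.Is("application/zip"))
	}

	// Raise the read limit if a format needs a larger header (0 means no limit).
	mimetype.SetLimit(4096)
}
```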

View File

@@ -2,10 +2,11 @@ package charset
import ( import (
"bytes" "bytes"
"encoding/xml"
"strings"
"unicode/utf8" "unicode/utf8"
"github.com/gabriel-vasile/mimetype/internal/markup" "golang.org/x/net/html"
"github.com/gabriel-vasile/mimetype/internal/scan"
) )
const ( const (
@@ -140,31 +141,20 @@ func FromXML(content []byte) string {
} }
return FromPlain(content) return FromPlain(content)
} }
func fromXML(s scan.Bytes) string { func fromXML(content []byte) string {
xml := []byte("<?XML") content = trimLWS(content)
lxml := len(xml) dec := xml.NewDecoder(bytes.NewReader(content))
for { rawT, err := dec.RawToken()
if len(s) == 0 { if err != nil {
return "" return ""
} }
for scan.ByteIsWS(s.Peek()) {
s.Advance(1) t, ok := rawT.(xml.ProcInst)
} if !ok {
if len(s) <= lxml {
return "" return ""
} }
if !s.Match(xml, scan.IgnoreCase) {
s = s[1:] // safe to slice instead of s.Advance(1) because bounds are checked return strings.ToLower(xmlEncoding(string(t.Inst)))
continue
}
aName, aVal, hasMore := "", "", true
for hasMore {
aName, aVal, hasMore = markup.GetAnAttribute(&s)
if aName == "encoding" && aVal != "" {
return aVal
}
}
}
} }
// FromHTML returns the charset of an HTML document. It first looks if a BOM is // FromHTML returns the charset of an HTML document. It first looks if a BOM is
@@ -181,65 +171,57 @@ func FromHTML(content []byte) string {
return FromPlain(content) return FromPlain(content)
} }
func fromHTML(s scan.Bytes) string { func fromHTML(content []byte) string {
z := html.NewTokenizer(bytes.NewReader(content))
for {
switch z.Next() {
case html.ErrorToken:
return ""
case html.StartTagToken, html.SelfClosingTagToken:
tagName, hasAttr := z.TagName()
if !bytes.Equal(tagName, []byte("meta")) {
continue
}
attrList := make(map[string]bool)
gotPragma := false
const ( const (
dontKnow = iota dontKnow = iota
doNeedPragma doNeedPragma
doNotNeedPragma doNotNeedPragma
) )
meta := []byte("<META")
body := []byte("<BODY")
lmeta := len(meta)
for {
if markup.SkipAComment(&s) {
continue
}
if len(s) <= lmeta {
return ""
}
// Abort when <body is reached.
if s.Match(body, scan.IgnoreCase) {
return ""
}
if !s.Match(meta, scan.IgnoreCase) {
s = s[1:] // safe to slice instead of s.Advance(1) because bounds are checked
continue
}
s = s[lmeta:]
c := s.Pop()
if c == 0 || (!scan.ByteIsWS(c) && c != '/') {
return ""
}
attrList := make(map[string]bool)
gotPragma := false
needPragma := dontKnow needPragma := dontKnow
charset := "" name := ""
aName, aVal, hasMore := "", "", true for hasAttr {
for hasMore { var key, val []byte
aName, aVal, hasMore = markup.GetAnAttribute(&s) key, val, hasAttr = z.TagAttr()
if attrList[aName] { ks := string(key)
if attrList[ks] {
continue continue
} }
// processing step attrList[ks] = true
if len(aName) == 0 && len(aVal) == 0 { for i, c := range val {
if needPragma == dontKnow { if 'A' <= c && c <= 'Z' {
continue val[i] = c + 0x20
}
if needPragma == doNeedPragma && !gotPragma {
continue
} }
} }
attrList[aName] = true
if aName == "http-equiv" && scan.Bytes(aVal).Match([]byte("CONTENT-TYPE"), scan.IgnoreCase) { switch ks {
case "http-equiv":
if bytes.Equal(val, []byte("content-type")) {
gotPragma = true gotPragma = true
} else if aName == "content" { }
charset = string(extractCharsetFromMeta(scan.Bytes(aVal)))
if len(charset) != 0 { case "content":
name = fromMetaElement(string(val))
if name != "" {
needPragma = doNeedPragma needPragma = doNeedPragma
} }
} else if aName == "charset" {
charset = aVal case "charset":
name = string(val)
needPragma = doNotNeedPragma needPragma = doNotNeedPragma
} }
} }
@@ -248,36 +230,80 @@ func fromHTML(s scan.Bytes) string {
continue continue
} }
return charset if strings.HasPrefix(name, "utf-16") {
name = "utf-8"
}
return name
}
} }
} }
// https://html.spec.whatwg.org/multipage/urls-and-fetching.html#algorithm-for-extracting-a-character-encoding-from-a-meta-element func fromMetaElement(s string) string {
func extractCharsetFromMeta(s scan.Bytes) []byte { for s != "" {
for { csLoc := strings.Index(s, "charset")
i := bytes.Index(s, []byte("charset")) if csLoc == -1 {
if i == -1 { return ""
return nil
} }
s.Advance(i + len("charset")) s = s[csLoc+len("charset"):]
for scan.ByteIsWS(s.Peek()) { s = strings.TrimLeft(s, " \t\n\f\r")
s.Advance(1) if !strings.HasPrefix(s, "=") {
}
if s.Pop() != '=' {
continue continue
} }
for scan.ByteIsWS(s.Peek()) { s = s[1:]
s.Advance(1) s = strings.TrimLeft(s, " \t\n\f\r")
if s == "" {
return ""
} }
quote := s.Peek() if q := s[0]; q == '"' || q == '\'' {
if quote == 0 { s = s[1:]
return nil closeQuote := strings.IndexRune(s, rune(q))
if closeQuote == -1 {
return ""
} }
if quote == '"' || quote == '\'' { return s[:closeQuote]
s.Advance(1)
return bytes.TrimSpace(s.PopUntil(quote))
} }
return bytes.TrimSpace(s.PopUntil(';', '\t', '\n', '\x0c', '\r', ' ')) end := strings.IndexAny(s, "; \t\n\f\r")
if end == -1 {
end = len(s)
} }
return s[:end]
}
return ""
}
func xmlEncoding(s string) string {
param := "encoding="
idx := strings.Index(s, param)
if idx == -1 {
return ""
}
v := s[idx+len(param):]
if v == "" {
return ""
}
if v[0] != '\'' && v[0] != '"' {
return ""
}
idx = strings.IndexRune(v[1:], rune(v[0]))
if idx == -1 {
return ""
}
return v[1 : idx+1]
}
// trimLWS trims whitespace from beginning of the input.
// TODO: find a way to call trimLWS once per detection instead of once in each
// detector which needs the trimmed input.
func trimLWS(in []byte) []byte {
firstNonWS := 0
for ; firstNonWS < len(in) && isWS(in[firstNonWS]); firstNonWS++ {
}
return in[firstNonWS:]
}
func isWS(b byte) bool {
return b == '\t' || b == '\n' || b == '\x0c' || b == '\r' || b == ' '
} }
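A self-contained sketch of the encoding extraction described above; the helper mirrors xmlEncoding and the sample declaration is made up:

package main

import (
	"fmt"
	"strings"
)

// xmlEncoding pulls the quoted value of the encoding pseudo-attribute out of
// an XML declaration, returning "" when it is absent or malformed.
func xmlEncoding(s string) string {
	const param = "encoding="
	idx := strings.Index(s, param)
	if idx == -1 {
		return ""
	}
	v := s[idx+len(param):]
	if v == "" || (v[0] != '\'' && v[0] != '"') {
		return ""
	}
	end := strings.IndexByte(v[1:], v[0])
	if end == -1 {
		return ""
	}
	return v[1 : end+1]
}

func main() {
	fmt.Println(xmlEncoding(`version="1.0" encoding="ISO-8859-1"`)) // ISO-8859-1
}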


@@ -52,15 +52,10 @@ func InstallShieldCab(raw []byte, _ uint32) bool {
} }
// Zstd matches a Zstandard archive file. // Zstd matches a Zstandard archive file.
// https://github.com/facebook/zstd/blob/dev/doc/zstd_compression_format.md
func Zstd(raw []byte, limit uint32) bool { func Zstd(raw []byte, limit uint32) bool {
if len(raw) < 4 { return len(raw) >= 4 &&
return false (0x22 <= raw[0] && raw[0] <= 0x28 || raw[0] == 0x1E) && // Different Zstandard versions.
} bytes.HasPrefix(raw[1:], []byte{0xB5, 0x2F, 0xFD})
sig := binary.LittleEndian.Uint32(raw)
// Check for Zstandard frames and skippable frames.
return (sig >= 0xFD2FB522 && sig <= 0xFD2FB528) ||
(sig >= 0x184D2A50 && sig <= 0x184D2A5F)
} }
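For reference, a minimal sketch of the frame-magic ranges discussed above: regular Zstandard frames have little-endian magics 0xFD2FB522–0xFD2FB528 and skippable frames 0x184D2A50–0x184D2A5F. The sample inputs are fabricated:

package main

import (
	"encoding/binary"
	"fmt"
)

// zstdMagic reports whether b starts with a Zstandard frame or a skippable
// frame, using the two ranges above.
func zstdMagic(b []byte) bool {
	if len(b) < 4 {
		return false
	}
	sig := binary.LittleEndian.Uint32(b)
	return (sig >= 0xFD2FB522 && sig <= 0xFD2FB528) ||
		(sig >= 0x184D2A50 && sig <= 0x184D2A5F)
}

func main() {
	frame := []byte{0x28, 0xB5, 0x2F, 0xFD, 0x00}       // current frame magic
	skippable := []byte{0x50, 0x2A, 0x4D, 0x18, 0x00}   // skippable frame magic
	fmt.Println(zstdMagic(frame), zstdMagic(skippable)) // true true
}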
// CRX matches a Chrome extension file: a zip archive prepended by a package header. // CRX matches a Chrome extension file: a zip archive prepended by a package header.
@@ -79,85 +74,51 @@ func CRX(raw []byte, limit uint32) bool {
} }
// Tar matches a (t)ape (ar)chive file. // Tar matches a (t)ape (ar)chive file.
// Tar files are divided into 512-byte records. The first record contains a
// 257-byte header padded with NUL.
func Tar(raw []byte, _ uint32) bool { func Tar(raw []byte, _ uint32) bool {
const sizeRecord = 512 // The "magic" header field for files in UStar (POSIX IEEE P1003.1) archives
// has the prefix "ustar". The values of the remaining bytes in this field vary
// The structure of a tar header: // by archiver implementation.
// type TarHeader struct { if len(raw) >= 512 && bytes.HasPrefix(raw[257:], []byte{0x75, 0x73, 0x74, 0x61, 0x72}) {
// Name [100]byte return true
// Mode [8]byte
// Uid [8]byte
// Gid [8]byte
// Size [12]byte
// Mtime [12]byte
// Chksum [8]byte
// Linkflag byte
// Linkname [100]byte
// Magic [8]byte
// Uname [32]byte
// Gname [32]byte
// Devmajor [8]byte
// Devminor [8]byte
// }
if len(raw) < sizeRecord {
return false
} }
raw = raw[:sizeRecord]
// First 100 bytes of the header represent the file name. if len(raw) < 256 {
// Check if file looks like Gentoo GLEP binary package.
if bytes.Contains(raw[:100], []byte("/gpkg-1\x00")) {
return false return false
} }
// Get the checksum recorded into the file. // The older v7 format has no "magic" field, and therefore must be identified
recsum := tarParseOctal(raw[148:156]) // with heuristics based on legal ranges of values for other header fields:
if recsum == -1 { // https://www.nationalarchives.gov.uk/PRONOM/Format/proFormatSearch.aspx?status=detailReport&id=385&strPageToDisplay=signatures
rules := []struct {
min, max uint8
i int
}{
{0x21, 0xEF, 0},
{0x30, 0x37, 105},
{0x20, 0x37, 106},
{0x00, 0x00, 107},
{0x30, 0x37, 113},
{0x20, 0x37, 114},
{0x00, 0x00, 115},
{0x30, 0x37, 121},
{0x20, 0x37, 122},
{0x00, 0x00, 123},
{0x30, 0x37, 134},
{0x30, 0x37, 146},
{0x30, 0x37, 153},
{0x00, 0x37, 154},
}
for _, r := range rules {
if raw[r.i] < r.min || raw[r.i] > r.max {
return false return false
} }
sum1, sum2 := tarChksum(raw) }
return recsum == sum1 || recsum == sum2
}
// tarParseOctal converts octal string to decimal int. for _, i := range []uint8{135, 147, 155} {
func tarParseOctal(b []byte) int64 { if raw[i] != 0x00 && raw[i] != 0x20 {
// Because unused fields are filled with NULs, we need to skip leading NULs. return false
// Fields may also be padded with spaces or NULs. }
// So we remove leading and trailing NULs and spaces to be sure. }
b = bytes.Trim(b, " \x00")
if len(b) == 0 { return true
return -1
}
ret := int64(0)
for _, b := range b {
if b == 0 {
break
}
if b < '0' || b > '7' {
return -1
}
ret = (ret << 3) | int64(b-'0')
}
return ret
}
// tarChksum computes the checksum for the header block b.
// The actual checksum is written to same b block after it has been calculated.
// Before calculation the bytes from b reserved for checksum have placeholder
// value of ASCII space 0x20.
// POSIX specifies a sum of the unsigned byte values, but the Sun tar used
// signed byte values. We compute and return both.
func tarChksum(b []byte) (unsigned, signed int64) {
for i, c := range b {
if 148 <= i && i < 156 {
c = ' ' // Treat the checksum field itself as all spaces.
}
unsigned += int64(c)
signed += int64(int8(c))
}
return unsigned, signed
} }
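To make the octal-field handling above concrete, a small standalone sketch (the checksum bytes are made up) of how a tar header field is trimmed and parsed base 8:

package main

import (
	"bytes"
	"fmt"
)

// parseOctal mirrors the parsing described above: leading/trailing NULs and
// spaces are trimmed, then the digits are accumulated base 8.
func parseOctal(b []byte) int64 {
	b = bytes.Trim(b, " \x00")
	if len(b) == 0 {
		return -1
	}
	var ret int64
	for _, c := range b {
		if c == 0 {
			break
		}
		if c < '0' || c > '7' {
			return -1
		}
		ret = ret<<3 | int64(c-'0')
	}
	return ret
}

func main() {
	// An 8-byte checksum field: octal digits terminated by a NUL.
	fmt.Println(parseOctal([]byte("0006702\x00"))) // 3522
}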


@@ -21,10 +21,6 @@ var (
SWF = prefix([]byte("CWS"), []byte("FWS"), []byte("ZWS")) SWF = prefix([]byte("CWS"), []byte("FWS"), []byte("ZWS"))
// Torrent has bencoded text in the beginning. // Torrent has bencoded text in the beginning.
Torrent = prefix([]byte("d8:announce")) Torrent = prefix([]byte("d8:announce"))
// PAR1 matches a parquet file.
Par1 = prefix([]byte{0x50, 0x41, 0x52, 0x31})
// CBOR matches a Concise Binary Object Representation https://cbor.io/
CBOR = prefix([]byte{0xD9, 0xD9, 0xF7})
) )
// Java bytecode and Mach-O binaries share the same magic number. // Java bytecode and Mach-O binaries share the same magic number.
@@ -36,7 +32,7 @@ func classOrMachOFat(in []byte) bool {
return false return false
} }
return binary.BigEndian.Uint32(in) == macho.MagicFat return bytes.HasPrefix(in, []byte{0xCA, 0xFE, 0xBA, 0xBE})
} }
// Class matches a java class file. // Class matches a java class file.
@@ -46,7 +42,7 @@ func Class(raw []byte, limit uint32) bool {
// MachO matches Mach-O binaries format. // MachO matches Mach-O binaries format.
func MachO(raw []byte, limit uint32) bool { func MachO(raw []byte, limit uint32) bool {
if classOrMachOFat(raw) && raw[7] < 0x14 { if classOrMachOFat(raw) && raw[7] < 20 {
return true return true
} }
@@ -71,7 +67,7 @@ func Dbf(raw []byte, limit uint32) bool {
} }
// 3rd and 4th bytes contain the last update month and day of month. // 3rd and 4th bytes contain the last update month and day of month.
if raw[2] == 0 || raw[2] > 12 || raw[3] == 0 || raw[3] > 31 { if !(0 < raw[2] && raw[2] < 13 && 0 < raw[3] && raw[3] < 32) {
return false return false
} }
@@ -153,7 +149,7 @@ func Marc(raw []byte, limit uint32) bool {
return bytes.Contains(raw[:min(2048, len(raw))], []byte{0x1E}) return bytes.Contains(raw[:min(2048, len(raw))], []byte{0x1E})
} }
// GLB matches a glTF model format file. // Glb matches a glTF model format file.
// GLB is the binary file format representation of 3D models saved in // GLB is the binary file format representation of 3D models saved in
// the GL transmission Format (glTF). // the GL transmission Format (glTF).
// GLB uses little endian and its header structure is as follows: // GLB uses little endian and its header structure is as follows:
@@ -168,13 +164,12 @@ func Marc(raw []byte, limit uint32) bool {
// //
// [glTF specification]: https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html // [glTF specification]: https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html
// [IANA glTF entry]: https://www.iana.org/assignments/media-types/model/gltf-binary // [IANA glTF entry]: https://www.iana.org/assignments/media-types/model/gltf-binary
var GLB = prefix([]byte("\x67\x6C\x54\x46\x02\x00\x00\x00"), var Glb = prefix([]byte("\x67\x6C\x54\x46\x02\x00\x00\x00"),
[]byte("\x67\x6C\x54\x46\x01\x00\x00\x00")) []byte("\x67\x6C\x54\x46\x01\x00\x00\x00"))
// TzIf matches a Time Zone Information Format (TZif) file. // TzIf matches a Time Zone Information Format (TZif) file.
// See more: https://tools.ietf.org/id/draft-murchison-tzdist-tzif-00.html#rfc.section.3 // See more: https://tools.ietf.org/id/draft-murchison-tzdist-tzif-00.html#rfc.section.3
// Its header structure is shown below: // Its header structure is shown below:
//
// +---------------+---+ // +---------------+---+
// | magic (4) | <-+-- version (1) // | magic (4) | <-+-- version (1)
// +---------------+---+---------------------------------------+ // +---------------+---+---------------------------------------+


@@ -1,11 +1,18 @@
package magic package magic
import ( import "bytes"
"bytes"
"encoding/binary"
)
var ( var (
// Pdf matches a Portable Document Format file.
// https://github.com/file/file/blob/11010cc805546a3e35597e67e1129a481aed40e8/magic/Magdir/pdf
Pdf = prefix(
// usual pdf signature
[]byte("%PDF-"),
// new-line prefixed signature
[]byte("\012%PDF-"),
// UTF-8 BOM prefixed signature
[]byte("\xef\xbb\xbf%PDF-"),
)
// Fdf matches a Forms Data Format file. // Fdf matches a Forms Data Format file.
Fdf = prefix([]byte("%FDF")) Fdf = prefix([]byte("%FDF"))
// Mobi matches a Mobi file. // Mobi matches a Mobi file.
@@ -14,18 +21,8 @@ var (
Lit = prefix([]byte("ITOLITLS")) Lit = prefix([]byte("ITOLITLS"))
) )
// PDF matches a Portable Document Format file.
// The %PDF- header should be the first thing inside the file but many
// implementations don't follow the rule. The PDF spec at Appendix H says the
// signature can be prepended by anything.
// https://bugs.astron.com/view.php?id=446
func PDF(raw []byte, _ uint32) bool {
raw = raw[:min(len(raw), 1024)]
return bytes.Contains(raw, []byte("%PDF-"))
}
// DjVu matches a DjVu file. // DjVu matches a DjVu file.
func DjVu(raw []byte, _ uint32) bool { func DjVu(raw []byte, limit uint32) bool {
if len(raw) < 12 { if len(raw) < 12 {
return false return false
} }
@@ -39,7 +36,7 @@ func DjVu(raw []byte, _ uint32) bool {
} }
// P7s matches an .p7s signature File (PEM, Base64). // P7s matches an .p7s signature File (PEM, Base64).
func P7s(raw []byte, _ uint32) bool { func P7s(raw []byte, limit uint32) bool {
// Check for PEM Encoding. // Check for PEM Encoding.
if bytes.HasPrefix(raw, []byte("-----BEGIN PKCS7")) { if bytes.HasPrefix(raw, []byte("-----BEGIN PKCS7")) {
return true return true
@@ -63,21 +60,3 @@ func P7s(raw []byte, _ uint32) bool {
return false return false
} }
// Lotus123 matches a Lotus 1-2-3 spreadsheet document.
func Lotus123(raw []byte, _ uint32) bool {
if len(raw) <= 20 {
return false
}
version := binary.BigEndian.Uint32(raw)
if version == 0x00000200 {
return raw[6] != 0 && raw[7] == 0
}
return version == 0x00001a00 && raw[20] > 0 && raw[20] < 32
}
// CHM matches a Microsoft Compiled HTML Help file.
func CHM(raw []byte, _ uint32) bool {
return bytes.HasPrefix(raw, []byte("ITSF\003\000\000\000\x60\000\000\000"))
}


@@ -1,14 +1,22 @@
package magic package magic
import ( import "bytes"
"bytes"
)
var ( var (
// AVIF matches an AV1 Image File Format still or animated. // AVIF matches an AV1 Image File Format still or animated.
// Wikipedia page seems outdated listing image/avif-sequence for animations. // Wikipedia page seems outdated listing image/avif-sequence for animations.
// https://github.com/AOMediaCodec/av1-avif/issues/59 // https://github.com/AOMediaCodec/av1-avif/issues/59
AVIF = ftyp([]byte("avif"), []byte("avis")) AVIF = ftyp([]byte("avif"), []byte("avis"))
// Mp4 matches an MP4 file.
Mp4 = ftyp(
[]byte("avc1"), []byte("dash"), []byte("iso2"), []byte("iso3"),
[]byte("iso4"), []byte("iso5"), []byte("iso6"), []byte("isom"),
[]byte("mmp4"), []byte("mp41"), []byte("mp42"), []byte("mp4v"),
[]byte("mp71"), []byte("MSNV"), []byte("NDAS"), []byte("NDSC"),
[]byte("NSDC"), []byte("NSDH"), []byte("NDSM"), []byte("NDSP"),
[]byte("NDSS"), []byte("NDXC"), []byte("NDXH"), []byte("NDXM"),
[]byte("NDXP"), []byte("NDXS"), []byte("F4V "), []byte("F4P "),
)
// ThreeGP matches a 3GPP file. // ThreeGP matches a 3GPP file.
ThreeGP = ftyp( ThreeGP = ftyp(
[]byte("3gp1"), []byte("3gp2"), []byte("3gp3"), []byte("3gp4"), []byte("3gp1"), []byte("3gp2"), []byte("3gp3"), []byte("3gp4"),
@@ -45,17 +53,6 @@ var (
Heif = ftyp([]byte("mif1"), []byte("heim"), []byte("heis"), []byte("avic")) Heif = ftyp([]byte("mif1"), []byte("heim"), []byte("heis"), []byte("avic"))
// HeifSequence matches a High Efficiency Image File Format (HEIF) file sequence. // HeifSequence matches a High Efficiency Image File Format (HEIF) file sequence.
HeifSequence = ftyp([]byte("msf1"), []byte("hevm"), []byte("hevs"), []byte("avcs")) HeifSequence = ftyp([]byte("msf1"), []byte("hevm"), []byte("hevs"), []byte("avcs"))
// Mj2 matches a Motion JPEG 2000 file: https://en.wikipedia.org/wiki/Motion_JPEG_2000.
Mj2 = ftyp([]byte("mj2s"), []byte("mjp2"), []byte("MFSM"), []byte("MGSV"))
// Dvb matches a Digital Video Broadcasting file: https://dvb.org.
// https://cconcolato.github.io/mp4ra/filetype.html
// https://github.com/file/file/blob/512840337ead1076519332d24fefcaa8fac36e06/magic/Magdir/animation#L135-L154
Dvb = ftyp(
[]byte("dby1"), []byte("dsms"), []byte("dts1"), []byte("dts2"),
[]byte("dts3"), []byte("dxo "), []byte("dmb1"), []byte("dmpf"),
[]byte("drc1"), []byte("dv1a"), []byte("dv1b"), []byte("dv2a"),
[]byte("dv2b"), []byte("dv3a"), []byte("dv3b"), []byte("dvr1"),
[]byte("dvt1"), []byte("emsg"))
// TODO: add support for remaining video formats at ftyps.com. // TODO: add support for remaining video formats at ftyps.com.
) )
@@ -89,21 +86,3 @@ func QuickTime(raw []byte, _ uint32) bool {
} }
return bytes.Equal(raw[:8], []byte("\x00\x00\x00\x08wide")) return bytes.Equal(raw[:8], []byte("\x00\x00\x00\x08wide"))
} }
// Mp4 detects an .mp4 file. Mp4 detection only does a basic ftyp check.
// Mp4 has many registered and unregistered code points so it's hard to keep track
// of them all. Detection defaults to video/mp4 for all ftyp files.
// ISO_IEC_14496-12 is the specification for the iso container.
func Mp4(raw []byte, _ uint32) bool {
if len(raw) < 12 {
return false
}
// ftyps are made out of boxes. The first 4 bytes of a box represent its size
// as a big-endian uint32. The first box is the ftyp box and it is small in
// size. Check that the most significant byte is 0 to filter out false-positive
// text files that happen to contain the string "ftyp" at index 4.
if raw[0] != 0 {
return false
}
return bytes.Equal(raw[4:8], []byte("ftyp"))
}
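A minimal sketch of the box check explained in the comment above (the helper name is mine): it validates the most significant size byte and the "ftyp" marker, then reads the brand:

package main

import (
	"bytes"
	"fmt"
)

// looksLikeFtyp checks that bytes 0-3 look like a small big-endian box size
// (most significant byte zero) and that bytes 4-7 spell "ftyp"; the brand
// follows at bytes 8-11.
func looksLikeFtyp(raw []byte) (brand string, ok bool) {
	if len(raw) < 12 || raw[0] != 0 || !bytes.Equal(raw[4:8], []byte("ftyp")) {
		return "", false
	}
	return string(raw[8:12]), true
}

func main() {
	sample := []byte("\x00\x00\x00\x18ftypisomisom")
	fmt.Println(looksLikeFtyp(sample)) // isom true
}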


@@ -12,13 +12,13 @@ func Shp(raw []byte, limit uint32) bool {
return false return false
} }
if binary.BigEndian.Uint32(raw[0:4]) != 9994 || if !(binary.BigEndian.Uint32(raw[0:4]) == 9994 &&
binary.BigEndian.Uint32(raw[4:8]) != 0 || binary.BigEndian.Uint32(raw[4:8]) == 0 &&
binary.BigEndian.Uint32(raw[8:12]) != 0 || binary.BigEndian.Uint32(raw[8:12]) == 0 &&
binary.BigEndian.Uint32(raw[12:16]) != 0 || binary.BigEndian.Uint32(raw[12:16]) == 0 &&
binary.BigEndian.Uint32(raw[16:20]) != 0 || binary.BigEndian.Uint32(raw[16:20]) == 0 &&
binary.BigEndian.Uint32(raw[20:24]) != 0 || binary.BigEndian.Uint32(raw[20:24]) == 0 &&
binary.LittleEndian.Uint32(raw[28:32]) != 1000 { binary.LittleEndian.Uint32(raw[28:32]) == 1000) {
return false return false
} }


@@ -4,8 +4,6 @@ package magic
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"github.com/gabriel-vasile/mimetype/internal/scan"
) )
type ( type (
@@ -76,13 +74,12 @@ func ciCheck(sig, raw []byte) bool {
// matches the raw input. // matches the raw input.
func xml(sigs ...xmlSig) Detector { func xml(sigs ...xmlSig) Detector {
return func(raw []byte, limit uint32) bool { return func(raw []byte, limit uint32) bool {
b := scan.Bytes(raw) raw = trimLWS(raw)
b.TrimLWS() if len(raw) == 0 {
if len(b) == 0 {
return false return false
} }
for _, s := range sigs { for _, s := range sigs {
if xmlCheck(s, b) { if xmlCheck(s, raw) {
return true return true
} }
} }
@@ -107,19 +104,19 @@ func xmlCheck(sig xmlSig, raw []byte) bool {
// matches the raw input. // matches the raw input.
func markup(sigs ...[]byte) Detector { func markup(sigs ...[]byte) Detector {
return func(raw []byte, limit uint32) bool { return func(raw []byte, limit uint32) bool {
b := scan.Bytes(raw) if bytes.HasPrefix(raw, []byte{0xEF, 0xBB, 0xBF}) {
if bytes.HasPrefix(b, []byte{0xEF, 0xBB, 0xBF}) {
// We skip the UTF-8 BOM if present to ensure we correctly // We skip the UTF-8 BOM if present to ensure we correctly
// process any leading whitespace. The presence of the BOM // process any leading whitespace. The presence of the BOM
// is taken into account during charset detection in charset.go. // is taken into account during charset detection in charset.go.
b.Advance(3) raw = trimLWS(raw[3:])
} else {
raw = trimLWS(raw)
} }
b.TrimLWS() if len(raw) == 0 {
if len(b) == 0 {
return false return false
} }
for _, s := range sigs { for _, s := range sigs {
if markupCheck(s, b) { if markupCheck(s, raw) {
return true return true
} }
} }
@@ -142,7 +139,7 @@ func markupCheck(sig, raw []byte) bool {
} }
} }
// Next byte must be space or right angle bracket. // Next byte must be space or right angle bracket.
if db := raw[len(sig)]; !scan.ByteIsWS(db) && db != '>' { if db := raw[len(sig)]; db != ' ' && db != '>' {
return false return false
} }
@@ -157,7 +154,7 @@ func ftyp(sigs ...[]byte) Detector {
return false return false
} }
for _, s := range sigs { for _, s := range sigs {
if bytes.Equal(raw[8:12], s) { if bytes.Equal(raw[4:12], append([]byte("ftyp"), s...)) {
return true return true
} }
} }
@@ -186,10 +183,8 @@ func newXMLSig(localName, xmlns string) xmlSig {
// /usr/bin/env is the interpreter, php is the first and only argument. // /usr/bin/env is the interpreter, php is the first and only argument.
func shebang(sigs ...[]byte) Detector { func shebang(sigs ...[]byte) Detector {
return func(raw []byte, limit uint32) bool { return func(raw []byte, limit uint32) bool {
b := scan.Bytes(raw)
line := b.Line()
for _, s := range sigs { for _, s := range sigs {
if shebangCheck(s, line) { if shebangCheck(s, firstLine(raw)) {
return true return true
} }
} }
@@ -197,7 +192,7 @@ func shebang(sigs ...[]byte) Detector {
} }
} }
func shebangCheck(sig []byte, raw scan.Bytes) bool { func shebangCheck(sig, raw []byte) bool {
if len(raw) < len(sig)+2 { if len(raw) < len(sig)+2 {
return false return false
} }
@@ -205,8 +200,42 @@ func shebangCheck(sig []byte, raw scan.Bytes) bool {
return false return false
} }
raw.Advance(2) // skip #! we checked above return bytes.Equal(trimLWS(trimRWS(raw[2:])), sig)
raw.TrimLWS() }
raw.TrimRWS()
return bytes.Equal(raw, sig) // trimLWS trims whitespace from beginning of the input.
func trimLWS(in []byte) []byte {
firstNonWS := 0
for ; firstNonWS < len(in) && isWS(in[firstNonWS]); firstNonWS++ {
}
return in[firstNonWS:]
}
// trimRWS trims whitespace from the end of the input.
func trimRWS(in []byte) []byte {
lastNonWS := len(in) - 1
for ; lastNonWS > 0 && isWS(in[lastNonWS]); lastNonWS-- {
}
return in[:lastNonWS+1]
}
func firstLine(in []byte) []byte {
lineEnd := 0
for ; lineEnd < len(in) && in[lineEnd] != '\n'; lineEnd++ {
}
return in[:lineEnd]
}
func isWS(b byte) bool {
return b == '\t' || b == '\n' || b == '\x0c' || b == '\r' || b == ' '
}
func min(a, b int) int {
if a < b {
return a
}
return b
} }
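A standalone sketch of the shebang matching built from the helpers above, with bytes.TrimSpace standing in for the trimLWS/trimRWS pair:

package main

import (
	"bytes"
	"fmt"
)

// shebangMatches takes the first line, requires the "#!" prefix, trims the
// surrounding whitespace and compares the rest against the signature.
func shebangMatches(sig, raw []byte) bool {
	if i := bytes.IndexByte(raw, '\n'); i != -1 {
		raw = raw[:i]
	}
	if len(raw) < len(sig)+2 || raw[0] != '#' || raw[1] != '!' {
		return false
	}
	return bytes.Equal(bytes.TrimSpace(raw[2:]), sig)
}

func main() {
	script := []byte("#! /usr/bin/env python\nprint('hi')\n")
	fmt.Println(shebangMatches([]byte("/usr/bin/env python"), script)) // true
}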


@@ -5,36 +5,58 @@ import (
"encoding/binary" "encoding/binary"
) )
var (
xlsxSigFiles = []string{
"xl/worksheets/",
"xl/drawings/",
"xl/theme/",
"xl/_rels/",
"xl/styles.xml",
"xl/workbook.xml",
"xl/sharedStrings.xml",
}
docxSigFiles = []string{
"word/media/",
"word/_rels/document.xml.rels",
"word/document.xml",
"word/styles.xml",
"word/fontTable.xml",
"word/settings.xml",
"word/numbering.xml",
"word/header",
"word/footer",
}
pptxSigFiles = []string{
"ppt/slides/",
"ppt/media/",
"ppt/slideLayouts/",
"ppt/theme/",
"ppt/slideMasters/",
"ppt/tags/",
"ppt/notesMasters/",
"ppt/_rels/",
"ppt/handoutMasters/",
"ppt/notesSlides/",
"ppt/presentation.xml",
"ppt/tableStyles.xml",
"ppt/presProps.xml",
"ppt/viewProps.xml",
}
)
// Xlsx matches a Microsoft Excel 2007 file. // Xlsx matches a Microsoft Excel 2007 file.
func Xlsx(raw []byte, limit uint32) bool { func Xlsx(raw []byte, limit uint32) bool {
return msoxml(raw, zipEntries{{ return zipContains(raw, xlsxSigFiles...)
name: []byte("xl/"),
dir: true,
}}, 100)
} }
// Docx matches a Microsoft Word 2007 file. // Docx matches a Microsoft Word 2007 file.
func Docx(raw []byte, limit uint32) bool { func Docx(raw []byte, limit uint32) bool {
return msoxml(raw, zipEntries{{ return zipContains(raw, docxSigFiles...)
name: []byte("word/"),
dir: true,
}}, 100)
} }
// Pptx matches a Microsoft PowerPoint 2007 file. // Pptx matches a Microsoft PowerPoint 2007 file.
func Pptx(raw []byte, limit uint32) bool { func Pptx(raw []byte, limit uint32) bool {
return msoxml(raw, zipEntries{{ return zipContains(raw, pptxSigFiles...)
name: []byte("ppt/"),
dir: true,
}}, 100)
}
// Visio matches a Microsoft Visio 2013+ file.
func Visio(raw []byte, limit uint32) bool {
return msoxml(raw, zipEntries{{
name: []byte("visio/"),
dir: true,
}}, 100)
} }
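For illustration only, the same "does the archive contain these entry names" idea expressed with the standard archive/zip package rather than the detectors' own header scanning; the in-memory docx-like archive is fabricated:

package main

import (
	"archive/zip"
	"bytes"
	"fmt"
	"strings"
)

// containsEntry reports whether the archive has an entry whose name starts
// with any of the given prefixes.
func containsEntry(raw []byte, prefixes ...string) bool {
	zr, err := zip.NewReader(bytes.NewReader(raw), int64(len(raw)))
	if err != nil {
		return false
	}
	for _, f := range zr.File {
		for _, p := range prefixes {
			if strings.HasPrefix(f.Name, p) {
				return true
			}
		}
	}
	return false
}

func main() {
	// Build a tiny archive that mimics a docx layout.
	var buf bytes.Buffer
	zw := zip.NewWriter(&buf)
	for _, name := range []string{"[Content_Types].xml", "word/document.xml"} {
		w, _ := zw.Create(name)
		w.Write([]byte("<xml/>"))
	}
	zw.Close()
	fmt.Println(containsEntry(buf.Bytes(), "word/")) // true
}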
// Ole matches an Open Linking and Embedding file. // Ole matches an Open Linking and Embedding file.
@@ -174,14 +196,6 @@ func Msi(raw []byte, limit uint32) bool {
}) })
} }
// One matches a Microsoft OneNote file.
func One(raw []byte, limit uint32) bool {
return bytes.HasPrefix(raw, []byte{
0xe4, 0x52, 0x5c, 0x7b, 0x8c, 0xd8, 0xa7, 0x4d,
0xae, 0xb1, 0x53, 0x78, 0xd0, 0x29, 0x96, 0xd3,
})
}
// Helper to match by a specific CLSID of a compound file. // Helper to match by a specific CLSID of a compound file.
// //
// http://fileformats.archiveteam.org/wiki/Microsoft_Compound_File // http://fileformats.archiveteam.org/wiki/Microsoft_Compound_File


@@ -1,13 +1,13 @@
package magic package magic
import ( import (
"bufio"
"bytes" "bytes"
"strings"
"time" "time"
"github.com/gabriel-vasile/mimetype/internal/charset" "github.com/gabriel-vasile/mimetype/internal/charset"
"github.com/gabriel-vasile/mimetype/internal/json" "github.com/gabriel-vasile/mimetype/internal/json"
mkup "github.com/gabriel-vasile/mimetype/internal/markup"
"github.com/gabriel-vasile/mimetype/internal/scan"
) )
var ( var (
@@ -29,7 +29,6 @@ var (
[]byte("<BODY"), []byte("<BODY"),
[]byte("<BR"), []byte("<BR"),
[]byte("<P"), []byte("<P"),
[]byte("<!--"),
) )
// XML matches an Extensible Markup Language file. // XML matches an Extensible Markup Language file.
XML = markup([]byte("<?XML")) XML = markup([]byte("<?XML"))
@@ -108,18 +107,6 @@ var (
[]byte("/usr/bin/python"), []byte("/usr/bin/python"),
[]byte("/usr/local/bin/python"), []byte("/usr/local/bin/python"),
[]byte("/usr/bin/env python"), []byte("/usr/bin/env python"),
[]byte("/usr/bin/python2"),
[]byte("/usr/local/bin/python2"),
[]byte("/usr/bin/env python2"),
[]byte("/usr/bin/python3"),
[]byte("/usr/local/bin/python3"),
[]byte("/usr/bin/env python3"),
)
// Ruby matches a Ruby programming language file.
Ruby = shebang(
[]byte("/usr/bin/ruby"),
[]byte("/usr/local/bin/ruby"),
[]byte("/usr/bin/env ruby"),
) )
// Tcl matches a Tcl programming language file. // Tcl matches a Tcl programming language file.
Tcl = shebang( Tcl = shebang(
@@ -134,43 +121,20 @@ var (
[]byte("/usr/bin/env wish"), []byte("/usr/bin/env wish"),
) )
// Rtf matches a Rich Text Format file. // Rtf matches a Rich Text Format file.
Rtf = prefix([]byte("{\\rtf")) Rtf = prefix([]byte("{\\rtf1"))
// Shell matches a shell script file.
Shell = shebang(
[]byte("/bin/sh"),
[]byte("/bin/bash"),
[]byte("/usr/local/bin/bash"),
[]byte("/usr/bin/env bash"),
[]byte("/bin/csh"),
[]byte("/usr/local/bin/csh"),
[]byte("/usr/bin/env csh"),
[]byte("/bin/dash"),
[]byte("/usr/local/bin/dash"),
[]byte("/usr/bin/env dash"),
[]byte("/bin/ksh"),
[]byte("/usr/local/bin/ksh"),
[]byte("/usr/bin/env ksh"),
[]byte("/bin/tcsh"),
[]byte("/usr/local/bin/tcsh"),
[]byte("/usr/bin/env tcsh"),
[]byte("/bin/zsh"),
[]byte("/usr/local/bin/zsh"),
[]byte("/usr/bin/env zsh"),
)
) )
// Text matches a plain text file. // Text matches a plain text file.
// //
// TODO: This function does not parse BOM-less UTF16 and UTF32 files. Not really // TODO: This function does not parse BOM-less UTF16 and UTF32 files. Not really
// sure it should. Linux file utility also requires a BOM for UTF16 and UTF32. // sure it should. Linux file utility also requires a BOM for UTF16 and UTF32.
func Text(raw []byte, _ uint32) bool { func Text(raw []byte, limit uint32) bool {
// First look for BOM. // First look for BOM.
if cset := charset.FromBOM(raw); cset != "" { if cset := charset.FromBOM(raw); cset != "" {
return true return true
} }
// Binary data bytes as defined here: https://mimesniff.spec.whatwg.org/#binary-data-byte // Binary data bytes as defined here: https://mimesniff.spec.whatwg.org/#binary-data-byte
for i := 0; i < min(len(raw), 4096); i++ { for _, b := range raw {
b := raw[i]
if b <= 0x08 || if b <= 0x08 ||
b == 0x0B || b == 0x0B ||
0x0E <= b && b <= 0x1A || 0x0E <= b && b <= 0x1A ||
@@ -181,14 +145,6 @@ func Text(raw []byte, _ uint32) bool {
return true return true
} }
// XHTML matches an XHTML file. This check depends on the XML check to have passed.
func XHTML(raw []byte, limit uint32) bool {
raw = raw[:min(len(raw), 4096)]
b := scan.Bytes(raw)
return b.Search([]byte("<!DOCTYPE HTML"), scan.CompactWS|scan.IgnoreCase) != -1 ||
b.Search([]byte("<HTML XMLNS="), scan.CompactWS|scan.IgnoreCase) != -1
}
// Php matches a PHP: Hypertext Preprocessor file. // Php matches a PHP: Hypertext Preprocessor file.
func Php(raw []byte, limit uint32) bool { func Php(raw []byte, limit uint32) bool {
if res := phpPageF(raw, limit); res { if res := phpPageF(raw, limit); res {
@@ -199,180 +155,189 @@ func Php(raw []byte, limit uint32) bool {
// JSON matches a JavaScript Object Notation file. // JSON matches a JavaScript Object Notation file.
func JSON(raw []byte, limit uint32) bool { func JSON(raw []byte, limit uint32) bool {
raw = trimLWS(raw)
// #175 A single JSON string, number or bool is not considered JSON. // #175 A single JSON string, number or bool is not considered JSON.
// JSON objects and arrays are reported as JSON. // JSON objects and arrays are reported as JSON.
return jsonHelper(raw, limit, json.QueryNone, json.TokObject|json.TokArray) if len(raw) < 2 || (raw[0] != '[' && raw[0] != '{') {
return false
}
parsed, err := json.Scan(raw)
// If the full file content was provided, check there is no error.
if limit == 0 || len(raw) < int(limit) {
return err == nil
}
// If a section of the file was provided, check if all of it was parsed.
return parsed == len(raw) && len(raw) > 0
} }
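A simplified sketch of the object-or-array rule above, with encoding/json.Valid standing in for the internal scanner; the read-limit subtlety (accepting a truncated but so-far-valid prefix) is deliberately left out:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// looksLikeJSON accepts only objects and arrays, and requires the whole
// input to be valid JSON.
func looksLikeJSON(raw []byte) bool {
	raw = bytes.TrimLeft(raw, " \t\r\n")
	if len(raw) < 2 || (raw[0] != '{' && raw[0] != '[') {
		return false
	}
	return json.Valid(raw)
}

func main() {
	fmt.Println(looksLikeJSON([]byte(`{"a": [1, 2, 3]}`))) // true
	fmt.Println(looksLikeJSON([]byte(`42`)))               // false: bare number
}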
// GeoJSON matches a RFC 7946 GeoJSON file. // GeoJSON matches a RFC 7946 GeoJSON file.
// //
// GeoJSON detection implies searching for key:value pairs like: `"type": "Feature"` // GeoJSON detection implies searching for key:value pairs like: `"type": "Feature"`
// in the input. // in the input.
// BUG(gabriel-vasile): The "type" key should be searched for in the root object.
func GeoJSON(raw []byte, limit uint32) bool { func GeoJSON(raw []byte, limit uint32) bool {
return jsonHelper(raw, limit, json.QueryGeo, json.TokObject) raw = trimLWS(raw)
} if len(raw) == 0 {
// HAR matches a HAR Spec file.
// Spec: http://www.softwareishard.com/blog/har-12-spec/
func HAR(raw []byte, limit uint32) bool {
return jsonHelper(raw, limit, json.QueryHAR, json.TokObject)
}
// GLTF matches a GL Transmission Format (JSON) file.
// Visit [glTF specification] and [IANA glTF entry] for more details.
//
// [glTF specification]: https://registry.khronos.org/glTF/specs/2.0/glTF-2.0.html
// [IANA glTF entry]: https://www.iana.org/assignments/media-types/model/gltf+json
func GLTF(raw []byte, limit uint32) bool {
return jsonHelper(raw, limit, json.QueryGLTF, json.TokObject)
}
func jsonHelper(raw []byte, limit uint32, q string, wantTok int) bool {
if !json.LooksLikeObjectOrArray(raw) {
return false return false
} }
lraw := len(raw) // GeoJSON is always a JSON object, not a JSON array or any other JSON value.
parsed, inspected, firstToken, querySatisfied := json.Parse(q, raw) if raw[0] != '{' {
if !querySatisfied || firstToken&wantTok == 0 {
return false return false
} }
// If the full file content was provided, check that the whole input was parsed. s := []byte(`"type"`)
if limit == 0 || lraw < int(limit) { si, sl := bytes.Index(raw, s), len(s)
return parsed == lraw
if si == -1 {
return false
} }
// If a section of the file was provided, check if all of it was inspected. // If the "type" string is the suffix of the input,
// In other words, check that if there was a problem parsing, that problem // there is no need to search for the value of the key.
// occured at the last byte in the input. if si+sl == len(raw) {
return inspected == lraw && lraw > 0 return false
}
// Skip the "type" part.
raw = raw[si+sl:]
// Skip any whitespace before the colon.
raw = trimLWS(raw)
// Check for colon.
if len(raw) == 0 || raw[0] != ':' {
return false
}
// Skip any whitespace after the colon.
raw = trimLWS(raw[1:])
geoJSONTypes := [][]byte{
[]byte(`"Feature"`),
[]byte(`"FeatureCollection"`),
[]byte(`"Point"`),
[]byte(`"LineString"`),
[]byte(`"Polygon"`),
[]byte(`"MultiPoint"`),
[]byte(`"MultiLineString"`),
[]byte(`"MultiPolygon"`),
[]byte(`"GeometryCollection"`),
}
for _, t := range geoJSONTypes {
if bytes.HasPrefix(raw, t) {
return true
}
}
return false
} }
// NdJSON matches a Newline delimited JSON file. All complete lines from raw // NdJSON matches a Newline delimited JSON file. All complete lines from raw
// must be valid JSON documents meaning they contain one of the valid JSON data // must be valid JSON documents meaning they contain one of the valid JSON data
// types. // types.
func NdJSON(raw []byte, limit uint32) bool { func NdJSON(raw []byte, limit uint32) bool {
lCount, objOrArr := 0, 0 lCount, hasObjOrArr := 0, false
sc := bufio.NewScanner(dropLastLine(raw, limit))
s := scan.Bytes(raw) for sc.Scan() {
s.DropLastLine(limit) l := sc.Bytes()
var l scan.Bytes // Empty lines are allowed in NDJSON.
for len(s) != 0 { if l = trimRWS(trimLWS(l)); len(l) == 0 {
l = s.Line() continue
_, inspected, firstToken, _ := json.Parse(json.QueryNone, l) }
if len(l) != inspected { _, err := json.Scan(l)
if err != nil {
return false return false
} }
if firstToken == json.TokArray || firstToken == json.TokObject { if l[0] == '[' || l[0] == '{' {
objOrArr++ hasObjOrArr = true
} }
lCount++ lCount++
} }
return lCount > 1 && objOrArr > 0 return lCount > 1 && hasObjOrArr
}
// HAR matches a HAR Spec file.
// Spec: http://www.softwareishard.com/blog/har-12-spec/
func HAR(raw []byte, limit uint32) bool {
s := []byte(`"log"`)
si, sl := bytes.Index(raw, s), len(s)
if si == -1 {
return false
}
// If the "log" string is the suffix of the input,
// there is no need to search for the value of the key.
if si+sl == len(raw) {
return false
}
// Skip the "log" part.
raw = raw[si+sl:]
// Skip any whitespace before the colon.
raw = trimLWS(raw)
// Check for colon.
if len(raw) == 0 || raw[0] != ':' {
return false
}
// Skip any whitespace after the colon.
raw = trimLWS(raw[1:])
harJSONTypes := [][]byte{
[]byte(`"version"`),
[]byte(`"creator"`),
[]byte(`"entries"`),
}
for _, t := range harJSONTypes {
si := bytes.Index(raw, t)
if si > -1 {
return true
}
}
return false
} }
// Svg matches a SVG file. // Svg matches a SVG file.
func Svg(raw []byte, limit uint32) bool { func Svg(raw []byte, limit uint32) bool {
return svgWithoutXMLDeclaration(raw) || svgWithXMLDeclaration(raw) return bytes.Contains(raw, []byte("<svg"))
}
// svgWithoutXMLDeclaration matches a SVG image that does not have an XML header.
// Example:
//
// <!-- xml comment ignored -->
// <svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
// <rect fill="#fff" stroke="#000" x="-70" y="-70" width="390" height="390"/>
// </svg>
func svgWithoutXMLDeclaration(s scan.Bytes) bool {
for scan.ByteIsWS(s.Peek()) {
s.Advance(1)
}
for mkup.SkipAComment(&s) {
}
if !bytes.HasPrefix(s, []byte("<svg")) {
return false
}
targetName, targetVal := "xmlns", "http://www.w3.org/2000/svg"
aName, aVal, hasMore := "", "", true
for hasMore {
aName, aVal, hasMore = mkup.GetAnAttribute(&s)
if aName == targetName && aVal == targetVal {
return true
}
if !hasMore {
return false
}
}
return false
}
// svgWithXMLDeclaration matches a SVG image that has an XML header.
// Example:
//
// <?xml version="1.0" encoding="UTF-8" standalone="no"?>
// <svg width="391" height="391" viewBox="-70.5 -70.5 391 391" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
// <rect fill="#fff" stroke="#000" x="-70" y="-70" width="390" height="390"/>
// </svg>
func svgWithXMLDeclaration(s scan.Bytes) bool {
for scan.ByteIsWS(s.Peek()) {
s.Advance(1)
}
if !bytes.HasPrefix(s, []byte("<?xml")) {
return false
}
// version is a required attribute for XML.
hasVersion := false
aName, hasMore := "", true
for hasMore {
aName, _, hasMore = mkup.GetAnAttribute(&s)
if aName == "version" {
hasVersion = true
break
}
if !hasMore {
break
}
}
if len(s) > 4096 {
s = s[:4096]
}
return hasVersion && bytes.Contains(s, []byte("<svg"))
} }
// Srt matches a SubRip file. // Srt matches a SubRip file.
func Srt(raw []byte, _ uint32) bool { func Srt(in []byte, _ uint32) bool {
s := scan.Bytes(raw) s := bufio.NewScanner(bytes.NewReader(in))
line := s.Line() if !s.Scan() {
// First line must be 1.
if len(line) != 1 || line[0] != '1' {
return false return false
} }
line = s.Line() // First line must be 1.
// Timestamp format (e.g: 00:02:16,612 --> 00:02:19,376) limits second line if s.Text() != "1" {
return false
}
if !s.Scan() {
return false
}
secondLine := s.Text()
// Timestamp format (e.g: 00:02:16,612 --> 00:02:19,376) limits secondLine
// length to exactly 29 characters. // length to exactly 29 characters.
if len(line) != 29 { if len(secondLine) != 29 {
return false return false
} }
// Decimal separator of fractional seconds in the timestamps must be a // Decimal separator of fractional seconds in the timestamps must be a
// comma, not a period. // comma, not a period.
if bytes.IndexByte(line, '.') != -1 { if strings.Contains(secondLine, ".") {
return false return false
} }
sep := []byte(" --> ") // For Go <1.17, comma is not recognised as a decimal separator by `time.Parse`.
i := bytes.Index(line, sep) secondLine = strings.ReplaceAll(secondLine, ",", ".")
if i == -1 { // Second line must be a time range.
ts := strings.Split(secondLine, " --> ")
if len(ts) != 2 {
return false return false
} }
const layout = "15:04:05,000" const layout = "15:04:05.000"
t0, err := time.Parse(layout, string(line[:i])) t0, err := time.Parse(layout, ts[0])
if err != nil { if err != nil {
return false return false
} }
t1, err := time.Parse(layout, string(line[i+len(sep):])) t1, err := time.Parse(layout, ts[1])
if err != nil { if err != nil {
return false return false
} }
@@ -380,9 +345,8 @@ func Srt(raw []byte, _ uint32) bool {
return false return false
} }
line = s.Line()
// A third line must exist and not be empty. This is the actual subtitle text. // A third line must exist and not be empty. This is the actual subtitle text.
return len(line) != 0 return s.Scan() && len(s.Bytes()) != 0
} }
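The timestamp rule above can be checked directly with the standard time package; on Go 1.17+ a comma in the layout is accepted as the fractional-second separator, which is what the comma-based layout relies on:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Second line of a SubRip cue: "00:02:16,612 --> 00:02:19,376".
	const layout = "15:04:05,000"
	t0, err0 := time.Parse(layout, "00:02:16,612")
	t1, err1 := time.Parse(layout, "00:02:19,376")
	fmt.Println(err0 == nil && err1 == nil && t1.After(t0)) // true
}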
// Vtt matches a Web Video Text Tracks (WebVTT) file. See // Vtt matches a Web Video Text Tracks (WebVTT) file. See


@@ -1,43 +1,63 @@
package magic package magic
import ( import (
"github.com/gabriel-vasile/mimetype/internal/csv" "bytes"
"github.com/gabriel-vasile/mimetype/internal/scan" "encoding/csv"
"errors"
"io"
) )
// CSV matches a comma-separated values file. // Csv matches a comma-separated values file.
func CSV(raw []byte, limit uint32) bool { func Csv(raw []byte, limit uint32) bool {
return sv(raw, ',', limit) return sv(raw, ',', limit)
} }
// TSV matches a tab-separated values file. // Tsv matches a tab-separated values file.
func TSV(raw []byte, limit uint32) bool { func Tsv(raw []byte, limit uint32) bool {
return sv(raw, '\t', limit) return sv(raw, '\t', limit)
} }
func sv(in []byte, comma byte, limit uint32) bool { func sv(in []byte, comma rune, limit uint32) bool {
s := scan.Bytes(in) r := csv.NewReader(dropLastLine(in, limit))
s.DropLastLine(limit) r.Comma = comma
r := csv.NewParser(comma, '#', s) r.ReuseRecord = true
r.LazyQuotes = true
r.Comment = '#'
headerFields, _, hasMore := r.CountFields(false) lines := 0
if headerFields < 2 || !hasMore {
return false
}
csvLines := 1 // 1 for header
for { for {
fields, _, hasMore := r.CountFields(false) _, err := r.Read()
if !hasMore && fields == 0 { if errors.Is(err, io.EOF) {
break break
} }
csvLines++ if err != nil {
if fields != headerFields {
return false return false
} }
if csvLines >= 10 { lines++
return true }
return r.FieldsPerRecord > 1 && lines > 1
}
// dropLastLine drops the last incomplete line from b.
//
// mimetype limits itself to ReadLimit bytes when performing a detection.
// This means, for file formats like CSV or NDJSON, the last line of the input
// can be an incomplete line.
func dropLastLine(b []byte, cutAt uint32) io.Reader {
if cutAt == 0 {
return bytes.NewReader(b)
}
if uint32(len(b)) >= cutAt {
for i := cutAt - 1; i > 0; i-- {
if b[i] == '\n' {
return bytes.NewReader(b[:i])
} }
} }
return csvLines >= 2 // No newline was found between the 0 index and cutAt.
return bytes.NewReader(b[:cutAt])
}
return bytes.NewReader(b)
} }
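A compact sketch of the dropLastLine behaviour described above, using bytes.LastIndexByte instead of the explicit backwards loop; the semantics are intended to match, but treat it as an approximation:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// dropLastLine cuts the input at cutAt and then drops everything after the
// last newline before the cut, so a half-read CSV/NDJSON line is ignored.
func dropLastLine(b []byte, cutAt uint32) io.Reader {
	if cutAt == 0 || uint32(len(b)) < cutAt {
		return bytes.NewReader(b)
	}
	if i := bytes.LastIndexByte(b[:cutAt], '\n'); i > 0 {
		return bytes.NewReader(b[:i])
	}
	return bytes.NewReader(b[:cutAt])
}

func main() {
	in := []byte("a,b,c\n1,2,3\n4,5")
	out, _ := io.ReadAll(dropLastLine(in, uint32(len(in))))
	fmt.Printf("%q\n", out) // "a,b,c\n1,2,3"
}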


@@ -2,8 +2,8 @@ package magic
import ( import (
"bytes" "bytes"
"encoding/binary"
"github.com/gabriel-vasile/mimetype/internal/scan" "strings"
) )
var ( var (
@@ -41,149 +41,52 @@ func Zip(raw []byte, limit uint32) bool {
(raw[3] == 0x4 || raw[3] == 0x6 || raw[3] == 0x8) (raw[3] == 0x4 || raw[3] == 0x6 || raw[3] == 0x8)
} }
// Jar matches a Java archive file. There are two types of Jar files: // Jar matches a Java archive file.
// 1. the ones that can be opened with jexec and have 0xCAFE optional flag
// https://stackoverflow.com/tags/executable-jar/info
// 2. regular jars, same as above, just without the executable flag
// https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=262278#c0
// There is an argument to only check for the manifest, since it's the common
// denominator for both executable and non-executable versions. But traversing
// zip entries is unreliable because it does a linear search for signatures
// (instead of relying on offsets told by the file).
func Jar(raw []byte, limit uint32) bool { func Jar(raw []byte, limit uint32) bool {
return executableJar(raw) || return zipContains(raw, "META-INF/MANIFEST.MF")
zipHas(raw, zipEntries{{
name: []byte("META-INF/MANIFEST.MF"),
}, {
name: []byte("META-INF/"),
}}, 1)
} }
// KMZ matches a zipped KML file, which is "doc.kml" by convention. // zipTokenizer holds the source zip file and scanned index.
func KMZ(raw []byte, _ uint32) bool { type zipTokenizer struct {
return zipHas(raw, zipEntries{{ in []byte
name: []byte("doc.kml"), i int // current index
}}, 100)
} }
// An executable Jar has a 0xCAFE flag enabled in the first zip entry. // next returns the next file name from the zip headers.
// The rule from file/file is: // https://web.archive.org/web/20191129114319/https://users.cs.jmu.edu/buchhofp/forensics/formats/pkzip.html
// >(26.s+30) leshort 0xcafe Java archive data (JAR) func (t *zipTokenizer) next() (fileName string) {
func executableJar(b scan.Bytes) bool { if t.i > len(t.in) {
b.Advance(0x1A) return
offset, ok := b.Uint16()
if !ok {
return false
} }
b.Advance(int(offset) + 2) in := t.in[t.i:]
// pkSig is the signature of the zip local file header.
cafe, ok := b.Uint16() pkSig := []byte("PK\003\004")
return ok && cafe == 0xCAFE pkIndex := bytes.Index(in, pkSig)
} // 30 is the offset of the file name in the header.
fNameOffset := pkIndex + 30
// zipIterator iterates over a zip file returning the name of the zip entries // end if signature not found or file name offset outside of file.
// in that file. if pkIndex == -1 || fNameOffset > len(in) {
type zipIterator struct { return
b scan.Bytes
}
type zipEntries []struct {
name []byte
dir bool // dir means checking just the prefix of the entry, not the whole path
}
func (z zipEntries) match(file []byte) bool {
for i := range z {
if z[i].dir && bytes.HasPrefix(file, z[i].name) {
return true
} }
if bytes.Equal(file, z[i].name) {
fNameLen := int(binary.LittleEndian.Uint16(in[pkIndex+26 : pkIndex+28]))
if fNameLen <= 0 || fNameOffset+fNameLen > len(in) {
return
}
t.i += fNameOffset + fNameLen
return string(in[fNameOffset : fNameOffset+fNameLen])
}
// zipContains returns true if the zip file headers from in contain any of the paths.
func zipContains(in []byte, paths ...string) bool {
t := zipTokenizer{in: in}
for i, tok := 0, t.next(); tok != ""; i, tok = i+1, t.next() {
for p := range paths {
if strings.HasPrefix(tok, paths[p]) {
return true return true
} }
} }
return false
}
func zipHas(raw scan.Bytes, searchFor zipEntries, stopAfter int) bool {
iter := zipIterator{raw}
for i := 0; i < stopAfter; i++ {
f := iter.next()
if len(f) == 0 {
break
}
if searchFor.match(f) {
return true
}
} }
return false return false
} }
// msoxml behaves like zipHas, but it puts restrictions on what the first zip
// entry can be.
func msoxml(raw scan.Bytes, searchFor zipEntries, stopAfter int) bool {
iter := zipIterator{raw}
for i := 0; i < stopAfter; i++ {
f := iter.next()
if len(f) == 0 {
break
}
if searchFor.match(f) {
return true
}
// If the first entry is not one of the usually expected ones,
// then abort this check.
if i == 0 {
if !bytes.Equal(f, []byte("[Content_Types].xml")) &&
!bytes.Equal(f, []byte("_rels/.rels")) &&
!bytes.Equal(f, []byte("docProps")) &&
!bytes.Equal(f, []byte("customXml")) &&
!bytes.Equal(f, []byte("[trash]")) {
return false
}
}
}
return false
}
// next extracts the name of the next zip entry.
func (i *zipIterator) next() []byte {
pk := []byte("PK\003\004")
n := bytes.Index(i.b, pk)
if n == -1 {
return nil
}
i.b.Advance(n)
if !i.b.Advance(0x1A) {
return nil
}
l, ok := i.b.Uint16()
if !ok {
return nil
}
if !i.b.Advance(0x02) {
return nil
}
if len(i.b) < int(l) {
return nil
}
return i.b[:l]
}
// APK matches an Android Package Archive.
// The source of signatures is https://github.com/file/file/blob/1778642b8ba3d947a779a36fcd81f8e807220a19/magic/Magdir/archive#L1820-L1887
func APK(raw []byte, _ uint32) bool {
return zipHas(raw, zipEntries{{
name: []byte("AndroidManifest.xml"),
}, {
name: []byte("META-INF/com/android/build/gradle/app-metadata.properties"),
}, {
name: []byte("classes.dex"),
}, {
name: []byte("resources.arsc"),
}, {
name: []byte("res/drawable"),
}}, 100)
}
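To make the local-file-header offsets above concrete, a standalone sketch (hand-built header, hypothetical helper name) that pulls the first entry name out of a zip byte slice: the name length sits at offset 26 as a little-endian uint16 and the name itself starts at offset 30.

package main

import (
	"encoding/binary"
	"fmt"
)

func firstZipEntryName(in []byte) string {
	if len(in) < 30 || string(in[:4]) != "PK\x03\x04" {
		return ""
	}
	nameLen := int(binary.LittleEndian.Uint16(in[26:28]))
	if nameLen == 0 || 30+nameLen > len(in) {
		return ""
	}
	return string(in[30 : 30+nameLen])
}

func main() {
	// Minimal local file header for an entry named "doc.kml"; sizes, CRC and
	// timestamps are left as zeros for brevity.
	hdr := append([]byte("PK\x03\x04"), make([]byte, 22)...)
	hdr = append(hdr, 7, 0) // file name length = 7
	hdr = append(hdr, 0, 0) // extra field length = 0
	hdr = append(hdr, []byte("doc.kml")...)
	fmt.Println(firstZipEntryName(hdr)) // doc.kml
}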


@@ -103,17 +103,15 @@ func (m *MIME) match(in []byte, readLimit uint32) *MIME {
"text/html": charset.FromHTML, "text/html": charset.FromHTML,
"text/xml": charset.FromXML, "text/xml": charset.FromXML,
} }
charset := "" // ps holds optional MIME parameters.
ps := map[string]string{}
if f, ok := needsCharset[m.mime]; ok { if f, ok := needsCharset[m.mime]; ok {
// The charset comes from BOM, from HTML headers, from XML headers. if cset := f(in); cset != "" {
// Limit the number of bytes searched for to 1024. ps["charset"] = cset
charset = f(in[:min(len(in), 1024)])
} }
if m == root {
return m
} }
return m.cloneHierarchy(charset) return m.cloneHierarchy(ps)
} }
// flatten transforms an hierarchy of MIMEs into a slice of MIMEs. // flatten transforms an hierarchy of MIMEs into a slice of MIMEs.
@@ -127,10 +125,10 @@ func (m *MIME) flatten() []*MIME {
} }
// clone creates a new MIME with the provided optional MIME parameters. // clone creates a new MIME with the provided optional MIME parameters.
func (m *MIME) clone(charset string) *MIME { func (m *MIME) clone(ps map[string]string) *MIME {
clonedMIME := m.mime clonedMIME := m.mime
if charset != "" { if len(ps) > 0 {
clonedMIME = m.mime + "; charset=" + charset clonedMIME = mime.FormatMediaType(m.mime, ps)
} }
return &MIME{ return &MIME{
@@ -142,11 +140,11 @@ func (m *MIME) clone(charset string) *MIME {
// cloneHierarchy creates a clone of m and all its ancestors. The optional MIME // cloneHierarchy creates a clone of m and all its ancestors. The optional MIME
// parameters are set on the last child of the hierarchy. // parameters are set on the last child of the hierarchy.
func (m *MIME) cloneHierarchy(charset string) *MIME { func (m *MIME) cloneHierarchy(ps map[string]string) *MIME {
ret := m.clone(charset) ret := m.clone(ps)
lastChild := ret lastChild := ret
for p := m.Parent(); p != nil; p = p.Parent() { for p := m.Parent(); p != nil; p = p.Parent() {
pClone := p.clone("") pClone := p.clone(nil)
lastChild.parent = pClone lastChild.parent = pClone
lastChild = pClone lastChild = pClone
} }
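A one-liner showing how an optional charset parameter gets folded into the final MIME string via mime.FormatMediaType, which is what the clone path above does:

package main

import (
	"fmt"
	"mime"
)

func main() {
	full := mime.FormatMediaType("text/html", map[string]string{"charset": "utf-8"})
	fmt.Println(full) // text/html; charset=utf-8
}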


@@ -7,15 +7,14 @@ package mimetype
import ( import (
"io" "io"
"io/ioutil"
"mime" "mime"
"os" "os"
"sync/atomic" "sync/atomic"
) )
var defaultLimit uint32 = 3072
// readLimit is the maximum number of bytes from the input used when detecting. // readLimit is the maximum number of bytes from the input used when detecting.
var readLimit uint32 = defaultLimit var readLimit uint32 = 3072
// Detect returns the MIME type found from the provided byte slice. // Detect returns the MIME type found from the provided byte slice.
// //
@@ -49,7 +48,7 @@ func DetectReader(r io.Reader) (*MIME, error) {
// Using atomic because readLimit can be written at the same time in other goroutine. // Using atomic because readLimit can be written at the same time in other goroutine.
l := atomic.LoadUint32(&readLimit) l := atomic.LoadUint32(&readLimit)
if l == 0 { if l == 0 {
in, err = io.ReadAll(r) in, err = ioutil.ReadAll(r)
if err != nil { if err != nil {
return errMIME, err return errMIME, err
} }
@@ -104,7 +103,6 @@ func EqualsAny(s string, mimes ...string) bool {
// SetLimit sets the maximum number of bytes read from input when detecting the MIME type. // SetLimit sets the maximum number of bytes read from input when detecting the MIME type.
// Increasing the limit provides better detection for file formats which store // Increasing the limit provides better detection for file formats which store
// their magical numbers towards the end of the file: docx, pptx, xlsx, etc. // their magical numbers towards the end of the file: docx, pptx, xlsx, etc.
// During detection data is read in a single block of size limit, i.e. it is not buffered.
// A limit of 0 means the whole input file will be used. // A limit of 0 means the whole input file will be used.
func SetLimit(limit uint32) { func SetLimit(limit uint32) {
// Using atomic because readLimit can be read at the same time in other goroutine. // Using atomic because readLimit can be read at the same time in other goroutine.
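For context, typical use of the public API touched above (assuming the module is on the import path); SetLimit tunes how many bytes Detect and the reader/file variants look at:

package main

import (
	"fmt"

	"github.com/gabriel-vasile/mimetype"
)

func main() {
	// Raise the limit for formats whose signatures sit deep in the file
	// (docx, pptx, xlsx); passing 0 means the whole input is read.
	mimetype.SetLimit(1024 * 1024)

	m := mimetype.Detect([]byte("%PDF-1.7 ..."))
	fmt.Println(m.String(), m.Extension()) // application/pdf .pdf
}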


@@ -1,4 +1,4 @@
## 191 Supported MIME types ## 173 Supported MIME types
This file is automatically generated when running tests. Do not edit manually. This file is automatically generated when running tests. Do not edit manually.
Extension | MIME type | Aliases Extension | MIME type | Aliases
@@ -7,12 +7,11 @@ Extension | MIME type | Aliases
**.xpm** | image/x-xpixmap | - **.xpm** | image/x-xpixmap | -
**.7z** | application/x-7z-compressed | - **.7z** | application/x-7z-compressed | -
**.zip** | application/zip | application/x-zip, application/x-zip-compressed **.zip** | application/zip | application/x-zip, application/x-zip-compressed
**.xlsx** | application/vnd.openxmlformats-officedocument.spreadsheetml.sheet | -
**.docx** | application/vnd.openxmlformats-officedocument.wordprocessingml.document | - **.docx** | application/vnd.openxmlformats-officedocument.wordprocessingml.document | -
**.pptx** | application/vnd.openxmlformats-officedocument.presentationml.presentation | - **.pptx** | application/vnd.openxmlformats-officedocument.presentationml.presentation | -
**.xlsx** | application/vnd.openxmlformats-officedocument.spreadsheetml.sheet | -
**.epub** | application/epub+zip | - **.epub** | application/epub+zip | -
**.apk** | application/vnd.android.package-archive | - **.jar** | application/jar | -
**.jar** | application/java-archive | application/jar, application/jar-archive, application/x-java-archive
**.odt** | application/vnd.oasis.opendocument.text | application/x-vnd.oasis.opendocument.text **.odt** | application/vnd.oasis.opendocument.text | application/x-vnd.oasis.opendocument.text
**.ott** | application/vnd.oasis.opendocument.text-template | application/x-vnd.oasis.opendocument.text-template **.ott** | application/vnd.oasis.opendocument.text-template | application/x-vnd.oasis.opendocument.text-template
**.ods** | application/vnd.oasis.opendocument.spreadsheet | application/x-vnd.oasis.opendocument.spreadsheet **.ods** | application/vnd.oasis.opendocument.spreadsheet | application/x-vnd.oasis.opendocument.spreadsheet
@@ -24,8 +23,6 @@ Extension | MIME type | Aliases
**.odf** | application/vnd.oasis.opendocument.formula | application/x-vnd.oasis.opendocument.formula **.odf** | application/vnd.oasis.opendocument.formula | application/x-vnd.oasis.opendocument.formula
**.odc** | application/vnd.oasis.opendocument.chart | application/x-vnd.oasis.opendocument.chart **.odc** | application/vnd.oasis.opendocument.chart | application/x-vnd.oasis.opendocument.chart
**.sxc** | application/vnd.sun.xml.calc | - **.sxc** | application/vnd.sun.xml.calc | -
**.kmz** | application/vnd.google-earth.kmz | -
**.vsdx** | application/vnd.ms-visio.drawing.main+xml | -
**.pdf** | application/pdf | application/x-pdf **.pdf** | application/pdf | application/x-pdf
**.fdf** | application/vnd.fdf | - **.fdf** | application/vnd.fdf | -
**n/a** | application/x-ole-storage | - **n/a** | application/x-ole-storage | -
@@ -63,10 +60,9 @@ Extension | MIME type | Aliases
**.tar** | application/x-tar | - **.tar** | application/x-tar | -
**.xar** | application/x-xar | - **.xar** | application/x-xar | -
**.bz2** | application/x-bzip2 | - **.bz2** | application/x-bzip2 | -
**.fits** | application/fits | image/fits **.fits** | application/fits | -
**.tiff** | image/tiff | - **.tiff** | image/tiff | -
**.bmp** | image/bmp | image/x-bmp, image/x-ms-bmp **.bmp** | image/bmp | image/x-bmp, image/x-ms-bmp
**.123** | application/vnd.lotus-1-2-3 | -
**.ico** | image/x-icon | - **.ico** | image/x-icon | -
**.mp3** | audio/mpeg | audio/x-mpeg, audio/mp3 **.mp3** | audio/mpeg | audio/x-mpeg, audio/mp3
**.flac** | audio/flac | - **.flac** | audio/flac | -
@@ -79,28 +75,21 @@ Extension | MIME type | Aliases
**.au** | audio/basic | - **.au** | audio/basic | -
**.mpeg** | video/mpeg | - **.mpeg** | video/mpeg | -
**.mov** | video/quicktime | - **.mov** | video/quicktime | -
**.mqv** | video/quicktime | -
**.mp4** | video/mp4 | - **.mp4** | video/mp4 | -
**.avif** | image/avif | - **.webm** | video/webm | audio/webm
**.3gp** | video/3gpp | video/3gp, audio/3gpp **.3gp** | video/3gpp | video/3gp, audio/3gpp
**.3g2** | video/3gpp2 | video/3g2, audio/3gpp2 **.3g2** | video/3gpp2 | video/3g2, audio/3gpp2
**.mp4** | audio/mp4 | audio/x-mp4a
**.mqv** | video/quicktime | -
**.m4a** | audio/x-m4a | -
**.m4v** | video/x-m4v | -
**.heic** | image/heic | -
**.heic** | image/heic-sequence | -
**.heif** | image/heif | -
**.heif** | image/heif-sequence | -
**.mj2** | video/mj2 | -
**.dvb** | video/vnd.dvb.file | -
**.webm** | video/webm | audio/webm
**.avi** | video/x-msvideo | video/avi, video/msvideo **.avi** | video/x-msvideo | video/avi, video/msvideo
**.flv** | video/x-flv | - **.flv** | video/x-flv | -
**.mkv** | video/x-matroska | - **.mkv** | video/x-matroska | -
**.asf** | video/x-ms-asf | video/asf, video/x-ms-wmv **.asf** | video/x-ms-asf | video/asf, video/x-ms-wmv
**.aac** | audio/aac | - **.aac** | audio/aac | -
**.voc** | audio/x-unknown | - **.voc** | audio/x-unknown | -
**.mp4** | audio/mp4 | audio/x-m4a, audio/x-mp4a
**.m4a** | audio/x-m4a | -
**.m3u** | application/vnd.apple.mpegurl | audio/mpegurl **.m3u** | application/vnd.apple.mpegurl | audio/mpegurl
**.m4v** | video/x-m4v | -
**.rmvb** | application/vnd.rn-realmedia-vbr | - **.rmvb** | application/vnd.rn-realmedia-vbr | -
**.gz** | application/gzip | application/x-gzip, application/x-gunzip, application/gzipped, application/gzip-compressed, application/x-gzip-compressed, gzip/document **.gz** | application/gzip | application/x-gzip, application/x-gunzip, application/gzipped, application/gzip-compressed, application/x-gzip-compressed, gzip/document
**.class** | application/x-java-applet | - **.class** | application/x-java-applet | -
@@ -122,7 +111,6 @@ Extension | MIME type | Aliases
**.mobi** | application/x-mobipocket-ebook | - **.mobi** | application/x-mobipocket-ebook | -
**.lit** | application/x-ms-reader | - **.lit** | application/x-ms-reader | -
**.bpg** | image/bpg | - **.bpg** | image/bpg | -
**.cbor** | application/cbor | -
**.sqlite** | application/vnd.sqlite3 | application/x-sqlite3 **.sqlite** | application/vnd.sqlite3 | application/x-sqlite3
**.dwg** | image/vnd.dwg | image/x-dwg, application/acad, application/x-acad, application/autocad_dwg, application/dwg, application/x-dwg, application/x-autocad, drawing/dwg **.dwg** | image/vnd.dwg | image/x-dwg, application/acad, application/x-acad, application/autocad_dwg, application/dwg, application/x-dwg, application/x-autocad, drawing/dwg
**.nes** | application/vnd.nintendo.snes.rom | - **.nes** | application/vnd.nintendo.snes.rom | -
@@ -130,6 +118,10 @@ Extension | MIME type | Aliases
**.macho** | application/x-mach-binary | - **.macho** | application/x-mach-binary | -
**.qcp** | audio/qcelp | - **.qcp** | audio/qcelp | -
**.icns** | image/x-icns | - **.icns** | image/x-icns | -
**.heic** | image/heic | -
**.heic** | image/heic-sequence | -
**.heif** | image/heif | -
**.heif** | image/heif-sequence | -
**.hdr** | image/vnd.radiance | - **.hdr** | image/vnd.radiance | -
**.mrc** | application/marc | - **.mrc** | application/marc | -
**.mdb** | application/x-msaccess | - **.mdb** | application/x-msaccess | -
@@ -146,15 +138,13 @@ Extension | MIME type | Aliases
**.pat** | image/x-gimp-pat | - **.pat** | image/x-gimp-pat | -
**.gbr** | image/x-gimp-gbr | - **.gbr** | image/x-gimp-gbr | -
**.glb** | model/gltf-binary | - **.glb** | model/gltf-binary | -
**.avif** | image/avif | -
**.cab** | application/x-installshield | - **.cab** | application/x-installshield | -
**.jxr** | image/jxr | image/vnd.ms-photo **.jxr** | image/jxr | image/vnd.ms-photo
**.parquet** | application/vnd.apache.parquet | application/x-parquet
**.one** | application/onenote | -
**.chm** | application/vnd.ms-htmlhelp | -
**.txt** | text/plain | - **.txt** | text/plain | -
**.svg** | image/svg+xml | -
**.html** | text/html | - **.html** | text/html | -
**.xml** | text/xml | application/xml **.svg** | image/svg+xml | -
**.xml** | text/xml | -
**.rss** | application/rss+xml | text/rss **.rss** | application/rss+xml | text/rss
**.atom** | application/atom+xml | - **.atom** | application/atom+xml | -
**.x3d** | model/x3d+xml | - **.x3d** | model/x3d+xml | -
@@ -168,19 +158,16 @@ Extension | MIME type | Aliases
**.3mf** | application/vnd.ms-package.3dmanufacturing-3dmodel+xml | - **.3mf** | application/vnd.ms-package.3dmanufacturing-3dmodel+xml | -
**.xfdf** | application/vnd.adobe.xfdf | - **.xfdf** | application/vnd.adobe.xfdf | -
**.owl** | application/owl+xml | - **.owl** | application/owl+xml | -
**.html** | application/xhtml+xml | -
**.php** | text/x-php | - **.php** | text/x-php | -
**.js** | text/javascript | application/x-javascript, application/javascript **.js** | application/javascript | application/x-javascript, text/javascript
**.lua** | text/x-lua | - **.lua** | text/x-lua | -
**.pl** | text/x-perl | - **.pl** | text/x-perl | -
**.py** | text/x-python | text/x-script.python, application/x-python **.py** | text/x-python | text/x-script.python, application/x-python
**.rb** | text/x-ruby | application/x-ruby
**.json** | application/json | - **.json** | application/json | -
**.geojson** | application/geo+json | - **.geojson** | application/geo+json | -
**.har** | application/json | - **.har** | application/json | -
**.gltf** | model/gltf+json | -
**.ndjson** | application/x-ndjson | - **.ndjson** | application/x-ndjson | -
**.rtf** | text/rtf | application/rtf **.rtf** | text/rtf | -
**.srt** | application/x-subrip | application/x-srt, text/x-srt **.srt** | application/x-subrip | application/x-srt, text/x-srt
**.tcl** | text/x-tcl | application/x-tcl **.tcl** | text/x-tcl | application/x-tcl
**.csv** | text/csv | - **.csv** | text/csv | -
@@ -189,8 +176,3 @@ Extension | MIME type | Aliases
**.ics** | text/calendar | - **.ics** | text/calendar | -
**.warc** | application/warc | - **.warc** | application/warc | -
**.vtt** | text/vtt | - **.vtt** | text/vtt | -
**.sh** | text/x-shellscript | text/x-sh, application/x-shellscript, application/x-sh
**.pbm** | image/x-portable-bitmap | -
**.pgm** | image/x-portable-graymap | -
**.ppm** | image/x-portable-pixmap | -
**.pam** | image/x-portable-arbitrarymap | -
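The extension table above belongs to the vendored github.com/gabriel-vasile/mimetype documentation. As a quick orientation, here is a minimal sketch of how the library is typically called to detect one of the listed types; the file name is an illustrative assumption, not something taken from this repository.

```go
package main

import (
	"fmt"
	"log"

	"github.com/gabriel-vasile/mimetype"
)

func main() {
	// DetectFile reads at most the library's internal read limit from the
	// file and walks the detection tree to find the most specific match.
	mtype, err := mimetype.DetectFile("document.pdf") // hypothetical file name
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(mtype.String())    // e.g. "application/pdf"
	fmt.Println(mtype.Extension()) // e.g. ".pdf"
}
```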


@@ -18,14 +18,14 @@ import (
var root = newMIME("application/octet-stream", "", var root = newMIME("application/octet-stream", "",
func([]byte, uint32) bool { return true }, func([]byte, uint32) bool { return true },
xpm, sevenZ, zip, pdf, fdf, ole, ps, psd, p7s, ogg, png, jpg, jxl, jp2, jpx, xpm, sevenZ, zip, pdf, fdf, ole, ps, psd, p7s, ogg, png, jpg, jxl, jp2, jpx,
jpm, jxs, gif, webp, exe, elf, ar, tar, xar, bz2, fits, tiff, bmp, lotus, ico, jpm, jxs, gif, webp, exe, elf, ar, tar, xar, bz2, fits, tiff, bmp, ico, mp3, flac,
mp3, flac, midi, ape, musePack, amr, wav, aiff, au, mpeg, quickTime, mp4, webM, midi, ape, musePack, amr, wav, aiff, au, mpeg, quickTime, mqv, mp4, webM,
avi, flv, mkv, asf, aac, voc, m3u, rmvb, gzip, class, swf, crx, ttf, woff, threeGP, threeG2, avi, flv, mkv, asf, aac, voc, aMp4, m4a, m3u, m4v, rmvb,
woff2, otf, ttc, eot, wasm, shx, dbf, dcm, rar, djvu, mobi, lit, bpg, cbor, gzip, class, swf, crx, ttf, woff, woff2, otf, ttc, eot, wasm, shx, dbf, dcm, rar,
sqlite3, dwg, nes, lnk, macho, qcp, icns, hdr, mrc, mdb, accdb, zstd, cab, djvu, mobi, lit, bpg, sqlite3, dwg, nes, lnk, macho, qcp, icns, heic,
rpm, xz, lzip, torrent, cpio, tzif, xcf, pat, gbr, glb, cabIS, jxr, parquet, heicSeq, heif, heifSeq, hdr, mrc, mdb, accdb, zstd, cab, rpm, xz, lzip,
oneNote, chm, torrent, cpio, tzif, xcf, pat, gbr, glb, avif, cabIS, jxr,
// Keep text last because it is the slowest check. // Keep text last because it is the slowest check
text, text,
) )
@@ -45,26 +45,19 @@ var (
"application/gzip-compressed", "application/x-gzip-compressed", "application/gzip-compressed", "application/x-gzip-compressed",
"gzip/document") "gzip/document")
sevenZ = newMIME("application/x-7z-compressed", ".7z", magic.SevenZ) sevenZ = newMIME("application/x-7z-compressed", ".7z", magic.SevenZ)
// APK must be checked before JAR because APK is a subset of JAR. zip = newMIME("application/zip", ".zip", magic.Zip, xlsx, docx, pptx, epub, jar, odt, ods, odp, odg, odf, odc, sxc).
// This means APK should be a child of JAR detector, but in practice,
// the decisive signature for JAR might be located at the end of the file
// and not reachable because of library readLimit.
zip = newMIME("application/zip", ".zip", magic.Zip, docx, pptx, xlsx, epub, apk, jar, odt, ods, odp, odg, odf, odc, sxc, kmz, visio).
alias("application/x-zip", "application/x-zip-compressed") alias("application/x-zip", "application/x-zip-compressed")
tar = newMIME("application/x-tar", ".tar", magic.Tar) tar = newMIME("application/x-tar", ".tar", magic.Tar)
xar = newMIME("application/x-xar", ".xar", magic.Xar) xar = newMIME("application/x-xar", ".xar", magic.Xar)
bz2 = newMIME("application/x-bzip2", ".bz2", magic.Bz2) bz2 = newMIME("application/x-bzip2", ".bz2", magic.Bz2)
pdf = newMIME("application/pdf", ".pdf", magic.PDF). pdf = newMIME("application/pdf", ".pdf", magic.Pdf).
alias("application/x-pdf") alias("application/x-pdf")
fdf = newMIME("application/vnd.fdf", ".fdf", magic.Fdf) fdf = newMIME("application/vnd.fdf", ".fdf", magic.Fdf)
xlsx = newMIME("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", ".xlsx", magic.Xlsx) xlsx = newMIME("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", ".xlsx", magic.Xlsx)
docx = newMIME("application/vnd.openxmlformats-officedocument.wordprocessingml.document", ".docx", magic.Docx) docx = newMIME("application/vnd.openxmlformats-officedocument.wordprocessingml.document", ".docx", magic.Docx)
pptx = newMIME("application/vnd.openxmlformats-officedocument.presentationml.presentation", ".pptx", magic.Pptx) pptx = newMIME("application/vnd.openxmlformats-officedocument.presentationml.presentation", ".pptx", magic.Pptx)
visio = newMIME("application/vnd.ms-visio.drawing.main+xml", ".vsdx", magic.Visio)
epub = newMIME("application/epub+zip", ".epub", magic.Epub) epub = newMIME("application/epub+zip", ".epub", magic.Epub)
jar = newMIME("application/java-archive", ".jar", magic.Jar). jar = newMIME("application/jar", ".jar", magic.Jar)
alias("application/jar", "application/jar-archive", "application/x-java-archive")
apk = newMIME("application/vnd.android.package-archive", ".apk", magic.APK)
ole = newMIME("application/x-ole-storage", "", magic.Ole, msi, aaf, msg, xls, pub, ppt, doc) ole = newMIME("application/x-ole-storage", "", magic.Ole, msi, aaf, msg, xls, pub, ppt, doc)
msi = newMIME("application/x-ms-installer", ".msi", magic.Msi). msi = newMIME("application/x-ms-installer", ".msi", magic.Msi).
alias("application/x-windows-installer", "application/x-msi") alias("application/x-windows-installer", "application/x-msi")
@@ -78,26 +71,24 @@ var (
alias("application/msexcel") alias("application/msexcel")
msg = newMIME("application/vnd.ms-outlook", ".msg", magic.Msg) msg = newMIME("application/vnd.ms-outlook", ".msg", magic.Msg)
ps = newMIME("application/postscript", ".ps", magic.Ps) ps = newMIME("application/postscript", ".ps", magic.Ps)
fits = newMIME("application/fits", ".fits", magic.Fits).alias("image/fits") fits = newMIME("application/fits", ".fits", magic.Fits)
ogg = newMIME("application/ogg", ".ogg", magic.Ogg, oggAudio, oggVideo). ogg = newMIME("application/ogg", ".ogg", magic.Ogg, oggAudio, oggVideo).
alias("application/x-ogg") alias("application/x-ogg")
oggAudio = newMIME("audio/ogg", ".oga", magic.OggAudio) oggAudio = newMIME("audio/ogg", ".oga", magic.OggAudio)
oggVideo = newMIME("video/ogg", ".ogv", magic.OggVideo) oggVideo = newMIME("video/ogg", ".ogv", magic.OggVideo)
text = newMIME("text/plain", ".txt", magic.Text, svg, html, xml, php, js, lua, perl, python, ruby, json, ndJSON, rtf, srt, tcl, csv, tsv, vCard, iCalendar, warc, vtt, shell, netpbm, netpgm, netppm, netpam) text = newMIME("text/plain", ".txt", magic.Text, html, svg, xml, php, js, lua, perl, python, json, ndJSON, rtf, srt, tcl, csv, tsv, vCard, iCalendar, warc, vtt)
xml = newMIME("text/xml", ".xml", magic.XML, rss, atom, x3d, kml, xliff, collada, gml, gpx, tcx, amf, threemf, xfdf, owl2, xhtml). xml = newMIME("text/xml", ".xml", magic.XML, rss, atom, x3d, kml, xliff, collada, gml, gpx, tcx, amf, threemf, xfdf, owl2)
alias("application/xml") json = newMIME("application/json", ".json", magic.JSON, geoJSON, har)
xhtml = newMIME("application/xhtml+xml", ".html", magic.XHTML)
json = newMIME("application/json", ".json", magic.JSON, geoJSON, har, gltf)
har = newMIME("application/json", ".har", magic.HAR) har = newMIME("application/json", ".har", magic.HAR)
csv = newMIME("text/csv", ".csv", magic.CSV) csv = newMIME("text/csv", ".csv", magic.Csv)
tsv = newMIME("text/tab-separated-values", ".tsv", magic.TSV) tsv = newMIME("text/tab-separated-values", ".tsv", magic.Tsv)
geoJSON = newMIME("application/geo+json", ".geojson", magic.GeoJSON) geoJSON = newMIME("application/geo+json", ".geojson", magic.GeoJSON)
ndJSON = newMIME("application/x-ndjson", ".ndjson", magic.NdJSON) ndJSON = newMIME("application/x-ndjson", ".ndjson", magic.NdJSON)
html = newMIME("text/html", ".html", magic.HTML) html = newMIME("text/html", ".html", magic.HTML)
php = newMIME("text/x-php", ".php", magic.Php) php = newMIME("text/x-php", ".php", magic.Php)
rtf = newMIME("text/rtf", ".rtf", magic.Rtf).alias("application/rtf") rtf = newMIME("text/rtf", ".rtf", magic.Rtf)
js = newMIME("text/javascript", ".js", magic.Js). js = newMIME("application/javascript", ".js", magic.Js).
alias("application/x-javascript", "application/javascript") alias("application/x-javascript", "text/javascript")
srt = newMIME("application/x-subrip", ".srt", magic.Srt). srt = newMIME("application/x-subrip", ".srt", magic.Srt).
alias("application/x-srt", "text/x-srt") alias("application/x-srt", "text/x-srt")
vtt = newMIME("text/vtt", ".vtt", magic.Vtt) vtt = newMIME("text/vtt", ".vtt", magic.Vtt)
@@ -105,10 +96,6 @@ var (
perl = newMIME("text/x-perl", ".pl", magic.Perl) perl = newMIME("text/x-perl", ".pl", magic.Perl)
python = newMIME("text/x-python", ".py", magic.Python). python = newMIME("text/x-python", ".py", magic.Python).
alias("text/x-script.python", "application/x-python") alias("text/x-script.python", "application/x-python")
ruby = newMIME("text/x-ruby", ".rb", magic.Ruby).
alias("application/x-ruby")
shell = newMIME("text/x-shellscript", ".sh", magic.Shell).
alias("text/x-sh", "application/x-shellscript", "application/x-sh")
tcl = newMIME("text/x-tcl", ".tcl", magic.Tcl). tcl = newMIME("text/x-tcl", ".tcl", magic.Tcl).
alias("application/x-tcl") alias("application/x-tcl")
vCard = newMIME("text/vcard", ".vcf", magic.VCard) vCard = newMIME("text/vcard", ".vcf", magic.VCard)
@@ -120,7 +107,6 @@ var (
atom = newMIME("application/atom+xml", ".atom", magic.Atom) atom = newMIME("application/atom+xml", ".atom", magic.Atom)
x3d = newMIME("model/x3d+xml", ".x3d", magic.X3d) x3d = newMIME("model/x3d+xml", ".x3d", magic.X3d)
kml = newMIME("application/vnd.google-earth.kml+xml", ".kml", magic.Kml) kml = newMIME("application/vnd.google-earth.kml+xml", ".kml", magic.Kml)
kmz = newMIME("application/vnd.google-earth.kmz", ".kmz", magic.KMZ)
xliff = newMIME("application/x-xliff+xml", ".xlf", magic.Xliff) xliff = newMIME("application/x-xliff+xml", ".xlf", magic.Xliff)
collada = newMIME("model/vnd.collada+xml", ".dae", magic.Collada) collada = newMIME("model/vnd.collada+xml", ".dae", magic.Collada)
gml = newMIME("application/gml+xml", ".gml", magic.Gml) gml = newMIME("application/gml+xml", ".gml", magic.Gml)
@@ -144,9 +130,6 @@ var (
tiff = newMIME("image/tiff", ".tiff", magic.Tiff) tiff = newMIME("image/tiff", ".tiff", magic.Tiff)
bmp = newMIME("image/bmp", ".bmp", magic.Bmp). bmp = newMIME("image/bmp", ".bmp", magic.Bmp).
alias("image/x-bmp", "image/x-ms-bmp") alias("image/x-bmp", "image/x-ms-bmp")
// lotus check must be done before ico because some ico detection is a bit
// relaxed and some lotus files are wrongfully identified as ico otherwise.
lotus = newMIME("application/vnd.lotus-1-2-3", ".123", magic.Lotus123)
ico = newMIME("image/x-icon", ".ico", magic.Ico) ico = newMIME("image/x-icon", ".ico", magic.Ico)
icns = newMIME("image/x-icns", ".icns", magic.Icns) icns = newMIME("image/x-icns", ".icns", magic.Icns)
psd = newMIME("image/vnd.adobe.photoshop", ".psd", magic.Psd). psd = newMIME("image/vnd.adobe.photoshop", ".psd", magic.Psd).
@@ -173,14 +156,12 @@ var (
aac = newMIME("audio/aac", ".aac", magic.AAC) aac = newMIME("audio/aac", ".aac", magic.AAC)
voc = newMIME("audio/x-unknown", ".voc", magic.Voc) voc = newMIME("audio/x-unknown", ".voc", magic.Voc)
aMp4 = newMIME("audio/mp4", ".mp4", magic.AMp4). aMp4 = newMIME("audio/mp4", ".mp4", magic.AMp4).
alias("audio/x-mp4a") alias("audio/x-m4a", "audio/x-mp4a")
m4a = newMIME("audio/x-m4a", ".m4a", magic.M4a) m4a = newMIME("audio/x-m4a", ".m4a", magic.M4a)
m3u = newMIME("application/vnd.apple.mpegurl", ".m3u", magic.M3u). m3u = newMIME("application/vnd.apple.mpegurl", ".m3u", magic.M3u).
alias("audio/mpegurl") alias("audio/mpegurl")
m4v = newMIME("video/x-m4v", ".m4v", magic.M4v) m4v = newMIME("video/x-m4v", ".m4v", magic.M4v)
mj2 = newMIME("video/mj2", ".mj2", magic.Mj2) mp4 = newMIME("video/mp4", ".mp4", magic.Mp4)
dvb = newMIME("video/vnd.dvb.file", ".dvb", magic.Dvb)
mp4 = newMIME("video/mp4", ".mp4", magic.Mp4, avif, threeGP, threeG2, aMp4, mqv, m4a, m4v, heic, heicSeq, heif, heifSeq, mj2, dvb)
webM = newMIME("video/webm", ".webm", magic.WebM). webM = newMIME("video/webm", ".webm", magic.WebM).
alias("audio/webm") alias("audio/webm")
mpeg = newMIME("video/mpeg", ".mpeg", magic.Mpeg) mpeg = newMIME("video/mpeg", ".mpeg", magic.Mpeg)
@@ -274,16 +255,6 @@ var (
pat = newMIME("image/x-gimp-pat", ".pat", magic.Pat) pat = newMIME("image/x-gimp-pat", ".pat", magic.Pat)
gbr = newMIME("image/x-gimp-gbr", ".gbr", magic.Gbr) gbr = newMIME("image/x-gimp-gbr", ".gbr", magic.Gbr)
xfdf = newMIME("application/vnd.adobe.xfdf", ".xfdf", magic.Xfdf) xfdf = newMIME("application/vnd.adobe.xfdf", ".xfdf", magic.Xfdf)
glb = newMIME("model/gltf-binary", ".glb", magic.GLB) glb = newMIME("model/gltf-binary", ".glb", magic.Glb)
gltf = newMIME("model/gltf+json", ".gltf", magic.GLTF)
jxr = newMIME("image/jxr", ".jxr", magic.Jxr).alias("image/vnd.ms-photo") jxr = newMIME("image/jxr", ".jxr", magic.Jxr).alias("image/vnd.ms-photo")
parquet = newMIME("application/vnd.apache.parquet", ".parquet", magic.Par1).
alias("application/x-parquet")
netpbm = newMIME("image/x-portable-bitmap", ".pbm", magic.NetPBM)
netpgm = newMIME("image/x-portable-graymap", ".pgm", magic.NetPGM)
netppm = newMIME("image/x-portable-pixmap", ".ppm", magic.NetPPM)
netpam = newMIME("image/x-portable-arbitrarymap", ".pam", magic.NetPAM)
cbor = newMIME("application/cbor", ".cbor", magic.CBOR)
oneNote = newMIME("application/onenote", ".one", magic.One)
chm = newMIME("application/vnd.ms-htmlhelp", ".chm", magic.CHM)
) )
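Several entries in this hunk register aliases (for example application/x-zip for zip, and the text/javascript vs application/javascript swap for .js). A hedged sketch of why that matters to callers: MIME.Is compares against the canonical type and its registered aliases, so the check below should hold on either side of this version change. The fabricated ZIP signature is only for illustration.

```go
package main

import (
	"fmt"

	"github.com/gabriel-vasile/mimetype"
)

func main() {
	// A bare ZIP local-file-header signature satisfies the ZIP magic check;
	// the more specific children (docx, epub, jar, ...) need real archive
	// structure and fall through, leaving application/zip.
	data := append([]byte("PK\x03\x04"), make([]byte, 64)...)

	mtype := mimetype.Detect(data)
	fmt.Println(mtype.String())                // application/zip
	fmt.Println(mtype.Is("application/x-zip")) // true: matched through the registered alias
}
```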


@@ -1,7 +1,7 @@
# Server-Sent Events # Server-Sent Events
[![Go Reference](https://pkg.go.dev/badge/github.com/gin-contrib/sse.svg)](https://pkg.go.dev/github.com/gin-contrib/sse) [![GoDoc](https://godoc.org/github.com/gin-contrib/sse?status.svg)](https://godoc.org/github.com/gin-contrib/sse)
[![Run Tests](https://github.com/gin-contrib/sse/actions/workflows/go.yml/badge.svg)](https://github.com/gin-contrib/sse/actions/workflows/go.yml) [![Build Status](https://travis-ci.org/gin-contrib/sse.svg)](https://travis-ci.org/gin-contrib/sse)
[![codecov](https://codecov.io/gh/gin-contrib/sse/branch/master/graph/badge.svg)](https://codecov.io/gh/gin-contrib/sse) [![codecov](https://codecov.io/gh/gin-contrib/sse/branch/master/graph/badge.svg)](https://codecov.io/gh/gin-contrib/sse)
[![Go Report Card](https://goreportcard.com/badge/github.com/gin-contrib/sse)](https://goreportcard.com/report/github.com/gin-contrib/sse) [![Go Report Card](https://goreportcard.com/badge/github.com/gin-contrib/sse)](https://goreportcard.com/report/github.com/gin-contrib/sse)
@@ -34,8 +34,7 @@ func httpHandler(w http.ResponseWriter, req *http.Request) {
}) })
} }
``` ```
```
```sh
event: message event: message
data: some data\\nmore data data: some data\\nmore data
@@ -50,8 +49,7 @@ data: {"content":"hi!","date":1431540810,"user":"manu"}
```go ```go
fmt.Println(sse.ContentType) fmt.Println(sse.ContentType)
``` ```
```
```sh
text/event-stream text/event-stream
``` ```
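The README excerpt above shows Encode producing an event/data block. A small self-contained sketch of that call against the vendored gin-contrib/sse package; the writer and payload are illustrative only.

```go
package main

import (
	"log"
	"os"

	"github.com/gin-contrib/sse"
)

func main() {
	// Encode writes the id/event/data fields in text/event-stream framing,
	// escaping newlines inside each field and JSON-encoding map payloads.
	err := sse.Encode(os.Stdout, sse.Event{
		Id:    "124",
		Event: "message",
		Data:  map[string]interface{}{"user": "manu", "date": 1431540810, "content": "hi!"},
	})
	if err != nil {
		log.Fatal(err)
	}
}
```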


@@ -7,6 +7,7 @@ package sse
import ( import (
"bytes" "bytes"
"io" "io"
"io/ioutil"
) )
type decoder struct { type decoder struct {
@@ -21,8 +22,7 @@ func Decode(r io.Reader) ([]Event, error) {
func (d *decoder) dispatchEvent(event Event, data string) { func (d *decoder) dispatchEvent(event Event, data string) {
dataLength := len(data) dataLength := len(data)
if dataLength > 0 { if dataLength > 0 {
// If the data buffer's last character is a U+000A LINE FEED (LF) character, //If the data buffer's last character is a U+000A LINE FEED (LF) character, then remove the last character from the data buffer.
// then remove the last character from the data buffer.
data = data[:dataLength-1] data = data[:dataLength-1]
dataLength-- dataLength--
} }
@@ -37,13 +37,13 @@ func (d *decoder) dispatchEvent(event Event, data string) {
} }
func (d *decoder) decode(r io.Reader) ([]Event, error) { func (d *decoder) decode(r io.Reader) ([]Event, error) {
buf, err := io.ReadAll(r) buf, err := ioutil.ReadAll(r)
if err != nil { if err != nil {
return nil, err return nil, err
} }
var currentEvent Event var currentEvent Event
dataBuffer := new(bytes.Buffer) var dataBuffer *bytes.Buffer = new(bytes.Buffer)
// TODO (and unit tests) // TODO (and unit tests)
// Lines must be separated by either a U+000D CARRIAGE RETURN U+000A LINE FEED (CRLF) character pair, // Lines must be separated by either a U+000D CARRIAGE RETURN U+000A LINE FEED (CRLF) character pair,
// a single U+000A LINE FEED (LF) character, // a single U+000A LINE FEED (LF) character,
@@ -96,8 +96,7 @@ func (d *decoder) decode(r io.Reader) ([]Event, error) {
currentEvent.Id = string(value) currentEvent.Id = string(value)
case "retry": case "retry":
// If the field value consists of only characters in the range U+0030 DIGIT ZERO (0) to U+0039 DIGIT NINE (9), // If the field value consists of only characters in the range U+0030 DIGIT ZERO (0) to U+0039 DIGIT NINE (9),
// then interpret the field value as an integer in base ten, and set the event stream's // then interpret the field value as an integer in base ten, and set the event stream's reconnection time to that integer.
// reconnection time to that integer.
// Otherwise, ignore the field. // Otherwise, ignore the field.
currentEvent.Id = string(value) currentEvent.Id = string(value)
case "data": case "data":
@@ -106,7 +105,7 @@ func (d *decoder) decode(r io.Reader) ([]Event, error) {
// then append a single U+000A LINE FEED (LF) character to the data buffer. // then append a single U+000A LINE FEED (LF) character to the data buffer.
dataBuffer.WriteString("\n") dataBuffer.WriteString("\n")
default: default:
// Otherwise. The field is ignored. //Otherwise. The field is ignored.
continue continue
} }
} }
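For the decoder side of the same package, a minimal hedged sketch of Decode consuming a stream; the input literal is an assumption for illustration.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/gin-contrib/sse"
)

func main() {
	stream := "event: message\ndata: first line\ndata: second line\n\n"

	// Decode reads the whole stream and returns one Event per block;
	// successive data: lines are joined with a newline, per the spec
	// comments visible in the diff above.
	events, err := sse.Decode(strings.NewReader(stream))
	if err != nil {
		log.Fatal(err)
	}
	for _, ev := range events {
		fmt.Printf("%s -> %q\n", ev.Event, ev.Data)
	}
}
```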


@@ -18,12 +18,10 @@ import (
// W3C Working Draft 29 October 2009 // W3C Working Draft 29 October 2009
// http://www.w3.org/TR/2009/WD-eventsource-20091029/ // http://www.w3.org/TR/2009/WD-eventsource-20091029/
const ContentType = "text/event-stream;charset=utf-8" const ContentType = "text/event-stream"
var ( var contentType = []string{ContentType}
contentType = []string{ContentType} var noCache = []string{"no-cache"}
noCache = []string{"no-cache"}
)
var fieldReplacer = strings.NewReplacer( var fieldReplacer = strings.NewReplacer(
"\n", "\\n", "\n", "\\n",
@@ -50,48 +48,40 @@ func Encode(writer io.Writer, event Event) error {
func writeId(w stringWriter, id string) { func writeId(w stringWriter, id string) {
if len(id) > 0 { if len(id) > 0 {
_, _ = w.WriteString("id:") w.WriteString("id:")
_, _ = fieldReplacer.WriteString(w, id) fieldReplacer.WriteString(w, id)
_, _ = w.WriteString("\n") w.WriteString("\n")
} }
} }
func writeEvent(w stringWriter, event string) { func writeEvent(w stringWriter, event string) {
if len(event) > 0 { if len(event) > 0 {
_, _ = w.WriteString("event:") w.WriteString("event:")
_, _ = fieldReplacer.WriteString(w, event) fieldReplacer.WriteString(w, event)
_, _ = w.WriteString("\n") w.WriteString("\n")
} }
} }
func writeRetry(w stringWriter, retry uint) { func writeRetry(w stringWriter, retry uint) {
if retry > 0 { if retry > 0 {
_, _ = w.WriteString("retry:") w.WriteString("retry:")
_, _ = w.WriteString(strconv.FormatUint(uint64(retry), 10)) w.WriteString(strconv.FormatUint(uint64(retry), 10))
_, _ = w.WriteString("\n") w.WriteString("\n")
} }
} }
func writeData(w stringWriter, data interface{}) error { func writeData(w stringWriter, data interface{}) error {
_, _ = w.WriteString("data:") w.WriteString("data:")
switch kindOfData(data) {
bData, ok := data.([]byte)
if ok {
_, _ = dataReplacer.WriteString(w, string(bData))
_, _ = w.WriteString("\n\n")
return nil
}
switch kindOfData(data) { //nolint:exhaustive
case reflect.Struct, reflect.Slice, reflect.Map: case reflect.Struct, reflect.Slice, reflect.Map:
err := json.NewEncoder(w).Encode(data) err := json.NewEncoder(w).Encode(data)
if err != nil { if err != nil {
return err return err
} }
_, _ = w.WriteString("\n") w.WriteString("\n")
default: default:
_, _ = dataReplacer.WriteString(w, fmt.Sprint(data)) dataReplacer.WriteString(w, fmt.Sprint(data))
_, _ = w.WriteString("\n\n") w.WriteString("\n\n")
} }
return nil return nil
} }


@@ -12,7 +12,7 @@ type stringWrapper struct {
} }
func (w stringWrapper) WriteString(str string) (int, error) { func (w stringWrapper) WriteString(str string) (int, error) {
return w.Write([]byte(str)) return w.Writer.Write([]byte(str))
} }
func checkWriter(writer io.Writer) stringWriter { func checkWriter(writer io.Writer) stringWriter {


@@ -5,7 +5,6 @@
package gin package gin
import ( import (
"crypto/tls"
"fmt" "fmt"
"html/template" "html/template"
"net" "net"
@@ -42,10 +41,8 @@ var defaultTrustedCIDRs = []*net.IPNet{
}, },
} }
var ( var regSafePrefix = regexp.MustCompile("[^a-zA-Z0-9/-]+")
regSafePrefix = regexp.MustCompile("[^a-zA-Z0-9/-]+") var regRemoveRepeatedChar = regexp.MustCompile("/{2,}")
regRemoveRepeatedChar = regexp.MustCompile("/{2,}")
)
// HandlerFunc defines the handler used by gin middleware as return value. // HandlerFunc defines the handler used by gin middleware as return value.
type HandlerFunc func(*Context) type HandlerFunc func(*Context)
@@ -518,15 +515,7 @@ func (engine *Engine) RunTLS(addr, certFile, keyFile string) (err error) {
"Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.") "Please check https://pkg.go.dev/github.com/gin-gonic/gin#readme-don-t-trust-all-proxies for details.")
} }
server := &http.Server{ err = http.ListenAndServeTLS(addr, certFile, keyFile, engine.Handler())
Addr: addr,
Handler: engine.Handler(),
TLSConfig: &tls.Config{
MinVersion: tls.VersionTLS12, // TLS 1.2 or higher
},
}
err = server.ListenAndServeTLS(certFile, keyFile)
return return
} }
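One side of this hunk builds an *http.Server with a TLS 1.2 floor before serving, while the other calls http.ListenAndServeTLS directly. If the vendored gin ends up on the simpler variant, the same hardening can be applied from application code; a hedged sketch, with the address and certificate paths as placeholders.

```go
package main

import (
	"crypto/tls"
	"log"
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	router := gin.Default()
	router.GET("/healthz", func(c *gin.Context) { c.String(http.StatusOK, "ok") })

	srv := &http.Server{
		Addr:    ":8443",
		Handler: router, // *gin.Engine implements http.Handler
		TLSConfig: &tls.Config{
			MinVersion: tls.VersionTLS12, // refuse TLS 1.0/1.1 regardless of gin's RunTLS behaviour
		},
	}
	log.Fatal(srv.ListenAndServeTLS("server.crt", "server.key"))
}
```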


@@ -3,7 +3,7 @@ GOCMD=go
linters-install: linters-install:
@golangci-lint --version >/dev/null 2>&1 || { \ @golangci-lint --version >/dev/null 2>&1 || { \
echo "installing linting tools..."; \ echo "installing linting tools..."; \
curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s v2.0.2; \ curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s v1.41.1; \
} }
lint: linters-install lint: linters-install


@@ -1,7 +1,8 @@
Package validator Package validator
================= =================
<img align="right" src="logo.png">[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/go-playground/validator)](https://github.com/go-playground/validator/releases) <img align="right" src="logo.png">[![Join the chat at https://gitter.im/go-playground/validator](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/go-playground/validator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status](https://github.com/go-playground/validator/actions/workflows/workflow.yml/badge.svg)](https://github.com/go-playground/validator/actions) ![Project status](https://img.shields.io/badge/version-10.20.0-green.svg)
[![Build Status](https://travis-ci.org/go-playground/validator.svg?branch=master)](https://travis-ci.org/go-playground/validator)
[![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=master&service=github)](https://coveralls.io/github/go-playground/validator?branch=master) [![Coverage Status](https://coveralls.io/repos/go-playground/validator/badge.svg?branch=master&service=github)](https://coveralls.io/github/go-playground/validator?branch=master)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator) [![Go Report Card](https://goreportcard.com/badge/github.com/go-playground/validator)](https://goreportcard.com/report/github.com/go-playground/validator)
[![GoDoc](https://godoc.org/github.com/go-playground/validator?status.svg)](https://pkg.go.dev/github.com/go-playground/validator/v10) [![GoDoc](https://godoc.org/github.com/go-playground/validator?status.svg)](https://pkg.go.dev/github.com/go-playground/validator/v10)
@@ -21,11 +22,6 @@ It has the following **unique** features:
- Customizable i18n aware error messages. - Customizable i18n aware error messages.
- Default validator for the [gin](https://github.com/gin-gonic/gin) web framework; upgrading from v8 to v9 in gin see [here](https://github.com/go-playground/validator/tree/master/_examples/gin-upgrading-overriding) - Default validator for the [gin](https://github.com/gin-gonic/gin) web framework; upgrading from v8 to v9 in gin see [here](https://github.com/go-playground/validator/tree/master/_examples/gin-upgrading-overriding)
A Call for Maintainers
----------------------
Please read the discussiong started [here](https://github.com/go-playground/validator/discussions/1330) if you are interested in contributing/helping maintain this package.
Installation Installation
------------ ------------
@@ -167,12 +163,10 @@ validate := validator.New(validator.WithRequiredStructEnabled())
| btc_addr_bech32 | Bitcoin Bech32 Address (segwit) | | btc_addr_bech32 | Bitcoin Bech32 Address (segwit) |
| credit_card | Credit Card Number | | credit_card | Credit Card Number |
| mongodb | MongoDB ObjectID | | mongodb | MongoDB ObjectID |
| mongodb_connection_string | MongoDB Connection String |
| cron | Cron | | cron | Cron |
| spicedb | SpiceDb ObjectID/Permission/Type | | spicedb | SpiceDb ObjectID/Permission/Type |
| datetime | Datetime | | datetime | Datetime |
| e164 | e164 formatted phone number | | e164 | e164 formatted phone number |
| ein | U.S. Employeer Identification Number |
| email | E-mail String | email | E-mail String
| eth_addr | Ethereum Address | | eth_addr | Ethereum Address |
| hexadecimal | Hexadecimal String | | hexadecimal | Hexadecimal String |
@@ -262,8 +256,6 @@ validate := validator.New(validator.WithRequiredStructEnabled())
| excluded_without | Excluded Without | | excluded_without | Excluded Without |
| excluded_without_all | Excluded Without All | | excluded_without_all | Excluded Without All |
| unique | Unique | | unique | Unique |
| validateFn | Verify if the method `Validate() error` does not return an error (or any specified method) |
#### Aliases: #### Aliases:
| Tag | Description | | Tag | Description |
@@ -273,75 +265,74 @@ validate := validator.New(validator.WithRequiredStructEnabled())
Benchmarks Benchmarks
------ ------
###### Run on MacBook Pro Max M3 ###### Run on MacBook Pro (15-inch, 2017) go version go1.10.2 darwin/amd64
```go ```go
go version go1.23.3 darwin/arm64 go version go1.21.0 darwin/arm64
goos: darwin goos: darwin
goarch: arm64 goarch: arm64
cpu: Apple M3 Max
pkg: github.com/go-playground/validator/v10 pkg: github.com/go-playground/validator/v10
BenchmarkFieldSuccess-16 42461943 27.88 ns/op 0 B/op 0 allocs/op BenchmarkFieldSuccess-8 33142266 35.94 ns/op 0 B/op 0 allocs/op
BenchmarkFieldSuccessParallel-16 486632887 2.289 ns/op 0 B/op 0 allocs/op BenchmarkFieldSuccessParallel-8 200816191 6.568 ns/op 0 B/op 0 allocs/op
BenchmarkFieldFailure-16 9566167 121.3 ns/op 200 B/op 4 allocs/op BenchmarkFieldFailure-8 6779707 175.1 ns/op 200 B/op 4 allocs/op
BenchmarkFieldFailureParallel-16 17551471 83.68 ns/op 200 B/op 4 allocs/op BenchmarkFieldFailureParallel-8 11044147 108.4 ns/op 200 B/op 4 allocs/op
BenchmarkFieldArrayDiveSuccess-16 7602306 155.6 ns/op 97 B/op 5 allocs/op BenchmarkFieldArrayDiveSuccess-8 6054232 194.4 ns/op 97 B/op 5 allocs/op
BenchmarkFieldArrayDiveSuccessParallel-16 20664610 59.80 ns/op 97 B/op 5 allocs/op BenchmarkFieldArrayDiveSuccessParallel-8 12523388 94.07 ns/op 97 B/op 5 allocs/op
BenchmarkFieldArrayDiveFailure-16 4659756 252.9 ns/op 301 B/op 10 allocs/op BenchmarkFieldArrayDiveFailure-8 3587043 334.3 ns/op 300 B/op 10 allocs/op
BenchmarkFieldArrayDiveFailureParallel-16 8010116 152.9 ns/op 301 B/op 10 allocs/op BenchmarkFieldArrayDiveFailureParallel-8 5816665 200.8 ns/op 300 B/op 10 allocs/op
BenchmarkFieldMapDiveSuccess-16 2834575 421.2 ns/op 288 B/op 14 allocs/op BenchmarkFieldMapDiveSuccess-8 2217910 540.1 ns/op 288 B/op 14 allocs/op
BenchmarkFieldMapDiveSuccessParallel-16 7179700 171.8 ns/op 288 B/op 14 allocs/op BenchmarkFieldMapDiveSuccessParallel-8 4446698 258.7 ns/op 288 B/op 14 allocs/op
BenchmarkFieldMapDiveFailure-16 3081728 384.4 ns/op 376 B/op 13 allocs/op BenchmarkFieldMapDiveFailure-8 2392759 504.6 ns/op 376 B/op 13 allocs/op
BenchmarkFieldMapDiveFailureParallel-16 6058137 204.0 ns/op 377 B/op 13 allocs/op BenchmarkFieldMapDiveFailureParallel-8 4244199 286.9 ns/op 376 B/op 13 allocs/op
BenchmarkFieldMapDiveWithKeysSuccess-16 2544975 464.8 ns/op 288 B/op 14 allocs/op BenchmarkFieldMapDiveWithKeysSuccess-8 2005857 592.1 ns/op 288 B/op 14 allocs/op
BenchmarkFieldMapDiveWithKeysSuccessParallel-16 6661954 181.4 ns/op 288 B/op 14 allocs/op BenchmarkFieldMapDiveWithKeysSuccessParallel-8 4400850 296.9 ns/op 288 B/op 14 allocs/op
BenchmarkFieldMapDiveWithKeysFailure-16 2435484 490.7 ns/op 553 B/op 16 allocs/op BenchmarkFieldMapDiveWithKeysFailure-8 1850227 643.8 ns/op 553 B/op 16 allocs/op
BenchmarkFieldMapDiveWithKeysFailureParallel-16 4249617 282.0 ns/op 554 B/op 16 allocs/op BenchmarkFieldMapDiveWithKeysFailureParallel-8 3293233 375.1 ns/op 553 B/op 16 allocs/op
BenchmarkFieldCustomTypeSuccess-16 14943525 77.35 ns/op 32 B/op 2 allocs/op BenchmarkFieldCustomTypeSuccess-8 12174412 98.25 ns/op 32 B/op 2 allocs/op
BenchmarkFieldCustomTypeSuccessParallel-16 64051954 20.61 ns/op 32 B/op 2 allocs/op BenchmarkFieldCustomTypeSuccessParallel-8 34389907 35.49 ns/op 32 B/op 2 allocs/op
BenchmarkFieldCustomTypeFailure-16 10721384 107.1 ns/op 184 B/op 3 allocs/op BenchmarkFieldCustomTypeFailure-8 7582524 156.6 ns/op 184 B/op 3 allocs/op
BenchmarkFieldCustomTypeFailureParallel-16 18714495 69.77 ns/op 184 B/op 3 allocs/op BenchmarkFieldCustomTypeFailureParallel-8 13019902 92.79 ns/op 184 B/op 3 allocs/op
BenchmarkFieldOrTagSuccess-16 4063124 294.3 ns/op 16 B/op 1 allocs/op BenchmarkFieldOrTagSuccess-8 3427260 349.4 ns/op 16 B/op 1 allocs/op
BenchmarkFieldOrTagSuccessParallel-16 31903756 41.22 ns/op 18 B/op 1 allocs/op BenchmarkFieldOrTagSuccessParallel-8 15144128 81.25 ns/op 16 B/op 1 allocs/op
BenchmarkFieldOrTagFailure-16 7748558 146.8 ns/op 216 B/op 5 allocs/op BenchmarkFieldOrTagFailure-8 5913546 201.9 ns/op 216 B/op 5 allocs/op
BenchmarkFieldOrTagFailureParallel-16 13139854 92.05 ns/op 216 B/op 5 allocs/op BenchmarkFieldOrTagFailureParallel-8 9810212 113.7 ns/op 216 B/op 5 allocs/op
BenchmarkStructLevelValidationSuccess-16 16808389 70.25 ns/op 16 B/op 1 allocs/op BenchmarkStructLevelValidationSuccess-8 13456327 87.66 ns/op 16 B/op 1 allocs/op
BenchmarkStructLevelValidationSuccessParallel-16 90686955 14.47 ns/op 16 B/op 1 allocs/op BenchmarkStructLevelValidationSuccessParallel-8 41818888 27.77 ns/op 16 B/op 1 allocs/op
BenchmarkStructLevelValidationFailure-16 5818791 200.2 ns/op 264 B/op 7 allocs/op BenchmarkStructLevelValidationFailure-8 4166284 272.6 ns/op 264 B/op 7 allocs/op
BenchmarkStructLevelValidationFailureParallel-16 11115874 107.5 ns/op 264 B/op 7 allocs/op BenchmarkStructLevelValidationFailureParallel-8 7594581 152.1 ns/op 264 B/op 7 allocs/op
BenchmarkStructSimpleCustomTypeSuccess-16 7764956 151.9 ns/op 32 B/op 2 allocs/op BenchmarkStructSimpleCustomTypeSuccess-8 6508082 182.6 ns/op 32 B/op 2 allocs/op
BenchmarkStructSimpleCustomTypeSuccessParallel-16 52316265 30.37 ns/op 32 B/op 2 allocs/op BenchmarkStructSimpleCustomTypeSuccessParallel-8 23078605 54.78 ns/op 32 B/op 2 allocs/op
BenchmarkStructSimpleCustomTypeFailure-16 4195429 277.2 ns/op 416 B/op 9 allocs/op BenchmarkStructSimpleCustomTypeFailure-8 3118352 381.0 ns/op 416 B/op 9 allocs/op
BenchmarkStructSimpleCustomTypeFailureParallel-16 7305661 164.6 ns/op 432 B/op 10 allocs/op BenchmarkStructSimpleCustomTypeFailureParallel-8 5300738 224.1 ns/op 432 B/op 10 allocs/op
BenchmarkStructFilteredSuccess-16 6312625 186.1 ns/op 216 B/op 5 allocs/op BenchmarkStructFilteredSuccess-8 4761807 251.1 ns/op 216 B/op 5 allocs/op
BenchmarkStructFilteredSuccessParallel-16 13684459 93.42 ns/op 216 B/op 5 allocs/op BenchmarkStructFilteredSuccessParallel-8 8792598 128.6 ns/op 216 B/op 5 allocs/op
BenchmarkStructFilteredFailure-16 6751482 171.2 ns/op 216 B/op 5 allocs/op BenchmarkStructFilteredFailure-8 5202573 232.1 ns/op 216 B/op 5 allocs/op
BenchmarkStructFilteredFailureParallel-16 14146070 86.93 ns/op 216 B/op 5 allocs/op BenchmarkStructFilteredFailureParallel-8 9591267 121.4 ns/op 216 B/op 5 allocs/op
BenchmarkStructPartialSuccess-16 6544448 177.3 ns/op 224 B/op 4 allocs/op BenchmarkStructPartialSuccess-8 5188512 231.6 ns/op 224 B/op 4 allocs/op
BenchmarkStructPartialSuccessParallel-16 13951946 88.73 ns/op 224 B/op 4 allocs/op BenchmarkStructPartialSuccessParallel-8 9179776 123.1 ns/op 224 B/op 4 allocs/op
BenchmarkStructPartialFailure-16 4075833 287.5 ns/op 440 B/op 9 allocs/op BenchmarkStructPartialFailure-8 3071212 392.5 ns/op 440 B/op 9 allocs/op
BenchmarkStructPartialFailureParallel-16 7490805 161.3 ns/op 440 B/op 9 allocs/op BenchmarkStructPartialFailureParallel-8 5344261 223.7 ns/op 440 B/op 9 allocs/op
BenchmarkStructExceptSuccess-16 4107187 281.4 ns/op 424 B/op 8 allocs/op BenchmarkStructExceptSuccess-8 3184230 375.0 ns/op 424 B/op 8 allocs/op
BenchmarkStructExceptSuccessParallel-16 15979173 80.86 ns/op 208 B/op 3 allocs/op BenchmarkStructExceptSuccessParallel-8 10090130 108.9 ns/op 208 B/op 3 allocs/op
BenchmarkStructExceptFailure-16 4434372 264.3 ns/op 424 B/op 8 allocs/op BenchmarkStructExceptFailure-8 3347226 357.7 ns/op 424 B/op 8 allocs/op
BenchmarkStructExceptFailureParallel-16 8081367 154.1 ns/op 424 B/op 8 allocs/op BenchmarkStructExceptFailureParallel-8 5654923 209.5 ns/op 424 B/op 8 allocs/op
BenchmarkStructSimpleCrossFieldSuccess-16 6459542 183.4 ns/op 56 B/op 3 allocs/op BenchmarkStructSimpleCrossFieldSuccess-8 5232265 229.1 ns/op 56 B/op 3 allocs/op
BenchmarkStructSimpleCrossFieldSuccessParallel-16 41013781 37.95 ns/op 56 B/op 3 allocs/op BenchmarkStructSimpleCrossFieldSuccessParallel-8 17436674 64.75 ns/op 56 B/op 3 allocs/op
BenchmarkStructSimpleCrossFieldFailure-16 4034998 292.1 ns/op 272 B/op 8 allocs/op BenchmarkStructSimpleCrossFieldFailure-8 3128613 383.6 ns/op 272 B/op 8 allocs/op
BenchmarkStructSimpleCrossFieldFailureParallel-16 11348446 115.3 ns/op 272 B/op 8 allocs/op BenchmarkStructSimpleCrossFieldFailureParallel-8 6994113 168.8 ns/op 272 B/op 8 allocs/op
BenchmarkStructSimpleCrossStructCrossFieldSuccess-16 4448528 267.7 ns/op 64 B/op 4 allocs/op BenchmarkStructSimpleCrossStructCrossFieldSuccess-8 3506487 340.9 ns/op 64 B/op 4 allocs/op
BenchmarkStructSimpleCrossStructCrossFieldSuccessParallel-16 26813619 48.33 ns/op 64 B/op 4 allocs/op BenchmarkStructSimpleCrossStructCrossFieldSuccessParallel-8 13431300 91.77 ns/op 64 B/op 4 allocs/op
BenchmarkStructSimpleCrossStructCrossFieldFailure-16 3090646 384.5 ns/op 288 B/op 9 allocs/op BenchmarkStructSimpleCrossStructCrossFieldFailure-8 2410566 500.9 ns/op 288 B/op 9 allocs/op
BenchmarkStructSimpleCrossStructCrossFieldFailureParallel-16 9870906 129.5 ns/op 288 B/op 9 allocs/op BenchmarkStructSimpleCrossStructCrossFieldFailureParallel-8 6344510 188.2 ns/op 288 B/op 9 allocs/op
BenchmarkStructSimpleSuccess-16 10675562 109.5 ns/op 0 B/op 0 allocs/op BenchmarkStructSimpleSuccess-8 8922726 133.8 ns/op 0 B/op 0 allocs/op
BenchmarkStructSimpleSuccessParallel-16 131159784 8.932 ns/op 0 B/op 0 allocs/op BenchmarkStructSimpleSuccessParallel-8 55291153 23.63 ns/op 0 B/op 0 allocs/op
BenchmarkStructSimpleFailure-16 4094979 286.6 ns/op 416 B/op 9 allocs/op BenchmarkStructSimpleFailure-8 3171553 378.4 ns/op 416 B/op 9 allocs/op
BenchmarkStructSimpleFailureParallel-16 7606663 157.9 ns/op 416 B/op 9 allocs/op BenchmarkStructSimpleFailureParallel-8 5571692 212.0 ns/op 416 B/op 9 allocs/op
BenchmarkStructComplexSuccess-16 2073470 576.0 ns/op 224 B/op 5 allocs/op BenchmarkStructComplexSuccess-8 1683750 714.5 ns/op 224 B/op 5 allocs/op
BenchmarkStructComplexSuccessParallel-16 7821831 161.3 ns/op 224 B/op 5 allocs/op BenchmarkStructComplexSuccessParallel-8 4578046 257.0 ns/op 224 B/op 5 allocs/op
BenchmarkStructComplexFailure-16 576358 2001 ns/op 3042 B/op 48 allocs/op BenchmarkStructComplexFailure-8 481585 2547 ns/op 3041 B/op 48 allocs/op
BenchmarkStructComplexFailureParallel-16 1000000 1171 ns/op 3041 B/op 48 allocs/op BenchmarkStructComplexFailureParallel-8 965764 1577 ns/op 3040 B/op 48 allocs/op
BenchmarkOneof-16 22503973 52.82 ns/op 0 B/op 0 allocs/op BenchmarkOneof-8 17380881 68.50 ns/op 0 B/op 0 allocs/op
BenchmarkOneofParallel-16 8538474 140.4 ns/op 0 B/op 0 allocs/op BenchmarkOneofParallel-8 8084733 153.5 ns/op 0 B/op 0 allocs/op
``` ```
Complementary Software Complementary Software
@@ -357,20 +348,6 @@ How to Contribute
Make a pull request... Make a pull request...
Maintenance and support for SDK major versions
----------------------------------------------
See prior discussion [here](https://github.com/go-playground/validator/discussions/1342) for more details.
This package is aligned with the [Go release policy](https://go.dev/doc/devel/release) in that support is guaranteed for
the two most recent major versions.
This does not mean the package will not work with older versions of Go, only that we reserve the right to increase the
MSGV(Minimum Supported Go Version) when the need arises to address Security issues/patches, OS issues & support or newly
introduced functionality that would greatly benefit the maintenance and/or usage of this package.
If and when the MSGV is increased it will be done so in a minimum of a `Minor` release bump.
License License
------- -------
Distributed under MIT License, please see license file within the code for more details. Distributed under MIT License, please see license file within the code for more details.
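The tag table earlier in this README (credit_card, e164, mongodb, datetime, ...) is easiest to read next to a concrete call. A minimal sketch against the vendored validator; the struct and field values are chosen only for illustration.

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

type PaymentRequest struct {
	Card  string `validate:"required,credit_card"`
	Phone string `validate:"required,e164"`
	DocID string `validate:"required,mongodb"`
}

func main() {
	validate := validator.New()

	req := PaymentRequest{
		Card:  "4242424242424242",         // passes the Luhn-based credit_card check
		Phone: "+8613800138000",           // e164 formatted number
		DocID: "507f1f77bcf86cd799439011", // 24-character hex ObjectID
	}

	if err := validate.Struct(req); err != nil {
		for _, fe := range err.(validator.ValidationErrors) {
			fmt.Println(fe.Namespace(), fe.Tag())
		}
		return
	}
	fmt.Println("valid")
}
```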

File diff suppressed because it is too large


@@ -21,7 +21,6 @@ const (
typeKeys typeKeys
typeEndKeys typeEndKeys
typeOmitNil typeOmitNil
typeOmitZero
) )
const ( const (
@@ -124,6 +123,7 @@ func (v *Validate) extractStructCache(current reflect.Value, sName string) *cStr
var customName string var customName string
for i := 0; i < numFields; i++ { for i := 0; i < numFields; i++ {
fld = typ.Field(i) fld = typ.Field(i)
if !v.privateFieldValidation && !fld.Anonymous && len(fld.PkgPath) > 0 { if !v.privateFieldValidation && !fld.Anonymous && len(fld.PkgPath) > 0 {
@@ -190,6 +190,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
} else { } else {
next, curr := v.parseFieldTagsRecursive(tagsVal, fieldName, t, true) next, curr := v.parseFieldTagsRecursive(tagsVal, fieldName, t, true)
current.next, current = next, curr current.next, current = next, curr
} }
continue continue
} }
@@ -208,6 +209,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
switch t { switch t {
case diveTag: case diveTag:
current.typeof = typeDive current.typeof = typeDive
continue
case keysTag: case keysTag:
current.typeof = typeKeys current.typeof = typeKeys
@@ -216,6 +218,8 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
panic(fmt.Sprintf("'%s' tag must be immediately preceded by the '%s' tag", keysTag, diveTag)) panic(fmt.Sprintf("'%s' tag must be immediately preceded by the '%s' tag", keysTag, diveTag))
} }
current.typeof = typeKeys
// need to pass along only keys tag // need to pass along only keys tag
// need to increment i to skip over the keys tags // need to increment i to skip over the keys tags
b := make([]byte, 0, 64) b := make([]byte, 0, 64)
@@ -223,6 +227,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
i++ i++
for ; i < len(tags); i++ { for ; i < len(tags); i++ {
b = append(b, tags[i]...) b = append(b, tags[i]...)
b = append(b, ',') b = append(b, ',')
@@ -232,6 +237,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
} }
current.keys, _ = v.parseFieldTagsRecursive(string(b[:len(b)-1]), fieldName, "", false) current.keys, _ = v.parseFieldTagsRecursive(string(b[:len(b)-1]), fieldName, "", false)
continue
case endKeysTag: case endKeysTag:
current.typeof = typeEndKeys current.typeof = typeEndKeys
@@ -243,21 +249,21 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
} }
return return
case omitzero:
current.typeof = typeOmitZero
continue
case omitempty: case omitempty:
current.typeof = typeOmitEmpty current.typeof = typeOmitEmpty
continue
case omitnil: case omitnil:
current.typeof = typeOmitNil current.typeof = typeOmitNil
continue
case structOnlyTag: case structOnlyTag:
current.typeof = typeStructOnly current.typeof = typeStructOnly
continue
case noStructLevelTag: case noStructLevelTag:
current.typeof = typeNoStructLevel current.typeof = typeNoStructLevel
continue
default: default:
if t == isdefault { if t == isdefault {
@@ -288,7 +294,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
if wrapper, ok := v.validations[current.tag]; ok { if wrapper, ok := v.validations[current.tag]; ok {
current.fn = wrapper.fn current.fn = wrapper.fn
current.runValidationWhenNil = wrapper.runValidationOnNil current.runValidationWhenNil = wrapper.runValidatinOnNil
} else { } else {
panic(strings.TrimSpace(fmt.Sprintf(undefinedValidation, current.tag, fieldName))) panic(strings.TrimSpace(fmt.Sprintf(undefinedValidation, current.tag, fieldName)))
} }
@@ -298,7 +304,7 @@ func (v *Validate) parseFieldTagsRecursive(tag string, fieldName string, alias s
} }
if len(vals) > 1 { if len(vals) > 1 {
current.param = strings.ReplaceAll(strings.ReplaceAll(vals[1], utf8HexComma, ","), utf8Pipe, "|") current.param = strings.Replace(strings.Replace(vals[1], utf8HexComma, ",", -1), utf8Pipe, "|", -1)
} }
} }
current.isBlockEnd = true current.isBlockEnd = true
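This hunk is the tag parser that recognizes dive, keys and endkeys (and, on one side of the diff only, omitzero). A short hedged sketch of the map validation those branches enable; the struct and values are assumptions for illustration.

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

type QuotaConfig struct {
	// keys ... endkeys validates each map key (non-empty, alphanumeric),
	// while the tag after endkeys applies to each value via dive.
	Limits map[string]int `validate:"required,dive,keys,required,alphanum,endkeys,gt=0"`
}

func main() {
	validate := validator.New()

	cfg := QuotaConfig{Limits: map[string]int{"pigpen1": 20, "": 5}}

	if err := validate.Struct(cfg); err != nil {
		for _, fe := range err.(validator.ValidationErrors) {
			// Only the empty map key is expected to fail here.
			fmt.Println(fe.Namespace(), fe.Tag())
		}
	}
}
```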

File diff suppressed because it is too large


@@ -1,79 +1,79 @@
package validator package validator
var iso4217 = map[string]struct{}{ var iso4217 = map[string]bool{
"AFN": {}, "EUR": {}, "ALL": {}, "DZD": {}, "USD": {}, "AFN": true, "EUR": true, "ALL": true, "DZD": true, "USD": true,
"AOA": {}, "XCD": {}, "ARS": {}, "AMD": {}, "AWG": {}, "AOA": true, "XCD": true, "ARS": true, "AMD": true, "AWG": true,
"AUD": {}, "AZN": {}, "BSD": {}, "BHD": {}, "BDT": {}, "AUD": true, "AZN": true, "BSD": true, "BHD": true, "BDT": true,
"BBD": {}, "BYN": {}, "BZD": {}, "XOF": {}, "BMD": {}, "BBD": true, "BYN": true, "BZD": true, "XOF": true, "BMD": true,
"INR": {}, "BTN": {}, "BOB": {}, "BOV": {}, "BAM": {}, "INR": true, "BTN": true, "BOB": true, "BOV": true, "BAM": true,
"BWP": {}, "NOK": {}, "BRL": {}, "BND": {}, "BGN": {}, "BWP": true, "NOK": true, "BRL": true, "BND": true, "BGN": true,
"BIF": {}, "CVE": {}, "KHR": {}, "XAF": {}, "CAD": {}, "BIF": true, "CVE": true, "KHR": true, "XAF": true, "CAD": true,
"KYD": {}, "CLP": {}, "CLF": {}, "CNY": {}, "COP": {}, "KYD": true, "CLP": true, "CLF": true, "CNY": true, "COP": true,
"COU": {}, "KMF": {}, "CDF": {}, "NZD": {}, "CRC": {}, "COU": true, "KMF": true, "CDF": true, "NZD": true, "CRC": true,
"HRK": {}, "CUP": {}, "CUC": {}, "ANG": {}, "CZK": {}, "HRK": true, "CUP": true, "CUC": true, "ANG": true, "CZK": true,
"DKK": {}, "DJF": {}, "DOP": {}, "EGP": {}, "SVC": {}, "DKK": true, "DJF": true, "DOP": true, "EGP": true, "SVC": true,
"ERN": {}, "SZL": {}, "ETB": {}, "FKP": {}, "FJD": {}, "ERN": true, "SZL": true, "ETB": true, "FKP": true, "FJD": true,
"XPF": {}, "GMD": {}, "GEL": {}, "GHS": {}, "GIP": {}, "XPF": true, "GMD": true, "GEL": true, "GHS": true, "GIP": true,
"GTQ": {}, "GBP": {}, "GNF": {}, "GYD": {}, "HTG": {}, "GTQ": true, "GBP": true, "GNF": true, "GYD": true, "HTG": true,
"HNL": {}, "HKD": {}, "HUF": {}, "ISK": {}, "IDR": {}, "HNL": true, "HKD": true, "HUF": true, "ISK": true, "IDR": true,
"XDR": {}, "IRR": {}, "IQD": {}, "ILS": {}, "JMD": {}, "XDR": true, "IRR": true, "IQD": true, "ILS": true, "JMD": true,
"JPY": {}, "JOD": {}, "KZT": {}, "KES": {}, "KPW": {}, "JPY": true, "JOD": true, "KZT": true, "KES": true, "KPW": true,
"KRW": {}, "KWD": {}, "KGS": {}, "LAK": {}, "LBP": {}, "KRW": true, "KWD": true, "KGS": true, "LAK": true, "LBP": true,
"LSL": {}, "ZAR": {}, "LRD": {}, "LYD": {}, "CHF": {}, "LSL": true, "ZAR": true, "LRD": true, "LYD": true, "CHF": true,
"MOP": {}, "MKD": {}, "MGA": {}, "MWK": {}, "MYR": {}, "MOP": true, "MKD": true, "MGA": true, "MWK": true, "MYR": true,
"MVR": {}, "MRU": {}, "MUR": {}, "XUA": {}, "MXN": {}, "MVR": true, "MRU": true, "MUR": true, "XUA": true, "MXN": true,
"MXV": {}, "MDL": {}, "MNT": {}, "MAD": {}, "MZN": {}, "MXV": true, "MDL": true, "MNT": true, "MAD": true, "MZN": true,
"MMK": {}, "NAD": {}, "NPR": {}, "NIO": {}, "NGN": {}, "MMK": true, "NAD": true, "NPR": true, "NIO": true, "NGN": true,
"OMR": {}, "PKR": {}, "PAB": {}, "PGK": {}, "PYG": {}, "OMR": true, "PKR": true, "PAB": true, "PGK": true, "PYG": true,
"PEN": {}, "PHP": {}, "PLN": {}, "QAR": {}, "RON": {}, "PEN": true, "PHP": true, "PLN": true, "QAR": true, "RON": true,
"RUB": {}, "RWF": {}, "SHP": {}, "WST": {}, "STN": {}, "RUB": true, "RWF": true, "SHP": true, "WST": true, "STN": true,
"SAR": {}, "RSD": {}, "SCR": {}, "SLL": {}, "SGD": {}, "SAR": true, "RSD": true, "SCR": true, "SLL": true, "SGD": true,
"XSU": {}, "SBD": {}, "SOS": {}, "SSP": {}, "LKR": {}, "XSU": true, "SBD": true, "SOS": true, "SSP": true, "LKR": true,
"SDG": {}, "SRD": {}, "SEK": {}, "CHE": {}, "CHW": {}, "SDG": true, "SRD": true, "SEK": true, "CHE": true, "CHW": true,
"SYP": {}, "TWD": {}, "TJS": {}, "TZS": {}, "THB": {}, "SYP": true, "TWD": true, "TJS": true, "TZS": true, "THB": true,
"TOP": {}, "TTD": {}, "TND": {}, "TRY": {}, "TMT": {}, "TOP": true, "TTD": true, "TND": true, "TRY": true, "TMT": true,
"UGX": {}, "UAH": {}, "AED": {}, "USN": {}, "UYU": {}, "UGX": true, "UAH": true, "AED": true, "USN": true, "UYU": true,
"UYI": {}, "UYW": {}, "UZS": {}, "VUV": {}, "VES": {}, "UYI": true, "UYW": true, "UZS": true, "VUV": true, "VES": true,
"VND": {}, "YER": {}, "ZMW": {}, "ZWL": {}, "XBA": {}, "VND": true, "YER": true, "ZMW": true, "ZWL": true, "XBA": true,
"XBB": {}, "XBC": {}, "XBD": {}, "XTS": {}, "XXX": {}, "XBB": true, "XBC": true, "XBD": true, "XTS": true, "XXX": true,
"XAU": {}, "XPD": {}, "XPT": {}, "XAG": {}, "XAU": true, "XPD": true, "XPT": true, "XAG": true,
} }
var iso4217_numeric = map[int]struct{}{ var iso4217_numeric = map[int]bool{
8: {}, 12: {}, 32: {}, 36: {}, 44: {}, 8: true, 12: true, 32: true, 36: true, 44: true,
48: {}, 50: {}, 51: {}, 52: {}, 60: {}, 48: true, 50: true, 51: true, 52: true, 60: true,
64: {}, 68: {}, 72: {}, 84: {}, 90: {}, 64: true, 68: true, 72: true, 84: true, 90: true,
96: {}, 104: {}, 108: {}, 116: {}, 124: {}, 96: true, 104: true, 108: true, 116: true, 124: true,
132: {}, 136: {}, 144: {}, 152: {}, 156: {}, 132: true, 136: true, 144: true, 152: true, 156: true,
170: {}, 174: {}, 188: {}, 191: {}, 192: {}, 170: true, 174: true, 188: true, 191: true, 192: true,
203: {}, 208: {}, 214: {}, 222: {}, 230: {}, 203: true, 208: true, 214: true, 222: true, 230: true,
232: {}, 238: {}, 242: {}, 262: {}, 270: {}, 232: true, 238: true, 242: true, 262: true, 270: true,
292: {}, 320: {}, 324: {}, 328: {}, 332: {}, 292: true, 320: true, 324: true, 328: true, 332: true,
340: {}, 344: {}, 348: {}, 352: {}, 356: {}, 340: true, 344: true, 348: true, 352: true, 356: true,
360: {}, 364: {}, 368: {}, 376: {}, 388: {}, 360: true, 364: true, 368: true, 376: true, 388: true,
392: {}, 398: {}, 400: {}, 404: {}, 408: {}, 392: true, 398: true, 400: true, 404: true, 408: true,
410: {}, 414: {}, 417: {}, 418: {}, 422: {}, 410: true, 414: true, 417: true, 418: true, 422: true,
426: {}, 430: {}, 434: {}, 446: {}, 454: {}, 426: true, 430: true, 434: true, 446: true, 454: true,
458: {}, 462: {}, 480: {}, 484: {}, 496: {}, 458: true, 462: true, 480: true, 484: true, 496: true,
498: {}, 504: {}, 512: {}, 516: {}, 524: {}, 498: true, 504: true, 512: true, 516: true, 524: true,
532: {}, 533: {}, 548: {}, 554: {}, 558: {}, 532: true, 533: true, 548: true, 554: true, 558: true,
566: {}, 578: {}, 586: {}, 590: {}, 598: {}, 566: true, 578: true, 586: true, 590: true, 598: true,
600: {}, 604: {}, 608: {}, 634: {}, 643: {}, 600: true, 604: true, 608: true, 634: true, 643: true,
646: {}, 654: {}, 682: {}, 690: {}, 694: {}, 646: true, 654: true, 682: true, 690: true, 694: true,
702: {}, 704: {}, 706: {}, 710: {}, 728: {}, 702: true, 704: true, 706: true, 710: true, 728: true,
748: {}, 752: {}, 756: {}, 760: {}, 764: {}, 748: true, 752: true, 756: true, 760: true, 764: true,
776: {}, 780: {}, 784: {}, 788: {}, 800: {}, 776: true, 780: true, 784: true, 788: true, 800: true,
807: {}, 818: {}, 826: {}, 834: {}, 840: {}, 807: true, 818: true, 826: true, 834: true, 840: true,
858: {}, 860: {}, 882: {}, 886: {}, 901: {}, 858: true, 860: true, 882: true, 886: true, 901: true,
927: {}, 928: {}, 929: {}, 930: {}, 931: {}, 927: true, 928: true, 929: true, 930: true, 931: true,
932: {}, 933: {}, 934: {}, 936: {}, 938: {}, 932: true, 933: true, 934: true, 936: true, 938: true,
940: {}, 941: {}, 943: {}, 944: {}, 946: {}, 940: true, 941: true, 943: true, 944: true, 946: true,
947: {}, 948: {}, 949: {}, 950: {}, 951: {}, 947: true, 948: true, 949: true, 950: true, 951: true,
952: {}, 953: {}, 955: {}, 956: {}, 957: {}, 952: true, 953: true, 955: true, 956: true, 957: true,
958: {}, 959: {}, 960: {}, 961: {}, 962: {}, 958: true, 959: true, 960: true, 961: true, 962: true,
963: {}, 964: {}, 965: {}, 967: {}, 968: {}, 963: true, 964: true, 965: true, 967: true, 968: true,
969: {}, 970: {}, 971: {}, 972: {}, 973: {}, 969: true, 970: true, 971: true, 972: true, 973: true,
975: {}, 976: {}, 977: {}, 978: {}, 979: {}, 975: true, 976: true, 977: true, 978: true, 979: true,
980: {}, 981: {}, 984: {}, 985: {}, 986: {}, 980: true, 981: true, 984: true, 985: true, 986: true,
990: {}, 994: {}, 997: {}, 999: {}, 990: true, 994: true, 997: true, 999: true,
} }
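The change shown in this hunk is the set representation: map[string]struct{} on one side, map[string]bool on the other. Both behave as sets; a neutral sketch of the two lookup styles, independent of the validator internals.

```go
package main

import "fmt"

func main() {
	// Empty-struct values carry no payload, so the set costs only its keys.
	structSet := map[string]struct{}{"USD": {}, "EUR": {}}
	_, ok := structSet["USD"]
	fmt.Println(ok) // true

	// Bool values read slightly more naturally but store one byte per entry,
	// and a missing key is indistinguishable from an explicit false.
	boolSet := map[string]bool{"USD": true, "EUR": true}
	fmt.Println(boolSet["USD"], boolSet["CNY"]) // true false
}
```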


@@ -188,7 +188,7 @@ Same as structonly tag except that any struct level validations will not run.
# Omit Empty # Omit Empty
Allows conditional validation, for example, if a field is not set with Allows conditional validation, for example if a field is not set with
a value (Determined by the "required" validator) then other validation a value (Determined by the "required" validator) then other validation
such as min or max won't run, but if a value is set validation will run. such as min or max won't run, but if a value is set validation will run.
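A small hedged example of the conditional behaviour described here; the field name is an assumption.

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

type Profile struct {
	// Empty string: omitempty stops the chain, so email never runs.
	// Non-empty string: the email check must then pass.
	Contact string `validate:"omitempty,email"`
}

func main() {
	validate := validator.New()

	fmt.Println(validate.Struct(Profile{Contact: ""}) == nil)             // true, validation skipped
	fmt.Println(validate.Struct(Profile{Contact: "not-an-email"}) == nil) // false, email runs and fails
}
```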
@@ -253,7 +253,7 @@ Example #2
This validates that the value is not the data types default zero value. This validates that the value is not the data types default zero value.
For numbers ensures value is not zero. For strings ensures value is For numbers ensures value is not zero. For strings ensures value is
not "". For booleans ensures value is not false. For slices, maps, pointers, interfaces, channels and functions not "". For slices, maps, pointers, interfaces, channels and functions
ensures the value is not nil. For structs ensures value is not the zero value when using WithRequiredStructEnabled. ensures the value is not nil. For structs ensures value is not the zero value when using WithRequiredStructEnabled.
Usage: required Usage: required
@@ -489,19 +489,12 @@ For strings, ints, and uints, oneof will ensure that the value
is one of the values in the parameter. The parameter should be is one of the values in the parameter. The parameter should be
a list of values separated by whitespace. Values may be a list of values separated by whitespace. Values may be
strings or numbers. To match strings with spaces in them, include strings or numbers. To match strings with spaces in them, include
the target string between single quotes. Kind of like an 'enum'. the target string between single quotes.
Usage: oneof=red green Usage: oneof=red green
oneof='red green' 'blue yellow' oneof='red green' 'blue yellow'
oneof=5 7 9 oneof=5 7 9
# One Of Case Insensitive
Works the same as oneof but is case insensitive and therefore only accepts strings.
Usage: oneofci=red green
oneofci='red green' 'blue yellow'
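A brief sketch of the oneof usage shown above; the case-insensitive oneofci variant referenced on only one side of this hunk exists only in newer validator releases. The struct and values are illustrative.

```go
package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

type PenState struct {
	Mode string `validate:"oneof=heating ventilating idle"`
}

func main() {
	validate := validator.New()

	fmt.Println(validate.Struct(PenState{Mode: "idle"}) == nil)    // true
	fmt.Println(validate.Struct(PenState{Mode: "Heating"}) == nil) // false: oneof is case sensitive
}
```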
# Greater Than # Greater Than
For numbers, this will ensure that the value is greater than the For numbers, this will ensure that the value is greater than the
@@ -756,20 +749,6 @@ in a field of the struct specified via a parameter.
// For slices of struct: // For slices of struct:
Usage: unique=field Usage: unique=field
# ValidateFn
This validates that an object responds to a method that can return error or bool.
By default it expects an interface `Validate() error` and check that the method
does not return an error. Other methods can be specified using two signatures:
If the method returns an error, it check if the return value is nil.
If the method returns a boolean, it checks if the value is true.
// to use the default method Validate() error
Usage: validateFn
// to use the custom method IsValid() bool (or error)
Usage: validateFn=IsValid
# Alpha Only # Alpha Only
This validates that a string value contains ASCII alpha characters only This validates that a string value contains ASCII alpha characters only
@@ -932,7 +911,7 @@ This will accept any uri the golang request uri accepts
# Urn RFC 2141 String # Urn RFC 2141 String
This validates that a string value contains a valid URN This validataes that a string value contains a valid URN
according to the RFC 2141 spec. according to the RFC 2141 spec.
Usage: urn_rfc2141 Usage: urn_rfc2141
@@ -973,7 +952,7 @@ Although an empty string is a valid base64 URL safe value, this will report
an empty string as an error, if you wish to accept an empty string as valid an empty string as an error, if you wish to accept an empty string as valid
you can use this with the omitempty tag. you can use this with the omitempty tag.
Usage: base64rawurl Usage: base64url
# Bitcoin Address # Bitcoin Address
@@ -987,7 +966,7 @@ Bitcoin Bech32 Address (segwit)
This validates that a string value contains a valid bitcoin Bech32 address as defined This validates that a string value contains a valid bitcoin Bech32 address as defined
by bip-0173 (https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki) by bip-0173 (https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki)
Special thanks to Pieter Wuille for providing reference implementations. Special thanks to Pieter Wuille for providng reference implementations.
Usage: btc_addr_bech32 Usage: btc_addr_bech32
@@ -1148,12 +1127,6 @@ This validates that a string value contains a valid longitude.
Usage: longitude Usage: longitude
# Employeer Identification Number EIN
This validates that a string value contains a valid U.S. Employer Identification Number.
Usage: ein
# Social Security Number SSN # Social Security Number SSN
This validates that a string value contains a valid U.S. Social Security Number. This validates that a string value contains a valid U.S. Social Security Number.
@@ -1326,7 +1299,7 @@ may not exist at the time of validation.
# HostPort # HostPort
This validates that a string value contains a valid DNS hostname and port that This validates that a string value contains a valid DNS hostname and port that
can be used to validate fields typically passed to sockets and connections. can be used to valiate fields typically passed to sockets and connections.
Usage: hostname_port Usage: hostname_port
@@ -1413,19 +1386,11 @@ This validates that a string value contains a valid credit card number using Luh
This validates that a string or (u)int value contains a valid checksum using the Luhn algorithm. This validates that a string or (u)int value contains a valid checksum using the Luhn algorithm.
# MongoDB # MongoDb ObjectID
This validates that a string is a valid 24 character hexadecimal string or valid connection string. This validates that a string is a valid 24 character hexadecimal string.
Usage: mongodb Usage: mongodb
mongodb_connection_string
Example:
type Test struct {
ObjectIdField string `validate:"mongodb"`
ConnectionStringField string `validate:"mongodb_connection_string"`
}
# Cron # Cron


@@ -24,6 +24,7 @@ type InvalidValidationError struct {
// Error returns InvalidValidationError message // Error returns InvalidValidationError message
func (e *InvalidValidationError) Error() string { func (e *InvalidValidationError) Error() string {
if e.Type == nil { if e.Type == nil {
return "validator: (nil)" return "validator: (nil)"
} }
@@ -40,9 +41,11 @@ type ValidationErrors []FieldError
// All information to create an error message specific to your application is contained within // All information to create an error message specific to your application is contained within
// the FieldError found within the ValidationErrors array // the FieldError found within the ValidationErrors array
func (ve ValidationErrors) Error() string { func (ve ValidationErrors) Error() string {
buff := bytes.NewBufferString("") buff := bytes.NewBufferString("")
for i := 0; i < len(ve); i++ { for i := 0; i < len(ve); i++ {
buff.WriteString(ve[i].Error()) buff.WriteString(ve[i].Error())
buff.WriteString("\n") buff.WriteString("\n")
} }
@@ -52,6 +55,7 @@ func (ve ValidationErrors) Error() string {
// Translate translates all of the ValidationErrors // Translate translates all of the ValidationErrors
func (ve ValidationErrors) Translate(ut ut.Translator) ValidationErrorsTranslations { func (ve ValidationErrors) Translate(ut ut.Translator) ValidationErrorsTranslations {
trans := make(ValidationErrorsTranslations) trans := make(ValidationErrorsTranslations)
var fe *fieldError var fe *fieldError
@@ -105,24 +109,22 @@ type FieldError interface {
// StructNamespace returns the namespace for the field error, with the field's // StructNamespace returns the namespace for the field error, with the field's
// actual name. // actual name.
// //
// eg. "User.FirstName" see Namespace for comparison // eq. "User.FirstName" see Namespace for comparison
// //
// NOTE: this field can be blank when validating a single primitive field // NOTE: this field can be blank when validating a single primitive field
// using validate.Field(...) as there is no way to extract its name // using validate.Field(...) as there is no way to extract its name
StructNamespace() string StructNamespace() string
// Field returns the field's name with the tag name taking precedence over the // Field returns the fields name with the tag name taking precedence over the
// field's actual name. // field's actual name.
// //
// `RegisterTagNameFunc` must be registered to get tag value. // eq. JSON name "fname"
//
// eg. JSON name "fname"
// see StructField for comparison // see StructField for comparison
Field() string Field() string
// StructField returns the field's actual name from the struct, when able to determine. // StructField returns the field's actual name from the struct, when able to determine.
// //
// eg. "FirstName" // eq. "FirstName"
// see Field for comparison // see Field for comparison
StructField() string StructField() string
@@ -202,6 +204,7 @@ func (fe *fieldError) StructNamespace() string {
// Field returns the field's name with the tag name taking precedence over the // Field returns the field's name with the tag name taking precedence over the
// field's actual name. // field's actual name.
func (fe *fieldError) Field() string { func (fe *fieldError) Field() string {
return fe.ns[len(fe.ns)-int(fe.fieldLen):] return fe.ns[len(fe.ns)-int(fe.fieldLen):]
// // return fe.field // // return fe.field
// fld := fe.ns[len(fe.ns)-int(fe.fieldLen):] // fld := fe.ns[len(fe.ns)-int(fe.fieldLen):]
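
The FieldError methods documented above (Namespace, Field, Tag, ...) are reached by asserting the error returned from Struct. A hedged sketch of that flow; the login struct and its values are made up.

package main

import (
	"errors"
	"fmt"

	"github.com/go-playground/validator/v10"
)

type login struct {
	Email string `validate:"required,email"`
	Host  string `validate:"hostname_port"`
}

func main() {
	err := validator.New().Struct(login{Email: "not-an-email", Host: "api.example.com"})

	var invalid *validator.InvalidValidationError
	if errors.As(err, &invalid) {
		panic(invalid) // a nil or non-struct value was passed in
	}

	var ves validator.ValidationErrors
	if errors.As(err, &ves) {
		for _, fe := range ves {
			// e.g. "login.Email" / "Email" / "email": Namespace vs Field as described above
			fmt.Println(fe.Namespace(), fe.Field(), fe.Tag())
		}
	}
}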

View File

@@ -1,9 +1,6 @@
package validator package validator
import ( import "regexp"
"regexp"
"sync"
)
var postCodePatternDict = map[string]string{ var postCodePatternDict = map[string]string{
"GB": `^GIR[ ]?0AA|((AB|AL|B|BA|BB|BD|BH|BL|BN|BR|BS|BT|CA|CB|CF|CH|CM|CO|CR|CT|CV|CW|DA|DD|DE|DG|DH|DL|DN|DT|DY|E|EC|EH|EN|EX|FK|FY|G|GL|GY|GU|HA|HD|HG|HP|HR|HS|HU|HX|IG|IM|IP|IV|JE|KA|KT|KW|KY|L|LA|LD|LE|LL|LN|LS|LU|M|ME|MK|ML|N|NE|NG|NN|NP|NR|NW|OL|OX|PA|PE|PH|PL|PO|PR|RG|RH|RM|S|SA|SE|SG|SK|SL|SM|SN|SO|SP|SR|SS|ST|SW|SY|TA|TD|TF|TN|TQ|TR|TS|TW|UB|W|WA|WC|WD|WF|WN|WR|WS|WV|YO|ZE)(\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}))|BFPO[ ]?\d{1,4}$`, "GB": `^GIR[ ]?0AA|((AB|AL|B|BA|BB|BD|BH|BL|BN|BR|BS|BT|CA|CB|CF|CH|CM|CO|CR|CT|CV|CW|DA|DD|DE|DG|DH|DL|DN|DT|DY|E|EC|EH|EN|EX|FK|FY|G|GL|GY|GU|HA|HD|HG|HP|HR|HS|HU|HX|IG|IM|IP|IV|JE|KA|KT|KW|KY|L|LA|LD|LE|LL|LN|LS|LU|M|ME|MK|ML|N|NE|NG|NN|NP|NR|NW|OL|OX|PA|PE|PH|PL|PO|PR|RG|RH|RM|S|SA|SE|SG|SK|SL|SM|SN|SO|SP|SR|SS|ST|SW|SY|TA|TD|TF|TN|TQ|TR|TS|TW|UB|W|WA|WC|WD|WF|WN|WR|WS|WV|YO|ZE)(\d[\dA-Z]?[ ]?\d[ABD-HJLN-UW-Z]{2}))|BFPO[ ]?\d{1,4}$`,
@@ -167,12 +164,9 @@ var postCodePatternDict = map[string]string{
"YT": `^976\d{2}$`, "YT": `^976\d{2}$`,
} }
var ( var postCodeRegexDict = map[string]*regexp.Regexp{}
postcodeRegexInit sync.Once
postCodeRegexDict = map[string]*regexp.Regexp{}
)
func initPostcodes() { func init() {
for countryCode, pattern := range postCodePatternDict { for countryCode, pattern := range postCodePatternDict {
postCodeRegexDict[countryCode] = regexp.MustCompile(pattern) postCodeRegexDict[countryCode] = regexp.MustCompile(pattern)
} }
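
One side of this hunk compiles the postcode pattern table lazily behind sync.Once instead of in init(). A generic sketch of that lazy-initialization pattern; the lookup helper is my own naming and the "GB" pattern is a truncated stand-in, not the real expression.

package main

import (
	"fmt"
	"regexp"
	"sync"
)

var patterns = map[string]string{
	"GB": `^GIR[ ]?0AA.*$`, // truncated stand-in for the real pattern
	"YT": `^976\d{2}$`,
}

var (
	compileOnce sync.Once
	compiled    = map[string]*regexp.Regexp{}
)

// lookup compiles the whole table exactly once, on first use,
// instead of paying the cost at package load time.
func lookup(cc string) (*regexp.Regexp, bool) {
	compileOnce.Do(func() {
		for k, p := range patterns {
			compiled[k] = regexp.MustCompile(p)
		}
	})
	re, ok := compiled[cc]
	return re, ok
}

func main() {
	if re, ok := lookup("YT"); ok {
		fmt.Println(re.MatchString("97610")) // true
	}
}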

View File

@@ -1,9 +1,6 @@
package validator package validator
import ( import "regexp"
"regexp"
"sync"
)
const ( const (
alphaRegexString = "^[a-zA-Z]+$" alphaRegexString = "^[a-zA-Z]+$"
@@ -69,97 +66,82 @@ const (
splitParamsRegexString = `'[^']*'|\S+` splitParamsRegexString = `'[^']*'|\S+`
bicRegexString = `^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$` bicRegexString = `^[A-Za-z]{6}[A-Za-z0-9]{2}([A-Za-z0-9]{3})?$`
semverRegexString = `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$` // numbered capture groups https://semver.org/ semverRegexString = `^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$` // numbered capture groups https://semver.org/
dnsRegexStringRFC1035Label = "^[a-z]([-a-z0-9]*[a-z0-9])?$" dnsRegexStringRFC1035Label = "^[a-z]([-a-z0-9]*[a-z0-9]){0,62}$"
cveRegexString = `^CVE-(1999|2\d{3})-(0[^0]\d{2}|0\d[^0]\d{1}|0\d{2}[^0]|[1-9]{1}\d{3,})$` // CVE Format Id https://cve.mitre.org/cve/identifiers/syntaxchange.html cveRegexString = `^CVE-(1999|2\d{3})-(0[^0]\d{2}|0\d[^0]\d{1}|0\d{2}[^0]|[1-9]{1}\d{3,})$` // CVE Format Id https://cve.mitre.org/cve/identifiers/syntaxchange.html
mongodbIdRegexString = "^[a-f\\d]{24}$" mongodbRegexString = "^[a-f\\d]{24}$"
mongodbConnStringRegexString = "^mongodb(\\+srv)?:\\/\\/(([a-zA-Z\\d]+):([a-zA-Z\\d$:\\/?#\\[\\]@]+)@)?(([a-z\\d.-]+)(:[\\d]+)?)((,(([a-z\\d.-]+)(:(\\d+))?))*)?(\\/[a-zA-Z-_]{1,64})?(\\?(([a-zA-Z]+)=([a-zA-Z\\d]+))(&(([a-zA-Z\\d]+)=([a-zA-Z\\d]+))?)*)?$" cronRegexString = `(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|(@every (\d+(ns|us|µs|ms|s|m|h))+)|((((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|\*) ?){5,7})`
cronRegexString = `(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|(@every (\d+(ns|us|µs|ms|s|m|h))+)|((((\d+,)+\d+|((\*|\d+)(\/|-)\d+)|\d+|\*) ?){5,7})`
spicedbIDRegexString = `^(([a-zA-Z0-9/_|\-=+]{1,})|\*)$` spicedbIDRegexString = `^(([a-zA-Z0-9/_|\-=+]{1,})|\*)$`
spicedbPermissionRegexString = "^([a-z][a-z0-9_]{1,62}[a-z0-9])?$" spicedbPermissionRegexString = "^([a-z][a-z0-9_]{1,62}[a-z0-9])?$"
spicedbTypeRegexString = "^([a-z][a-z0-9_]{1,61}[a-z0-9]/)?[a-z][a-z0-9_]{1,62}[a-z0-9]$" spicedbTypeRegexString = "^([a-z][a-z0-9_]{1,61}[a-z0-9]/)?[a-z][a-z0-9_]{1,62}[a-z0-9]$"
einRegexString = "^(\\d{2}-\\d{7})$"
) )
func lazyRegexCompile(str string) func() *regexp.Regexp {
var regex *regexp.Regexp
var once sync.Once
return func() *regexp.Regexp {
once.Do(func() {
regex = regexp.MustCompile(str)
})
return regex
}
}
var ( var (
alphaRegex = lazyRegexCompile(alphaRegexString) alphaRegex = regexp.MustCompile(alphaRegexString)
alphaNumericRegex = lazyRegexCompile(alphaNumericRegexString) alphaNumericRegex = regexp.MustCompile(alphaNumericRegexString)
alphaUnicodeRegex = lazyRegexCompile(alphaUnicodeRegexString) alphaUnicodeRegex = regexp.MustCompile(alphaUnicodeRegexString)
alphaUnicodeNumericRegex = lazyRegexCompile(alphaUnicodeNumericRegexString) alphaUnicodeNumericRegex = regexp.MustCompile(alphaUnicodeNumericRegexString)
numericRegex = lazyRegexCompile(numericRegexString) numericRegex = regexp.MustCompile(numericRegexString)
numberRegex = lazyRegexCompile(numberRegexString) numberRegex = regexp.MustCompile(numberRegexString)
hexadecimalRegex = lazyRegexCompile(hexadecimalRegexString) hexadecimalRegex = regexp.MustCompile(hexadecimalRegexString)
hexColorRegex = lazyRegexCompile(hexColorRegexString) hexColorRegex = regexp.MustCompile(hexColorRegexString)
rgbRegex = lazyRegexCompile(rgbRegexString) rgbRegex = regexp.MustCompile(rgbRegexString)
rgbaRegex = lazyRegexCompile(rgbaRegexString) rgbaRegex = regexp.MustCompile(rgbaRegexString)
hslRegex = lazyRegexCompile(hslRegexString) hslRegex = regexp.MustCompile(hslRegexString)
hslaRegex = lazyRegexCompile(hslaRegexString) hslaRegex = regexp.MustCompile(hslaRegexString)
e164Regex = lazyRegexCompile(e164RegexString) e164Regex = regexp.MustCompile(e164RegexString)
emailRegex = lazyRegexCompile(emailRegexString) emailRegex = regexp.MustCompile(emailRegexString)
base32Regex = lazyRegexCompile(base32RegexString) base32Regex = regexp.MustCompile(base32RegexString)
base64Regex = lazyRegexCompile(base64RegexString) base64Regex = regexp.MustCompile(base64RegexString)
base64URLRegex = lazyRegexCompile(base64URLRegexString) base64URLRegex = regexp.MustCompile(base64URLRegexString)
base64RawURLRegex = lazyRegexCompile(base64RawURLRegexString) base64RawURLRegex = regexp.MustCompile(base64RawURLRegexString)
iSBN10Regex = lazyRegexCompile(iSBN10RegexString) iSBN10Regex = regexp.MustCompile(iSBN10RegexString)
iSBN13Regex = lazyRegexCompile(iSBN13RegexString) iSBN13Regex = regexp.MustCompile(iSBN13RegexString)
iSSNRegex = lazyRegexCompile(iSSNRegexString) iSSNRegex = regexp.MustCompile(iSSNRegexString)
uUID3Regex = lazyRegexCompile(uUID3RegexString) uUID3Regex = regexp.MustCompile(uUID3RegexString)
uUID4Regex = lazyRegexCompile(uUID4RegexString) uUID4Regex = regexp.MustCompile(uUID4RegexString)
uUID5Regex = lazyRegexCompile(uUID5RegexString) uUID5Regex = regexp.MustCompile(uUID5RegexString)
uUIDRegex = lazyRegexCompile(uUIDRegexString) uUIDRegex = regexp.MustCompile(uUIDRegexString)
uUID3RFC4122Regex = lazyRegexCompile(uUID3RFC4122RegexString) uUID3RFC4122Regex = regexp.MustCompile(uUID3RFC4122RegexString)
uUID4RFC4122Regex = lazyRegexCompile(uUID4RFC4122RegexString) uUID4RFC4122Regex = regexp.MustCompile(uUID4RFC4122RegexString)
uUID5RFC4122Regex = lazyRegexCompile(uUID5RFC4122RegexString) uUID5RFC4122Regex = regexp.MustCompile(uUID5RFC4122RegexString)
uUIDRFC4122Regex = lazyRegexCompile(uUIDRFC4122RegexString) uUIDRFC4122Regex = regexp.MustCompile(uUIDRFC4122RegexString)
uLIDRegex = lazyRegexCompile(uLIDRegexString) uLIDRegex = regexp.MustCompile(uLIDRegexString)
md4Regex = lazyRegexCompile(md4RegexString) md4Regex = regexp.MustCompile(md4RegexString)
md5Regex = lazyRegexCompile(md5RegexString) md5Regex = regexp.MustCompile(md5RegexString)
sha256Regex = lazyRegexCompile(sha256RegexString) sha256Regex = regexp.MustCompile(sha256RegexString)
sha384Regex = lazyRegexCompile(sha384RegexString) sha384Regex = regexp.MustCompile(sha384RegexString)
sha512Regex = lazyRegexCompile(sha512RegexString) sha512Regex = regexp.MustCompile(sha512RegexString)
ripemd128Regex = lazyRegexCompile(ripemd128RegexString) ripemd128Regex = regexp.MustCompile(ripemd128RegexString)
ripemd160Regex = lazyRegexCompile(ripemd160RegexString) ripemd160Regex = regexp.MustCompile(ripemd160RegexString)
tiger128Regex = lazyRegexCompile(tiger128RegexString) tiger128Regex = regexp.MustCompile(tiger128RegexString)
tiger160Regex = lazyRegexCompile(tiger160RegexString) tiger160Regex = regexp.MustCompile(tiger160RegexString)
tiger192Regex = lazyRegexCompile(tiger192RegexString) tiger192Regex = regexp.MustCompile(tiger192RegexString)
aSCIIRegex = lazyRegexCompile(aSCIIRegexString) aSCIIRegex = regexp.MustCompile(aSCIIRegexString)
printableASCIIRegex = lazyRegexCompile(printableASCIIRegexString) printableASCIIRegex = regexp.MustCompile(printableASCIIRegexString)
multibyteRegex = lazyRegexCompile(multibyteRegexString) multibyteRegex = regexp.MustCompile(multibyteRegexString)
dataURIRegex = lazyRegexCompile(dataURIRegexString) dataURIRegex = regexp.MustCompile(dataURIRegexString)
latitudeRegex = lazyRegexCompile(latitudeRegexString) latitudeRegex = regexp.MustCompile(latitudeRegexString)
longitudeRegex = lazyRegexCompile(longitudeRegexString) longitudeRegex = regexp.MustCompile(longitudeRegexString)
sSNRegex = lazyRegexCompile(sSNRegexString) sSNRegex = regexp.MustCompile(sSNRegexString)
hostnameRegexRFC952 = lazyRegexCompile(hostnameRegexStringRFC952) hostnameRegexRFC952 = regexp.MustCompile(hostnameRegexStringRFC952)
hostnameRegexRFC1123 = lazyRegexCompile(hostnameRegexStringRFC1123) hostnameRegexRFC1123 = regexp.MustCompile(hostnameRegexStringRFC1123)
fqdnRegexRFC1123 = lazyRegexCompile(fqdnRegexStringRFC1123) fqdnRegexRFC1123 = regexp.MustCompile(fqdnRegexStringRFC1123)
btcAddressRegex = lazyRegexCompile(btcAddressRegexString) btcAddressRegex = regexp.MustCompile(btcAddressRegexString)
btcUpperAddressRegexBech32 = lazyRegexCompile(btcAddressUpperRegexStringBech32) btcUpperAddressRegexBech32 = regexp.MustCompile(btcAddressUpperRegexStringBech32)
btcLowerAddressRegexBech32 = lazyRegexCompile(btcAddressLowerRegexStringBech32) btcLowerAddressRegexBech32 = regexp.MustCompile(btcAddressLowerRegexStringBech32)
ethAddressRegex = lazyRegexCompile(ethAddressRegexString) ethAddressRegex = regexp.MustCompile(ethAddressRegexString)
uRLEncodedRegex = lazyRegexCompile(uRLEncodedRegexString) uRLEncodedRegex = regexp.MustCompile(uRLEncodedRegexString)
hTMLEncodedRegex = lazyRegexCompile(hTMLEncodedRegexString) hTMLEncodedRegex = regexp.MustCompile(hTMLEncodedRegexString)
hTMLRegex = lazyRegexCompile(hTMLRegexString) hTMLRegex = regexp.MustCompile(hTMLRegexString)
jWTRegex = lazyRegexCompile(jWTRegexString) jWTRegex = regexp.MustCompile(jWTRegexString)
splitParamsRegex = lazyRegexCompile(splitParamsRegexString) splitParamsRegex = regexp.MustCompile(splitParamsRegexString)
bicRegex = lazyRegexCompile(bicRegexString) bicRegex = regexp.MustCompile(bicRegexString)
semverRegex = lazyRegexCompile(semverRegexString) semverRegex = regexp.MustCompile(semverRegexString)
dnsRegexRFC1035Label = lazyRegexCompile(dnsRegexStringRFC1035Label) dnsRegexRFC1035Label = regexp.MustCompile(dnsRegexStringRFC1035Label)
cveRegex = lazyRegexCompile(cveRegexString) cveRegex = regexp.MustCompile(cveRegexString)
mongodbIdRegex = lazyRegexCompile(mongodbIdRegexString) mongodbRegex = regexp.MustCompile(mongodbRegexString)
mongodbConnectionRegex = lazyRegexCompile(mongodbConnStringRegexString) cronRegex = regexp.MustCompile(cronRegexString)
cronRegex = lazyRegexCompile(cronRegexString) spicedbIDRegex = regexp.MustCompile(spicedbIDRegexString)
spicedbIDRegex = lazyRegexCompile(spicedbIDRegexString) spicedbPermissionRegex = regexp.MustCompile(spicedbPermissionRegexString)
spicedbPermissionRegex = lazyRegexCompile(spicedbPermissionRegexString) spicedbTypeRegex = regexp.MustCompile(spicedbTypeRegexString)
spicedbTypeRegex = lazyRegexCompile(spicedbTypeRegexString)
einRegex = lazyRegexCompile(einRegexString)
) )
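
lazyRegexCompile (one side of the hunk above) wraps compilation in sync.Once and turns every package-level regex into a function value, so call sites invoke someRegex() to obtain the *regexp.Regexp. A self-contained sketch of the same pattern; the ULID pattern below is a sample, not the library's exact string.

package main

import (
	"fmt"
	"regexp"
	"sync"
)

// lazyRegexCompile mirrors the helper in the hunk above:
// the pattern is compiled once, on first call, not at package load time.
func lazyRegexCompile(str string) func() *regexp.Regexp {
	var regex *regexp.Regexp
	var once sync.Once
	return func() *regexp.Regexp {
		once.Do(func() { regex = regexp.MustCompile(str) })
		return regex
	}
}

var ulidRegex = lazyRegexCompile(`^[0-7][0-9A-HJKMNP-TV-Z]{25}$`) // sample pattern only

func main() {
	// call sites invoke the function value to get the compiled regexp
	fmt.Println(ulidRegex().MatchString("01ARZ3NDEKTSV4RRFFQ69G5FAV")) // true
}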

View File

@@ -46,9 +46,9 @@ type StructLevel interface {
// //
// NOTES: // NOTES:
// //
// fieldName and structFieldName get appended to the existing // fieldName and altName get appended to the existing namespace that
// namespace that validator is on. e.g. pass 'FirstName' or // validator is on. e.g. pass 'FirstName' or 'Names[0]' depending
// 'Names[0]' depending on the nesting // on the nesting
// //
// tag can be an existing validation tag or just something you make up // tag can be an existing validation tag or just something you make up
// and process on the flip side it's up to you. // and process on the flip side it's up to you.
@@ -107,6 +107,7 @@ func (v *validate) ExtractType(field reflect.Value) (reflect.Value, reflect.Kind
// ReportError reports an error just by passing the field and tag information // ReportError reports an error just by passing the field and tag information
func (v *validate) ReportError(field interface{}, fieldName, structFieldName, tag, param string) { func (v *validate) ReportError(field interface{}, fieldName, structFieldName, tag, param string) {
fv, kind, _ := v.extractTypeInternal(reflect.ValueOf(field), false) fv, kind, _ := v.extractTypeInternal(reflect.ValueOf(field), false)
if len(structFieldName) == 0 { if len(structFieldName) == 0 {
@@ -122,6 +123,7 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
} }
if kind == reflect.Invalid { if kind == reflect.Invalid {
v.errs = append(v.errs, v.errs = append(v.errs,
&fieldError{ &fieldError{
v: v.v, v: v.v,
@@ -147,7 +149,7 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
structNs: v.str2, structNs: v.str2,
fieldLen: uint8(len(fieldName)), fieldLen: uint8(len(fieldName)),
structfieldLen: uint8(len(structFieldName)), structfieldLen: uint8(len(structFieldName)),
value: getValue(fv), value: fv.Interface(),
param: param, param: param,
kind: kind, kind: kind,
typ: fv.Type(), typ: fv.Type(),
@@ -159,9 +161,11 @@ func (v *validate) ReportError(field interface{}, fieldName, structFieldName, ta
// //
// NOTE: this function prepends the current namespace to the relative ones. // NOTE: this function prepends the current namespace to the relative ones.
func (v *validate) ReportValidationErrors(relativeNamespace, relativeStructNamespace string, errs ValidationErrors) { func (v *validate) ReportValidationErrors(relativeNamespace, relativeStructNamespace string, errs ValidationErrors) {
var err *fieldError var err *fieldError
for i := 0; i < len(errs); i++ { for i := 0; i < len(errs); i++ {
err = errs[i].(*fieldError) err = errs[i].(*fieldError)
err.ns = string(append(append(v.ns, relativeNamespace...), err.ns...)) err.ns = string(append(append(v.ns, relativeNamespace...), err.ns...))
err.structNs = string(append(append(v.actualNs, relativeStructNamespace...), err.structNs...)) err.structNs = string(append(append(v.actualNs, relativeStructNamespace...), err.structNs...))
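
ReportError above is the hook custom struct-level validators use to append errors under the current namespace. A hedged sketch that registers one; the user type, the field names, and the reuse of the eqfield tag are illustrative choices, not something this diff prescribes.

package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// user is a hypothetical type whose cross-field rule is expressed at struct level.
type user struct {
	Password        string
	PasswordConfirm string
}

func userStructLevel(sl validator.StructLevel) {
	u := sl.Current().Interface().(user)
	if u.Password != u.PasswordConfirm {
		// fieldName / structFieldName get appended to the namespace, as documented above
		sl.ReportError(u.PasswordConfirm, "password_confirm", "PasswordConfirm", "eqfield", "Password")
	}
}

func main() {
	v := validator.New()
	v.RegisterStructValidation(userStructLevel, user{})
	fmt.Println(v.Struct(user{Password: "a", PasswordConfirm: "b"})) // reports the eqfield violation
}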

View File

@@ -13,6 +13,7 @@ import (
// It will dive into pointers, customTypes and return you the // It will dive into pointers, customTypes and return you the
// underlying value and it's kind. // underlying value and it's kind.
func (v *validate) extractTypeInternal(current reflect.Value, nullable bool) (reflect.Value, reflect.Kind, bool) { func (v *validate) extractTypeInternal(current reflect.Value, nullable bool) (reflect.Value, reflect.Kind, bool) {
BEGIN: BEGIN:
switch current.Kind() { switch current.Kind() {
case reflect.Ptr: case reflect.Ptr:
@@ -43,6 +44,7 @@ BEGIN:
default: default:
if v.v.hasCustomFuncs { if v.v.hasCustomFuncs {
if fn, ok := v.v.customFuncs[current.Type()]; ok { if fn, ok := v.v.customFuncs[current.Type()]; ok {
current = reflect.ValueOf(fn(current)) current = reflect.ValueOf(fn(current))
goto BEGIN goto BEGIN
@@ -59,6 +61,7 @@ BEGIN:
// NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field // NOTE: when not successful ok will be false, this can happen when a nested struct is nil and so the field
// could not be retrieved because it didn't exist. // could not be retrieved because it didn't exist.
func (v *validate) getStructFieldOKInternal(val reflect.Value, namespace string) (current reflect.Value, kind reflect.Kind, nullable bool, found bool) { func (v *validate) getStructFieldOKInternal(val reflect.Value, namespace string) (current reflect.Value, kind reflect.Kind, nullable bool, found bool) {
BEGIN: BEGIN:
current, kind, nullable = v.ExtractType(val) current, kind, nullable = v.ExtractType(val)
if kind == reflect.Invalid { if kind == reflect.Invalid {
@@ -71,6 +74,7 @@ BEGIN:
} }
switch kind { switch kind {
case reflect.Ptr, reflect.Interface: case reflect.Ptr, reflect.Interface:
return return
@@ -81,6 +85,7 @@ BEGIN:
var ns string var ns string
if !typ.ConvertibleTo(timeType) { if !typ.ConvertibleTo(timeType) {
idx := strings.Index(namespace, namespaceSeparator) idx := strings.Index(namespace, namespaceSeparator)
if idx != -1 { if idx != -1 {
@@ -217,7 +222,7 @@ BEGIN:
panic("Invalid field namespace") panic("Invalid field namespace")
} }
// asInt returns the parameter as an int64 // asInt returns the parameter as a int64
// or panics if it can't convert // or panics if it can't convert
func asInt(param string) int64 { func asInt(param string) int64 {
i, err := strconv.ParseInt(param, 0, 64) i, err := strconv.ParseInt(param, 0, 64)
@@ -251,6 +256,7 @@ func asIntFromType(t reflect.Type, param string) int64 {
// asUint returns the parameter as a uint64 // asUint returns the parameter as a uint64
// or panics if it can't convert // or panics if it can't convert
func asUint(param string) uint64 { func asUint(param string) uint64 {
i, err := strconv.ParseUint(param, 0, 64) i, err := strconv.ParseUint(param, 0, 64)
panicIf(err) panicIf(err)
@@ -265,7 +271,7 @@ func asFloat64(param string) float64 {
return i return i
} }
// asFloat32 returns the parameter as a float32 // asFloat64 returns the parameter as a float64
// or panics if it can't convert // or panics if it can't convert
func asFloat32(param string) float64 { func asFloat32(param string) float64 {
i, err := strconv.ParseFloat(param, 32) i, err := strconv.ParseFloat(param, 32)
@@ -276,6 +282,7 @@ func asFloat32(param string) float64 {
// asBool returns the parameter as a bool // asBool returns the parameter as a bool
// or panics if it can't convert // or panics if it can't convert
func asBool(param string) bool { func asBool(param string) bool {
i, err := strconv.ParseBool(param) i, err := strconv.ParseBool(param)
panicIf(err) panicIf(err)
@@ -290,13 +297,12 @@ func panicIf(err error) {
// Checks if field value matches regex. If fl.Field can be cast to Stringer, it uses the Stringer interfaces // Checks if field value matches regex. If fl.Field can be cast to Stringer, it uses the Stringer interfaces
// String() return value. Otherwise, it uses fl.Field's String() value. // String() return value. Otherwise, it uses fl.Field's String() value.
func fieldMatchesRegexByStringerValOrString(regexFn func() *regexp.Regexp, fl FieldLevel) bool { func fieldMatchesRegexByStringerValOrString(regex *regexp.Regexp, fl FieldLevel) bool {
regex := regexFn()
switch fl.Field().Kind() { switch fl.Field().Kind() {
case reflect.String: case reflect.String:
return regex.MatchString(fl.Field().String()) return regex.MatchString(fl.Field().String())
default: default:
if stringer, ok := getValue(fl.Field()).(fmt.Stringer); ok { if stringer, ok := fl.Field().Interface().(fmt.Stringer); ok {
return regex.MatchString(stringer.String()) return regex.MatchString(stringer.String())
} else { } else {
return regex.MatchString(fl.Field().String()) return regex.MatchString(fl.Field().String())

View File

@@ -32,12 +32,14 @@ type validate struct {
// parent and current will be the same the first run of validateStruct // parent and current will be the same the first run of validateStruct
func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, current reflect.Value, typ reflect.Type, ns []byte, structNs []byte, ct *cTag) { func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, current reflect.Value, typ reflect.Type, ns []byte, structNs []byte, ct *cTag) {
cs, ok := v.v.structCache.Get(typ) cs, ok := v.v.structCache.Get(typ)
if !ok { if !ok {
cs = v.v.extractStructCache(current, typ.Name()) cs = v.v.extractStructCache(current, typ.Name())
} }
if len(ns) == 0 && len(cs.name) != 0 { if len(ns) == 0 && len(cs.name) != 0 {
ns = append(ns, cs.name...) ns = append(ns, cs.name...)
ns = append(ns, '.') ns = append(ns, '.')
@@ -48,17 +50,21 @@ func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, cur
// ct is nil on top level struct, and structs as fields that have no tag info // ct is nil on top level struct, and structs as fields that have no tag info
// so if nil or if not nil and the structonly tag isn't present // so if nil or if not nil and the structonly tag isn't present
if ct == nil || ct.typeof != typeStructOnly { if ct == nil || ct.typeof != typeStructOnly {
var f *cField var f *cField
for i := 0; i < len(cs.fields); i++ { for i := 0; i < len(cs.fields); i++ {
f = cs.fields[i] f = cs.fields[i]
if v.isPartial { if v.isPartial {
if v.ffn != nil { if v.ffn != nil {
// used with StructFiltered // used with StructFiltered
if v.ffn(append(structNs, f.name...)) { if v.ffn(append(structNs, f.name...)) {
continue continue
} }
} else { } else {
// used with StructPartial & StructExcept // used with StructPartial & StructExcept
_, ok = v.includeExclude[string(append(structNs, f.name...))] _, ok = v.includeExclude[string(append(structNs, f.name...))]
@@ -77,6 +83,7 @@ func (v *validate) validateStruct(ctx context.Context, parent reflect.Value, cur
// first iteration will have no info about nostructlevel tag, and is checked prior to // first iteration will have no info about nostructlevel tag, and is checked prior to
// calling the next iteration of validateStruct called from traverseField. // calling the next iteration of validateStruct called from traverseField.
if cs.fn != nil { if cs.fn != nil {
v.slflParent = parent v.slflParent = parent
v.slCurrent = current v.slCurrent = current
v.ns = ns v.ns = ns
@@ -110,10 +117,6 @@ func (v *validate) traverseField(ctx context.Context, parent reflect.Value, curr
return return
} }
if ct.typeof == typeOmitZero {
return
}
if ct.hasTag { if ct.hasTag {
if kind == reflect.Invalid { if kind == reflect.Invalid {
v.str1 = string(append(ns, cf.altName...)) v.str1 = string(append(ns, cf.altName...))
@@ -235,19 +238,6 @@ OUTER:
ct = ct.next ct = ct.next
continue continue
case typeOmitZero:
v.slflParent = parent
v.flField = current
v.cf = cf
v.ct = ct
if !hasNotZeroValue(v) {
return
}
ct = ct.next
continue
case typeOmitNil: case typeOmitNil:
v.slflParent = parent v.slflParent = parent
v.flField = current v.flField = current
@@ -260,7 +250,7 @@ OUTER:
return return
} }
default: default:
if v.fldIsPointer && getValue(field) == nil { if v.fldIsPointer && field.Interface() == nil {
return return
} }
} }
@@ -284,6 +274,7 @@ OUTER:
reusableCF := &cField{} reusableCF := &cField{}
for i := 0; i < current.Len(); i++ { for i := 0; i < current.Len(); i++ {
i64 = int64(i) i64 = int64(i)
v.misc = append(v.misc[0:0], cf.name...) v.misc = append(v.misc[0:0], cf.name...)
@@ -296,6 +287,7 @@ OUTER:
if cf.namesEqual { if cf.namesEqual {
reusableCF.altName = reusableCF.name reusableCF.altName = reusableCF.name
} else { } else {
v.misc = append(v.misc[0:0], cf.altName...) v.misc = append(v.misc[0:0], cf.altName...)
v.misc = append(v.misc, '[') v.misc = append(v.misc, '[')
v.misc = strconv.AppendInt(v.misc, i64, 10) v.misc = strconv.AppendInt(v.misc, i64, 10)
@@ -312,7 +304,8 @@ OUTER:
reusableCF := &cField{} reusableCF := &cField{}
for _, key := range current.MapKeys() { for _, key := range current.MapKeys() {
pv = fmt.Sprintf("%v", key)
pv = fmt.Sprintf("%v", key.Interface())
v.misc = append(v.misc[0:0], cf.name...) v.misc = append(v.misc[0:0], cf.name...)
v.misc = append(v.misc, '[') v.misc = append(v.misc, '[')
@@ -337,18 +330,6 @@ OUTER:
// can be nil when just keys being validated // can be nil when just keys being validated
if ct.next != nil { if ct.next != nil {
v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct.next) v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct.next)
} else {
// Struct fallback when map values are structs
val := current.MapIndex(key)
switch val.Kind() {
case reflect.Ptr:
if val.Elem().Kind() == reflect.Struct {
// Dive into the struct so its own tags run
v.traverseField(ctx, parent, val, ns, structNs, reusableCF, nil)
}
case reflect.Struct:
v.traverseField(ctx, parent, val, ns, structNs, reusableCF, nil)
}
} }
} else { } else {
v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct) v.traverseField(ctx, parent, current.MapIndex(key), ns, structNs, reusableCF, ct)
@@ -368,6 +349,7 @@ OUTER:
v.misc = v.misc[0:0] v.misc = v.misc[0:0]
for { for {
// set Field Level fields // set Field Level fields
v.slflParent = parent v.slflParent = parent
v.flField = current v.flField = current
@@ -382,6 +364,7 @@ OUTER:
// drain rest of the 'or' values, then continue or leave // drain rest of the 'or' values, then continue or leave
for { for {
ct = ct.next ct = ct.next
if ct == nil { if ct == nil {
@@ -418,6 +401,7 @@ OUTER:
} }
if ct.hasAlias { if ct.hasAlias {
v.errs = append(v.errs, v.errs = append(v.errs,
&fieldError{ &fieldError{
v: v.v, v: v.v,
@@ -433,7 +417,9 @@ OUTER:
typ: typ, typ: typ,
}, },
) )
} else { } else {
tVal := string(v.misc)[1:] tVal := string(v.misc)[1:]
v.errs = append(v.errs, v.errs = append(v.errs,
@@ -497,6 +483,7 @@ OUTER:
ct = ct.next ct = ct.next
} }
} }
} }
func getValue(val reflect.Value) interface{} { func getValue(val reflect.Value) interface{} {
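
The slice and map loops above are what the dive (and keys/endkeys) tags walk through, building per-element namespaces such as Field[3] or Field[key]. A hedged sketch; the config struct and its values are invented.

package main

import (
	"fmt"

	"github.com/go-playground/validator/v10"
)

// Hypothetical config showing the traversal above: "dive" steps into each
// element, and "keys"/"endkeys" validate map keys separately from values.
type config struct {
	Hosts  []string          `validate:"required,dive,hostname_port"`
	Labels map[string]string `validate:"dive,keys,alphanum,endkeys,max=32"`
}

func main() {
	c := config{
		Hosts:  []string{"db.local:5432", "not a host"},
		Labels: map[string]string{"env!": "prod"}, // key fails alphanum
	}
	// errors are namespaced per element, e.g. config.Hosts[1] and config.Labels[env!]
	fmt.Println(validator.New().Struct(c))
}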

View File

@@ -21,7 +21,6 @@ const (
tagKeySeparator = "=" tagKeySeparator = "="
structOnlyTag = "structonly" structOnlyTag = "structonly"
noStructLevelTag = "nostructlevel" noStructLevelTag = "nostructlevel"
omitzero = "omitzero"
omitempty = "omitempty" omitempty = "omitempty"
omitnil = "omitnil" omitnil = "omitnil"
isdefault = "isdefault" isdefault = "isdefault"
@@ -76,7 +75,7 @@ type TagNameFunc func(field reflect.StructField) string
type internalValidationFuncWrapper struct { type internalValidationFuncWrapper struct {
fn FuncCtx fn FuncCtx
runValidationOnNil bool runValidatinOnNil bool
} }
// Validate contains the validator settings and cache // Validate contains the validator settings and cache
@@ -104,6 +103,7 @@ type Validate struct {
// in essence only parsing your validation tags once per struct type. // in essence only parsing your validation tags once per struct type.
// Using multiple instances neglects the benefit of caching. // Using multiple instances neglects the benefit of caching.
func New(options ...Option) *Validate { func New(options ...Option) *Validate {
tc := new(tagCache) tc := new(tagCache)
tc.m.Store(make(map[string]*cTag)) tc.m.Store(make(map[string]*cTag))
@@ -125,6 +125,7 @@ func New(options ...Option) *Validate {
// must copy validators for separate validations to be used in each instance // must copy validators for separate validations to be used in each instance
for k, val := range bakedInValidators { for k, val := range bakedInValidators {
switch k { switch k {
// these require that even if the value is nil that the validation should run, omitempty still overrides this behaviour // these require that even if the value is nil that the validation should run, omitempty still overrides this behaviour
case requiredIfTag, requiredUnlessTag, requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag, case requiredIfTag, requiredUnlessTag, requiredWithTag, requiredWithAllTag, requiredWithoutTag, requiredWithoutAllTag,
@@ -231,12 +232,30 @@ func (v *Validate) RegisterValidationCtx(tag string, fn FuncCtx, callValidationE
return v.registerValidation(tag, fn, false, nilCheckable) return v.registerValidation(tag, fn, false, nilCheckable)
} }
func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error {
if len(tag) == 0 {
return errors.New("function Key cannot be empty")
}
if fn == nil {
return errors.New("function cannot be empty")
}
_, ok := restrictedTags[tag]
if !bakedIn && (ok || strings.ContainsAny(tag, restrictedTagChars)) {
panic(fmt.Sprintf(restrictedTagErr, tag))
}
v.validations[tag] = internalValidationFuncWrapper{fn: fn, runValidatinOnNil: nilCheckable}
return nil
}
// RegisterAlias registers a mapping of a single validation tag that // RegisterAlias registers a mapping of a single validation tag that
// defines a common or complex set of validation(s) to simplify adding validation // defines a common or complex set of validation(s) to simplify adding validation
// to structs. // to structs.
// //
// NOTE: this function is not thread-safe it is intended that these all be registered prior to any validation // NOTE: this function is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterAlias(alias, tags string) { func (v *Validate) RegisterAlias(alias, tags string) {
_, ok := restrictedTags[alias] _, ok := restrictedTags[alias]
if ok || strings.ContainsAny(alias, restrictedTagChars) { if ok || strings.ContainsAny(alias, restrictedTagChars) {
@@ -260,6 +279,7 @@ func (v *Validate) RegisterStructValidation(fn StructLevelFunc, types ...interfa
// NOTE: // NOTE:
// - this method is not thread-safe it is intended that these all be registered prior to any validation // - this method is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterStructValidationCtx(fn StructLevelFuncCtx, types ...interface{}) { func (v *Validate) RegisterStructValidationCtx(fn StructLevelFuncCtx, types ...interface{}) {
if v.structLevelFuncs == nil { if v.structLevelFuncs == nil {
v.structLevelFuncs = make(map[reflect.Type]StructLevelFuncCtx) v.structLevelFuncs = make(map[reflect.Type]StructLevelFuncCtx)
} }
@@ -306,6 +326,7 @@ func (v *Validate) RegisterStructValidationMapRules(rules map[string]string, typ
// //
// NOTE: this method is not thread-safe it is intended that these all be registered prior to any validation // NOTE: this method is not thread-safe it is intended that these all be registered prior to any validation
func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{}) { func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{}) {
if v.customFuncs == nil { if v.customFuncs == nil {
v.customFuncs = make(map[reflect.Type]CustomTypeFunc) v.customFuncs = make(map[reflect.Type]CustomTypeFunc)
} }
@@ -319,6 +340,7 @@ func (v *Validate) RegisterCustomTypeFunc(fn CustomTypeFunc, types ...interface{
// RegisterTranslation registers translations against the provided tag. // RegisterTranslation registers translations against the provided tag.
func (v *Validate) RegisterTranslation(tag string, trans ut.Translator, registerFn RegisterTranslationsFunc, translationFn TranslationFunc) (err error) { func (v *Validate) RegisterTranslation(tag string, trans ut.Translator, registerFn RegisterTranslationsFunc, translationFn TranslationFunc) (err error) {
if v.transTagFunc == nil { if v.transTagFunc == nil {
v.transTagFunc = make(map[ut.Translator]map[string]TranslationFunc) v.transTagFunc = make(map[ut.Translator]map[string]TranslationFunc)
} }
@@ -352,6 +374,7 @@ func (v *Validate) Struct(s interface{}) error {
// It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise. // It returns InvalidValidationError for bad values passed in and nil or ValidationErrors as error otherwise.
// You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors. // You will need to assert the error if it's not nil eg. err.(validator.ValidationErrors) to access the array of errors.
func (v *Validate) StructCtx(ctx context.Context, s interface{}) (err error) { func (v *Validate) StructCtx(ctx context.Context, s interface{}) (err error) {
val := reflect.ValueOf(s) val := reflect.ValueOf(s)
top := val top := val
@@ -468,8 +491,10 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
name := typ.Name() name := typ.Name()
for _, k := range fields { for _, k := range fields {
flds := strings.Split(k, namespaceSeparator) flds := strings.Split(k, namespaceSeparator)
if len(flds) > 0 { if len(flds) > 0 {
vd.misc = append(vd.misc[0:0], name...) vd.misc = append(vd.misc[0:0], name...)
// Don't append empty name for unnamed structs // Don't append empty name for unnamed structs
if len(vd.misc) != 0 { if len(vd.misc) != 0 {
@@ -477,6 +502,7 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
} }
for _, s := range flds { for _, s := range flds {
idx := strings.Index(s, leftBracket) idx := strings.Index(s, leftBracket)
if idx != -1 { if idx != -1 {
@@ -492,6 +518,7 @@ func (v *Validate) StructPartialCtx(ctx context.Context, s interface{}, fields .
idx = strings.Index(s, leftBracket) idx = strings.Index(s, leftBracket)
} }
} else { } else {
vd.misc = append(vd.misc, s...) vd.misc = append(vd.misc, s...)
vd.includeExclude[string(vd.misc)] = struct{}{} vd.includeExclude[string(vd.misc)] = struct{}{}
} }
@@ -554,6 +581,7 @@ func (v *Validate) StructExceptCtx(ctx context.Context, s interface{}, fields ..
name := typ.Name() name := typ.Name()
for _, key := range fields { for _, key := range fields {
vd.misc = vd.misc[0:0] vd.misc = vd.misc[0:0]
if len(name) > 0 { if len(name) > 0 {
@@ -648,7 +676,7 @@ func (v *Validate) VarWithValue(field interface{}, other interface{}, tag string
} }
// VarWithValueCtx validates a single variable, against another variable/field's value using tag style validation and // VarWithValueCtx validates a single variable, against another variable/field's value using tag style validation and
// allows passing of contextual validation information via context.Context. // allows passing of contextual validation validation information via context.Context.
// eg. // eg.
// s1 := "abcd" // s1 := "abcd"
// s2 := "abcd" // s2 := "abcd"
@@ -680,20 +708,3 @@ func (v *Validate) VarWithValueCtx(ctx context.Context, field interface{}, other
v.pool.Put(vd) v.pool.Put(vd)
return return
} }
func (v *Validate) registerValidation(tag string, fn FuncCtx, bakedIn bool, nilCheckable bool) error {
if len(tag) == 0 {
return errors.New("function Key cannot be empty")
}
if fn == nil {
return errors.New("function cannot be empty")
}
_, ok := restrictedTags[tag]
if !bakedIn && (ok || strings.ContainsAny(tag, restrictedTagChars)) {
panic(fmt.Sprintf(restrictedTagErr, tag))
}
v.validations[tag] = internalValidationFuncWrapper{fn: fn, runValidationOnNil: nilCheckable}
return nil
}
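
registerValidation above is the common backend for RegisterValidation and RegisterValidationCtx; its nilCheckable argument corresponds to the optional callValidationEvenIfNull flag. A hedged sketch registering a custom tag; the tag name and rule are made up.

package main

import (
	"fmt"
	"strings"

	"github.com/go-playground/validator/v10"
)

func main() {
	v := validator.New()

	// Custom tag routed through registerValidation above; "pigtag" is an invented name.
	_ = v.RegisterValidation("pigtag", func(fl validator.FieldLevel) bool {
		return strings.HasPrefix(fl.Field().String(), "PIG-")
	})

	type tagged struct {
		ID string `validate:"pigtag"`
	}
	fmt.Println(v.Struct(tagged{ID: "COW-7"})) // fails the custom tag
}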

View File

@@ -56,9 +56,6 @@ linters:
- cyclop - cyclop
- containedctx - containedctx
- revive - revive
- nosnakecase
- exhaustruct
- depguard
issues: issues:
exclude-rules: exclude-rules:

View File

@@ -30,7 +30,7 @@ golangci-lint: | $(BIN_DIR)
GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \ GOLANGCI_LINT_TMP_DIR=$$(mktemp -d); \
cd $$GOLANGCI_LINT_TMP_DIR; \ cd $$GOLANGCI_LINT_TMP_DIR; \
go mod init tmp; \ go mod init tmp; \
GOBIN=$(BIN_DIR) go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.54.2; \ GOBIN=$(BIN_DIR) go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.48.0; \
rm -rf $$GOLANGCI_LINT_TMP_DIR; \ rm -rf $$GOLANGCI_LINT_TMP_DIR; \
} }

View File

@@ -52,7 +52,7 @@ func (e *Encoder) EncodeContext(ctx context.Context, v interface{}, optFuncs ...
rctx.Option.Flag |= encoder.ContextOption rctx.Option.Flag |= encoder.ContextOption
rctx.Option.Context = ctx rctx.Option.Context = ctx
err := e.encodeWithOption(rctx, v, optFuncs...) //nolint: contextcheck err := e.encodeWithOption(rctx, v, optFuncs...)
encoder.ReleaseRuntimeContext(rctx) encoder.ReleaseRuntimeContext(rctx)
return err return err
@@ -120,7 +120,7 @@ func marshalContext(ctx context.Context, v interface{}, optFuncs ...EncodeOption
optFunc(rctx.Option) optFunc(rctx.Option)
} }
buf, err := encode(rctx, v) //nolint: contextcheck buf, err := encode(rctx, v)
if err != nil { if err != nil {
encoder.ReleaseRuntimeContext(rctx) encoder.ReleaseRuntimeContext(rctx)
return nil, err return nil, err
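
EncodeContext above stores the context on the runtime context (rctx.Option.Context) before encoding, so context-aware marshalers can see it. A hedged sketch of calling it; the ctxKey type and the payload are invented.

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/goccy/go-json"
)

type ctxKey string

func main() {
	ctx := context.WithValue(context.Background(), ctxKey("tenant"), "farm-7")

	// EncodeContext carries ctx through the encode path shown above.
	enc := json.NewEncoder(os.Stdout)
	if err := enc.EncodeContext(ctx, map[string]int{"pens": 12}); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}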

View File

@@ -5,7 +5,6 @@ import (
"fmt" "fmt"
"reflect" "reflect"
"strings" "strings"
"sync"
"sync/atomic" "sync/atomic"
"unicode" "unicode"
"unsafe" "unsafe"
@@ -18,27 +17,22 @@ var (
typeAddr *runtime.TypeAddr typeAddr *runtime.TypeAddr
cachedDecoderMap unsafe.Pointer // map[uintptr]decoder cachedDecoderMap unsafe.Pointer // map[uintptr]decoder
cachedDecoder []Decoder cachedDecoder []Decoder
initOnce sync.Once
) )
func initDecoder() { func init() {
initOnce.Do(func() {
typeAddr = runtime.AnalyzeTypeAddr() typeAddr = runtime.AnalyzeTypeAddr()
if typeAddr == nil { if typeAddr == nil {
typeAddr = &runtime.TypeAddr{} typeAddr = &runtime.TypeAddr{}
} }
cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift+1) cachedDecoder = make([]Decoder, typeAddr.AddrRange>>typeAddr.AddrShift+1)
})
} }
func loadDecoderMap() map[uintptr]Decoder { func loadDecoderMap() map[uintptr]Decoder {
initDecoder()
p := atomic.LoadPointer(&cachedDecoderMap) p := atomic.LoadPointer(&cachedDecoderMap)
return *(*map[uintptr]Decoder)(unsafe.Pointer(&p)) return *(*map[uintptr]Decoder)(unsafe.Pointer(&p))
} }
func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) { func storeDecoder(typ uintptr, dec Decoder, m map[uintptr]Decoder) {
initDecoder()
newDecoderMap := make(map[uintptr]Decoder, len(m)+1) newDecoderMap := make(map[uintptr]Decoder, len(m)+1)
newDecoderMap[typ] = dec newDecoderMap[typ] = dec
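
loadDecoderMap/storeDecoder above keep the decoder cache behind an atomic pointer: readers do a single atomic load, writers copy the map, add the new entry, and swap the pointer. A generic sketch of that copy-on-write pattern; it is not the library's exact code, and the writer mutex is my own addition.

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

// Copy-on-write cache in the spirit of loadDecoderMap/storeDecoder above.
var cache atomic.Pointer[map[string]int]
var storeMu sync.Mutex // serializes writers only; readers never lock

func load(key string) (int, bool) {
	m := cache.Load()
	if m == nil {
		return 0, false
	}
	v, ok := (*m)[key]
	return v, ok
}

func store(key string, val int) {
	storeMu.Lock()
	defer storeMu.Unlock()
	old := cache.Load()
	next := make(map[string]int, 8)
	if old != nil {
		for k, v := range *old {
			next[k] = v
		}
	}
	next[key] = val
	cache.Store(&next)
}

func main() {
	store("decoder", 1)
	fmt.Println(load("decoder")) // 1 true
}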

View File

@@ -10,7 +10,6 @@ import (
) )
func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) { func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
initDecoder()
typeptr := uintptr(unsafe.Pointer(typ)) typeptr := uintptr(unsafe.Pointer(typ))
if typeptr > typeAddr.MaxTypeAddr { if typeptr > typeAddr.MaxTypeAddr {
return compileToGetDecoderSlowPath(typeptr, typ) return compileToGetDecoderSlowPath(typeptr, typ)

View File

@@ -13,7 +13,6 @@ import (
var decMu sync.RWMutex var decMu sync.RWMutex
func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) { func CompileToGetDecoder(typ *runtime.Type) (Decoder, error) {
initDecoder()
typeptr := uintptr(unsafe.Pointer(typ)) typeptr := uintptr(unsafe.Pointer(typ))
if typeptr > typeAddr.MaxTypeAddr { if typeptr > typeAddr.MaxTypeAddr {
return compileToGetDecoderSlowPath(typeptr, typ) return compileToGetDecoderSlowPath(typeptr, typ)

View File

@@ -85,7 +85,6 @@ func (d *ptrDecoder) Decode(ctx *RuntimeContext, cursor, depth int64, p unsafe.P
} }
c, err := d.dec.Decode(ctx, cursor, depth, newptr) c, err := d.dec.Decode(ctx, cursor, depth, newptr)
if err != nil { if err != nil {
*(*unsafe.Pointer)(p) = nil
return 0, err return 0, err
} }
cursor = c cursor = c

View File

@@ -147,7 +147,7 @@ func (d *unmarshalTextDecoder) DecodePath(ctx *RuntimeContext, cursor, depth int
return nil, 0, fmt.Errorf("json: unmarshal text decoder does not support decode path") return nil, 0, fmt.Errorf("json: unmarshal text decoder does not support decode path")
} }
func unquoteBytes(s []byte) (t []byte, ok bool) { //nolint: nonamedreturns func unquoteBytes(s []byte) (t []byte, ok bool) {
length := len(s) length := len(s)
if length < 2 || s[0] != '"' || s[length-1] != '"' { if length < 2 || s[0] != '"' || s[length-1] != '"' {
return return

View File

@@ -213,8 +213,8 @@ func compactString(dst, src []byte, cursor int64, escape bool) ([]byte, int64, e
dst = append(dst, src[start:cursor]...) dst = append(dst, src[start:cursor]...)
dst = append(dst, `\u202`...) dst = append(dst, `\u202`...)
dst = append(dst, hex[src[cursor+2]&0xF]) dst = append(dst, hex[src[cursor+2]&0xF])
start = cursor + 3
cursor += 2 cursor += 2
start = cursor + 3
} }
} }
switch c { switch c {

View File

@@ -5,7 +5,6 @@ import (
"encoding" "encoding"
"encoding/json" "encoding/json"
"reflect" "reflect"
"sync"
"sync/atomic" "sync/atomic"
"unsafe" "unsafe"
@@ -25,17 +24,14 @@ var (
cachedOpcodeSets []*OpcodeSet cachedOpcodeSets []*OpcodeSet
cachedOpcodeMap unsafe.Pointer // map[uintptr]*OpcodeSet cachedOpcodeMap unsafe.Pointer // map[uintptr]*OpcodeSet
typeAddr *runtime.TypeAddr typeAddr *runtime.TypeAddr
initEncoderOnce sync.Once
) )
func initEncoder() { func init() {
initEncoderOnce.Do(func() {
typeAddr = runtime.AnalyzeTypeAddr() typeAddr = runtime.AnalyzeTypeAddr()
if typeAddr == nil { if typeAddr == nil {
typeAddr = &runtime.TypeAddr{} typeAddr = &runtime.TypeAddr{}
} }
cachedOpcodeSets = make([]*OpcodeSet, typeAddr.AddrRange>>typeAddr.AddrShift+1) cachedOpcodeSets = make([]*OpcodeSet, typeAddr.AddrRange>>typeAddr.AddrShift+1)
})
} }
func loadOpcodeMap() map[uintptr]*OpcodeSet { func loadOpcodeMap() map[uintptr]*OpcodeSet {
@@ -484,7 +480,7 @@ func (c *Compiler) mapCode(typ *runtime.Type) (*MapCode, error) {
func (c *Compiler) listElemCode(typ *runtime.Type) (Code, error) { func (c *Compiler) listElemCode(typ *runtime.Type) (Code, error) {
switch { switch {
case c.implementsMarshalJSONType(typ) || c.implementsMarshalJSONType(runtime.PtrTo(typ)): case c.isPtrMarshalJSONType(typ):
return c.marshalJSONCode(typ) return c.marshalJSONCode(typ)
case !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType): case !typ.Implements(marshalTextType) && runtime.PtrTo(typ).Implements(marshalTextType):
return c.marshalTextCode(typ) return c.marshalTextCode(typ)

View File

@@ -4,7 +4,6 @@
package encoder package encoder
func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) { func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) {
initEncoder()
if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr { if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr {
codeSet, err := compileToGetCodeSetSlowPath(typeptr) codeSet, err := compileToGetCodeSetSlowPath(typeptr)
if err != nil { if err != nil {

View File

@@ -10,7 +10,6 @@ import (
var setsMu sync.RWMutex var setsMu sync.RWMutex
func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) { func CompileToGetCodeSet(ctx *RuntimeContext, typeptr uintptr) (*OpcodeSet, error) {
initEncoder()
if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr { if typeptr > typeAddr.MaxTypeAddr || typeptr < typeAddr.BaseTypeAddr {
codeSet, err := compileToGetCodeSetSlowPath(typeptr) codeSet, err := compileToGetCodeSetSlowPath(typeptr)
if err != nil { if err != nil {

View File

@@ -406,11 +406,6 @@ func AppendMarshalJSON(ctx *RuntimeContext, code *Opcode, b []byte, v interface{
rv = newV rv = newV
} }
} }
if rv.Kind() == reflect.Ptr && rv.IsNil() {
return AppendNull(ctx, b), nil
}
v = rv.Interface() v = rv.Interface()
var bb []byte var bb []byte
if (code.Flags & MarshalerContextFlags) != 0 { if (code.Flags & MarshalerContextFlags) != 0 {
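
The guard deleted on one side of this hunk returns JSON null when a json.Marshaler value is a nil pointer, instead of invoking MarshalJSON on a nil receiver. A hedged sketch of what that looks like from the caller's side; the box/doc types are invented, and the exact behaviour without the guard depends on the surrounding opcode checks.

package main

import (
	"fmt"

	"github.com/goccy/go-json"
)

// box implements json.Marshaler on a pointer receiver.
type box struct{ v string }

func (b *box) MarshalJSON() ([]byte, error) { return json.Marshal(b.v) }

type doc struct {
	B *box `json:"b"`
}

func main() {
	// With the nil-pointer guard shown in the hunk, a nil *box is encoded as null
	// rather than having MarshalJSON run on a nil receiver.
	out, err := json.Marshal(doc{})
	fmt.Println(string(out), err) // expected: {"b":null} <nil>
}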

View File

@@ -1,27 +1,3 @@
// This files's processing codes are inspired by https://github.com/segmentio/encoding.
// The license notation is as follows.
//
// # MIT License
//
// Copyright (c) 2019 Segment.io, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package encoder package encoder
import ( import (

View File

@@ -1,27 +1,3 @@
// This files's string processing codes are inspired by https://github.com/segmentio/encoding.
// The license notation is as follows.
//
// # MIT License
//
// Copyright (c) 2019 Segment.io, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package encoder package encoder
import ( import (

Some files were not shown because too many files have changed in this diff.