Clean up the mess from haphazardly AI-generated code

2025-09-07 20:31:38 +08:00
parent c377a0784d
commit ba513e0827
202 changed files with 11751 additions and 67183 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1152,4 +1152,4 @@ func (self *_Compiler) checkIfSkip(p *_Program, vt reflect.Type, c byte) int {
p.pin(j)
p.int(_OP_add, 1)
return s
}
}

View File

@@ -67,4 +67,4 @@ func (self *_Assembler) debug_instr(i int, v *_Instr) {
}
self.force_gc()
}
}
}

View File

@@ -30,14 +30,14 @@ import (
)
const (
_F_use_int64 = iota
_F_use_number
_F_disable_urc
_F_disable_unknown
_F_copy_string
_F_validate_string
_F_use_int64 = 0
_F_disable_urc = 2
_F_disable_unknown = 3
_F_copy_string = 4
_F_allow_control = 31
_F_use_number = types.B_USE_NUMBER
_F_validate_string = types.B_VALIDATE_STRING
_F_allow_control = types.B_ALLOW_CONTROL
)
type Options uint64
@@ -252,4 +252,4 @@ func Skip(data []byte) (start int, end int) {
ret := native.SkipOne(&s, &p, m, uint64(0))
types.FreeStateMachine(m)
return ret, p
}
}
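Note: the hunk above appears to replace the iota-derived decoder flag values with explicit bit positions, aliasing some of them to the native scanner's types.B_* constants. A minimal standalone sketch (not from the diff; the has helper and main are hypothetical) of how such bit-position flags combine into the Options bitmask and get tested later:

package main

import "fmt"

const (
    _F_use_int64       = 0
    _F_disable_urc     = 2
    _F_disable_unknown = 3
    _F_copy_string     = 4
)

type Options uint64

// has reports whether the flag at the given bit position is set.
func (o Options) has(bit uint) bool { return o&(1<<bit) != 0 }

func main() {
    opts := Options(1<<_F_use_int64 | 1<<_F_copy_string)
    fmt.Println(opts.has(_F_copy_string))     // true
    fmt.Println(opts.has(_F_disable_unknown)) // false
}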

View File

@@ -44,35 +44,12 @@ func (self SyntaxError) Description() string {
}
func (self SyntaxError) description() string {
i := 16
p := self.Pos - i
q := self.Pos + i
/* check for empty source */
if self.Src == "" {
return fmt.Sprintf("no sources available: %#v", self)
}
/* prevent slicing before the beginning */
if p < 0 {
p, q, i = 0, q - p, i + p
}
/* prevent slicing beyond the end */
if n := len(self.Src); q > n {
n = q - n
q = len(self.Src)
/* move the left bound if possible */
if p > n {
i += n
p -= n
}
}
/* left and right length */
x := clamp_zero(i)
y := clamp_zero(q - p - i - 1)
p, x, q, y := calcBounds(len(self.Src), self.Pos)
/* compose the error description */
return fmt.Sprintf(
@@ -85,6 +62,39 @@ func (self SyntaxError) description() string {
)
}
func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) {
if pos >= size || pos < 0 {
return 0, 0, size, 0
}
i := 16
lbound = pos - i
rbound = pos + i
/* prevent slicing before the beginning */
if lbound < 0 {
lbound, rbound, i = 0, rbound - lbound, i + lbound
}
/* prevent slicing beyond the end */
if n := size; rbound > n {
n = rbound - n
rbound = size
/* move the left bound if possible */
if lbound > n {
i += n
lbound -= n
}
}
/* left and right length */
lwidth = clamp_zero(i)
rwidth = clamp_zero(rbound - lbound - i - 1)
return
}
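// Illustrative sketch only, not part of the diff: it would live in a _test.go
// file of this package (imports "testing"). For a 10-byte source with the
// error at offset 3, calcBounds clips the window to the whole string and
// reports 3 bytes of context to the left of the error position and 6 to the
// right of it.
func TestCalcBoundsSketch(t *testing.T) {
    lbound, lwidth, rbound, rwidth := calcBounds(10, 3)
    if lbound != 0 || lwidth != 3 || rbound != 10 || rwidth != 6 {
        t.Fatalf("unexpected bounds: %d %d %d %d", lbound, lwidth, rbound, rwidth)
    }
}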
func (self SyntaxError) Message() string {
if self.Msg == "" {
return self.Code.Message()
@@ -107,16 +117,19 @@ var stackOverflow = &json.UnsupportedValueError {
Value : reflect.ValueOf("..."),
}
//go:nosplit
func error_wrap(src string, pos int, code types.ParsingError) error {
return SyntaxError {
return *error_wrap_heap(src, pos, code)
}
//go:noinline
func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError {
return &SyntaxError {
Pos : pos,
Src : src,
Code : code,
}
}
//go:nosplit
func error_type(vt *rt.GoType) error {
return &json.UnmarshalTypeError{Type: vt.Pack()}
}
@@ -158,7 +171,6 @@ func (self MismatchTypeError) Description() string {
return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}
//go:nosplit
func error_mismatch(src string, pos int, vt *rt.GoType) error {
return &MismatchTypeError {
Pos : pos,
@@ -167,12 +179,10 @@ func error_mismatch(src string, pos int, vt *rt.GoType) error {
}
}
//go:nosplit
func error_field(name string) error {
return errors.New("json: unknown field " + strconv.Quote(name))
}
//go:nosplit
func error_value(value string, vtype reflect.Type) error {
return &json.UnmarshalTypeError {
Type : vtype,

View File

@@ -1,776 +0,0 @@
// +build go1.15,!go1.17
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`encoding/json`
`fmt`
`reflect`
`strconv`
`github.com/bytedance/sonic/internal/jit`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/twitchyliquid64/golang-asm/obj`
`github.com/twitchyliquid64/golang-asm/obj/x86`
)
/** Crucial Registers:
*
* ST(BX) : ro, decoder stack
* DF(R10) : ro, decoder flags
* EP(R11) : wo, error pointer
* IP(R12) : ro, input pointer
* IL(R13) : ro, input length
* IC(R14) : rw, input cursor
* VP(R15) : ro, value pointer (to an interface{})
*/
const (
_VD_args = 8 // 8 bytes for passing arguments to this functions
_VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
_VD_saves = 40 // 40 bytes for saving the registers before CALL instructions
_VD_locals = 88 // 88 bytes for local variables
)
const (
_VD_offs = _VD_fargs + _VD_saves + _VD_locals
_VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
)
var (
_VAR_ss = _VAR_ss_Vt
_VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
)
var (
_VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
_VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
_VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
_VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
_VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
_VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
)
var (
_VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
_VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
_VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
_VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
)
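// Derived layout sketch for the frame constants above (go1.15/1.16 build,
// not part of the source):
//   SP+0   .. SP+64    _VD_fargs  : outgoing arguments for called functions
//   SP+64  .. SP+104   _VD_saves  : register spill area used by save()/load()
//   SP+104 .. SP+192   _VD_locals : df, the ss.* scanner fields, copy_string slots
//   SP+192 .. SP+200   saved parent BP (_VD_offs .. _VD_size)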
type _ValueDecoder struct {
jit.BaseAssembler
}
func (self *_ValueDecoder) build() uintptr {
self.Init(self.compile)
return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
}
/** Function Calling Helpers **/
func (self *_ValueDecoder) save(r ...obj.Addr) {
for i, v := range r {
if i > _VD_saves / 8 - 1 {
panic("too many registers to save")
} else {
self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
}
}
}
func (self *_ValueDecoder) load(r ...obj.Addr) {
for i, v := range r {
if i > _VD_saves / 8 - 1 {
panic("too many registers to load")
} else {
self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
}
}
}
func (self *_ValueDecoder) call(fn obj.Addr) {
self.Emit("MOVQ", fn, _AX) // MOVQ ${fn}, AX
self.Rjmp("CALL", _AX) // CALL AX
}
func (self *_ValueDecoder) call_go(fn obj.Addr) {
self.save(_REG_go...) // SAVE $REG_go
self.call(fn) // CALL ${fn}
self.load(_REG_go...) // LOAD $REG_go
}
/** Decoder Assembler **/
const (
_S_val = iota + 1
_S_arr
_S_arr_0
_S_obj
_S_obj_0
_S_obj_delim
_S_obj_sep
)
const (
_S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
_S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
_S_vmask = (1 << _S_val) | (1 << _S_arr_0)
)
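// Illustrative sketch only: the _S_*mask constants above are bit sets over
// the _S_* states, and the BTQ/JNC pairs emitted in compile() are the
// assembly form of a plain membership test like the hypothetical helper below.
func stateIn(mask, state uint64) bool { return mask&(1<<state) != 0 }
// e.g. stateIn(_S_vmask, _S_val) is true, while stateIn(_S_vmask, _S_obj) is false.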
const (
_A_init_len = 1
_A_init_cap = 16
)
const (
_ST_Sp = 0
_ST_Vt = _PtrBytes
_ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
)
var (
_V_true = jit.Imm(int64(pbool(true)))
_V_false = jit.Imm(int64(pbool(false)))
_F_value = jit.Imm(int64(native.S_value))
)
var (
_V_max = jit.Imm(int64(types.V_MAX))
_E_eof = jit.Imm(int64(types.ERR_EOF))
_E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
_E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
)
var (
_F_convTslice = jit.Func(convTslice)
_F_convTstring = jit.Func(convTstring)
_F_invalid_vtype = jit.Func(invalid_vtype)
)
var (
_T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
_T_bool = jit.Type(reflect.TypeOf(false))
_T_int64 = jit.Type(reflect.TypeOf(int64(0)))
_T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
_T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
_T_string = jit.Type(reflect.TypeOf(""))
_T_number = jit.Type(reflect.TypeOf(json.Number("")))
_T_float64 = jit.Type(reflect.TypeOf(float64(0)))
)
var _R_tab = map[int]string {
'[': "_decode_V_ARRAY",
'{': "_decode_V_OBJECT",
':': "_decode_V_KEY_SEP",
',': "_decode_V_ELEM_SEP",
']': "_decode_V_ARRAY_END",
'}': "_decode_V_OBJECT_END",
}
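// Illustrative sketch only: the 256-entry _decode_tab emitted at the end of
// compile() is a jump-table encoding of a byte switch over _R_tab, roughly
// the hypothetical function below.
func dispatch(c byte) string {
    switch c {
    case '[':
        return "_decode_V_ARRAY"
    case '{':
        return "_decode_V_OBJECT"
    case ':':
        return "_decode_V_KEY_SEP"
    case ',':
        return "_decode_V_ELEM_SEP"
    case ']':
        return "_decode_V_ARRAY_END"
    case '}':
        return "_decode_V_OBJECT_END"
    default:
        return "_decode_native" // everything else goes through the native value scanner
    }
}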
func (self *_ValueDecoder) compile() {
self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
/* initialize the state machine */
self.Emit("XORL", _CX, _CX) // XORL CX, CX
self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
/* initialize digital buffer first */
self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
/* add ST offset */
self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
self.Sjmp("JMP" , "_next") // JMP _next
/* set the value from previous round */
self.Link("_set_value") // _set_value:
self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
self.Emit("XORL" , _SI, _SI) // XORL SI, SI
self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
/* check for value stack */
self.Link("_next") // _next:
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_return") // JS _return
/* fast path: test up to 4 characters manually */
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
/* at least 1 to 3 spaces */
for i := 0; i < 3; i++ {
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
}
/* at least 4 spaces */
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
/* fast path: use lookup table to select decoder */
self.Link("_decode_fast") // _decode_fast:
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
self.Sref("_decode_tab", 4) // .... &_decode_tab
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
self.Rjmp("JMP" , _AX) // JMP AX
/* decode with native decoder */
self.Link("_decode_native") // _decode_native:
self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
self.call(_F_value) // CALL value
self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
/* check for errors */
self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_parsing_error")
self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
/* jump table selector */
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
self.Sref("_switch_table", 4) // .... &_switch_table
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
self.Rjmp("JMP" , _AX) // JMP AX
/** V_EOF **/
self.Link("_decode_V_EOF") // _decode_V_EOF:
self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
self.Sjmp("JMP" , "_error") // JMP _error
/** V_NULL **/
self.Link("_decode_V_NULL") // _decode_V_NULL:
self.Emit("XORL", _R8, _R8) // XORL R8, R8
self.Emit("XORL", _R9, _R9) // XORL R9, R9
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_TRUE **/
self.Link("_decode_V_TRUE") // _decode_V_TRUE:
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
// TODO: maybe modified by users?
self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_FALSE **/
self.Link("_decode_V_FALSE") // _decode_V_FALSE:
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_ARRAY **/
self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
/* create a new array */
self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
self.Emit("MOVQ", jit.Ptr(_SP, 24), _DX) // MOVQ 24(SP), DX
/* pack into an interface */
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
self.Emit("MOVQ", jit.Imm(_A_init_len), jit.Ptr(_SP, 8)) // MOVQ _A_init_len, 8(SP)
self.Emit("MOVQ", jit.Imm(_A_init_cap), jit.Ptr(_SP, 16)) // MOVQ _A_init_cap, 16(SP)
self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8) // MOVQ 24(SP), R8
/* replace current state with an array */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
/* add a new slot for the first element */
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/** V_OBJECT **/
self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
self.Emit("MOVQ", jit.Ptr(_SP, 0), _AX) // MOVQ (SP), AX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj, ST.Vt[CX]
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
self.Sjmp("JMP" , "_next") // JMP _next
/** V_STRING **/
self.Link("_decode_V_STRING") // _decode_V_STRING:
self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
/* check for escapes */
self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
self.Sjmp("JNE" , "_unquote") // JNE _unquote
self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
self.Sref("_copy_string_end", 4)
self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
self.Sjmp("JC", "copy_string")
self.Link("_copy_string_end")
self.Emit("XORL", _DX, _DX) // XORL DX, DX
/* strings with no escape sequences */
self.Link("_noescape") // _noescape:
self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
self.Sjmp("JC" , "_object_key") // JC _object_key
/* check for pre-packed strings, avoid 1 allocation */
self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
self.Emit("MOVQ" , _R8, jit.Ptr(_SP, 0)) // MOVQ R8, (SP)
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 8)) // MOVQ AX, 8(SP)
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
self.Emit("MOVQ" , jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
/* packed string already in R9 */
self.Link("_packed_str") // _packed_str:
self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* the string is an object key, get the map */
self.Link("_object_key")
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
/* add a new delimiter */
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
/* add a new slot int the map */
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
self.Emit("MOVQ", _DX, jit.Ptr(_SP, 0)) // MOVQ DX, (SP)
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 8)) // MOVQ SI, 8(SP)
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 16)) // MOVQ R9, 16(SP)
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 24)) // MOVQ AX, 24(SP)
self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
self.Emit("MOVQ", jit.Ptr(_SP, 32), _AX) // MOVQ 32(SP), AX
/* add to the pointer stack */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* allocate memory to store the string header and unquoted result */
self.Link("_unquote") // _unquote:
self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
self.Emit("MOVQ", _T_byte, _CX) // MOVQ _T_byte, CX
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
self.Emit("MOVB", jit.Imm(0), jit.Ptr(_SP, 16)) // MOVB $0, 16(SP)
self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R9) // MOVQ 24(SP), R9
/* prepare the unquoting parameters */
self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
self.Emit("NEGQ" , _CX) // NEGQ CX
self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
self.Emit("XORL" , _R8, _R8) // XORL R8, R8
self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
self.Emit("SETCC", _R8) // SETCC R8
self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
/* unquote the string, with R9 been preserved */
self.save(_R9) // SAVE R9
self.call(_F_unquote) // CALL unquote
self.load(_R9) // LOAD R9
/* check for errors */
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
self.Sjmp("JMP" , "_noescape") // JMP _noescape
/** V_DOUBLE **/
self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
self.Sjmp("JC" , "_use_number") // JC _use_number
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
/** V_INTEGER **/
self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
self.Sjmp("JC" , "_use_number") // JC _use_number
self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
self.Sjmp("JC" , "_use_int64") // JC _use_int64
self.Emit("MOVQ" , _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
self.Emit("CVTSQ2SD", _AX, _X0) // CVTSQ2SD AX, X0
/* represent numbers as `float64` */
self.Link("_use_float64") // _use_float64:
self.Emit("MOVSD", _X0, jit.Ptr(_SP, 0)) // MOVSD X0, (SP)
self.call_go(_F_convT64) // CALL_GO runtime.convT64
self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
self.Emit("MOVQ" , jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* represent numbers as `json.Number` */
self.Link("_use_number") // _use_number
self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
self.Emit("MOVQ", _SI, jit.Ptr(_SP, 0)) // MOVQ SI, (SP)
self.Emit("MOVQ", _CX, jit.Ptr(_SP, 8)) // MOVQ CX, 8(SP)
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
self.Emit("MOVQ", jit.Ptr(_SP, 16), _R9) // MOVQ 16(SP), R9
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* represent numbers as `int64` */
self.Link("_use_int64") // _use_int64:
self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
self.call_go(_F_convT64) // CALL_GO runtime.convT64
self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
self.Emit("MOVQ", jit.Ptr(_SP, 8), _R9) // MOVQ 8(SP), R9
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_KEY_SEP **/
self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
// self.Byte(0xcc)
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
self.Sjmp("JMP" , "_next") // JMP _next
/** V_ELEM_SEP **/
self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ" , _AX, jit.Imm(_S_arr)) // CMPQ _AX, _S_arr
self.Sjmp("JE" , "_array_sep") // JZ _next
self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
self.Sjmp("JMP" , "_next") // JMP _next
/* arrays */
self.Link("_array_sep")
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
self.Sjmp("JAE" , "_array_more") // JAE _array_more
/* add a slot for the new element */
self.Link("_array_append") // _array_append:
self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow")
self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
self.Sjmp("JMP" , "_next") // JMP _next
/** V_ARRAY_END **/
self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
self.Emit("XORL", _DX, _DX) // XORL DX, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
self.Sjmp("JE" , "_first_item") // JE _first_item
self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* first element of an array */
self.Link("_first_item") // _first_item:
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
self.Sjmp("JMP" , "_next") // JMP _next
/** V_OBJECT_END **/
self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
self.Emit("MOVL", jit.Imm(_S_omask_end), _DX) // MOVL _S_omask, DI
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX)
self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
self.Emit("XORL", _AX, _AX) // XORL AX, AX
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* return from decoder */
self.Link("_return") // _return:
self.Emit("XORL", _EP, _EP) // XORL EP, EP
self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
self.Link("_epilogue") // _epilogue:
self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
self.Emit("RET") // RET
/* array expand */
self.Link("_array_more") // _array_more:
self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
self.Emit("MOVOU", jit.Ptr(_SI, 0), _X0) // MOVOU (SI), X0
self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DX) // MOVQ 16(SI), DX
self.Emit("MOVQ" , _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
self.Emit("MOVOU", _X0, jit.Ptr(_SP, 8)) // MOVOU X0, 8(SP)
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 24)) // MOVQ DX, 24(SP)
self.Emit("SHLQ" , jit.Imm(1), _DX) // SHLQ $1, DX
self.Emit("MOVQ" , _DX, jit.Ptr(_SP, 32)) // MOVQ DX, 32(SP)
self.call_go(_F_growslice) // CALL_GO runtime.growslice
self.Emit("MOVQ" , jit.Ptr(_SP, 40), _DI) // MOVOU 40(SP), DI
self.Emit("MOVQ" , jit.Ptr(_SP, 48), _DX) // MOVOU 48(SP), DX
self.Emit("MOVQ" , jit.Ptr(_SP, 56), _AX) // MOVQ 56(SP), AX
/* update the slice */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
self.Sjmp("JMP" , "_array_append") // JMP _array_append
/* copy string */
self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
// self.Byte(0xcc)
self.Emit("MOVQ", _R8, _VAR_cs_p)
self.Emit("MOVQ", _AX, _VAR_cs_n)
self.Emit("MOVQ", _DI, _VAR_cs_LR)
self.Emit("MOVQ", _T_byte, jit.Ptr(_SP, 0))
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 8))
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
self.call_go(_F_makeslice)
self.Emit("MOVQ", jit.Ptr(_SP, 24), _R8)
self.Emit("MOVQ", _R8, _VAR_cs_d)
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 0))
self.Emit("MOVQ", _VAR_cs_p, _R8)
self.Emit("MOVQ", _R8, jit.Ptr(_SP, 8))
self.Emit("MOVQ", _VAR_cs_n, _AX)
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 16))
self.call_go(_F_memmove)
self.Emit("MOVQ", _VAR_cs_d, _R8)
self.Emit("MOVQ", _VAR_cs_n, _AX)
self.Emit("MOVQ", _VAR_cs_LR, _DI)
// self.Byte(0xcc)
self.Rjmp("JMP", _DI)
/* error handlers */
self.Link("_stack_overflow")
self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_vtype_error") // _vtype_error:
self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_invalid_char") // _invalid_char:
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_unquote_error") // _unquote_error:
self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
self.Link("_parsing_error") // _parsing_error:
self.Emit("NEGQ" , _AX) // NEGQ AX
self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
self.Link("_error") // _error:
self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
/* invalid value type, never returns */
self.Link("_invalid_vtype")
self.Emit("MOVQ", _AX, jit.Ptr(_SP, 0)) // MOVQ AX, (SP)
self.call(_F_invalid_vtype) // CALL invalid_type
self.Emit("UD2") // UD2
/* switch jump table */
self.Link("_switch_table") // _switch_table:
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
/* fast character lookup table */
self.Link("_decode_tab") // _decode_tab:
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
/* generate rest of the tabs */
for i := 1; i < 256; i++ {
if to, ok := _R_tab[i]; ok {
self.Sref(to, -int64(i) * 4)
} else {
self.Byte(0x00, 0x00, 0x00, 0x00)
}
}
}
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
self.Emit("MOVQ", _V_writeBarrier, _R10)
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
if saveDI {
self.save(_DI)
}
self.Emit("LEAQ", rec, _DI)
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
self.Rjmp("CALL", _R10)
if saveDI {
self.load(_DI)
}
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", _AX, rec)
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
panic("rec contains AX!")
}
self.Emit("MOVQ", _V_writeBarrier, _R10)
self.Emit("CMPL", jit.Ptr(_R10, 0), jit.Imm(0))
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", ptr, _AX)
if saveDI {
self.save(_DI)
}
self.Emit("LEAQ", rec, _DI)
self.Emit("MOVQ", _F_gcWriteBarrierAX, _R10) // MOVQ ${fn}, AX
self.Rjmp("CALL", _R10)
if saveDI {
self.load(_DI)
}
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", ptr, rec)
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
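// Hedged note: WritePtrAX and WriteRecNotAX above hand-roll what the Go
// compiler emits for an ordinary pointer store: test the runtime's
// writeBarrier flag and route the store through gcWriteBarrier when it is
// enabled, otherwise perform a plain MOVQ. In Go source the equivalent is
// just an assignment (storeEface is a hypothetical illustration):
func storeEface(dst *interface{}, src interface{}) {
    *dst = src // compiler-inserted GC write barrier when required
}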
/** Generic Decoder **/
var (
_subr_decode_value = new(_ValueDecoder).build()
)
//go:nosplit
func invalid_vtype(vt types.ValueType) {
throw(fmt.Sprintf("invalid value type: %d", vt))
}

View File

@@ -1,772 +0,0 @@
//go:build go1.17 && !go1.21
// +build go1.17,!go1.21
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`encoding/json`
`fmt`
`reflect`
`strconv`
`github.com/bytedance/sonic/internal/jit`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/twitchyliquid64/golang-asm/obj`
`github.com/twitchyliquid64/golang-asm/obj/x86`
)
/** Crucial Registers:
*
* ST(R13) && 0(SP) : ro, decoder stack
* DF(AX) : ro, decoder flags
* EP(BX) : wo, error pointer
* IP(R10) : ro, input pointer
* IL(R12) : ro, input length
* IC(R11) : rw, input cursor
* VP(R15) : ro, value pointer (to an interface{})
*/
const (
_VD_args = 8 // 8 bytes for passing arguments to this functions
_VD_fargs = 64 // 64 bytes for passing arguments to other Go functions
_VD_saves = 48 // 48 bytes for saving the registers before CALL instructions
_VD_locals = 96 // 96 bytes for local variables
)
const (
_VD_offs = _VD_fargs + _VD_saves + _VD_locals
_VD_size = _VD_offs + 8 // 8 bytes for the parent frame pointer
)
var (
_VAR_ss = _VAR_ss_Vt
_VAR_df = jit.Ptr(_SP, _VD_fargs + _VD_saves)
)
var (
_VAR_ss_Vt = jit.Ptr(_SP, _VD_fargs + _VD_saves + 8)
_VAR_ss_Dv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 16)
_VAR_ss_Iv = jit.Ptr(_SP, _VD_fargs + _VD_saves + 24)
_VAR_ss_Ep = jit.Ptr(_SP, _VD_fargs + _VD_saves + 32)
_VAR_ss_Db = jit.Ptr(_SP, _VD_fargs + _VD_saves + 40)
_VAR_ss_Dc = jit.Ptr(_SP, _VD_fargs + _VD_saves + 48)
)
var (
_VAR_R9 = jit.Ptr(_SP, _VD_fargs + _VD_saves + 56)
)
type _ValueDecoder struct {
jit.BaseAssembler
}
var (
_VAR_cs_LR = jit.Ptr(_SP, _VD_fargs + _VD_saves + 64)
_VAR_cs_p = jit.Ptr(_SP, _VD_fargs + _VD_saves + 72)
_VAR_cs_n = jit.Ptr(_SP, _VD_fargs + _VD_saves + 80)
_VAR_cs_d = jit.Ptr(_SP, _VD_fargs + _VD_saves + 88)
)
func (self *_ValueDecoder) build() uintptr {
self.Init(self.compile)
return *(*uintptr)(self.Load("decode_value", _VD_size, _VD_args, argPtrs_generic, localPtrs_generic))
}
/** Function Calling Helpers **/
func (self *_ValueDecoder) save(r ...obj.Addr) {
for i, v := range r {
if i > _VD_saves / 8 - 1 {
panic("too many registers to save")
} else {
self.Emit("MOVQ", v, jit.Ptr(_SP, _VD_fargs + int64(i) * 8))
}
}
}
func (self *_ValueDecoder) load(r ...obj.Addr) {
for i, v := range r {
if i > _VD_saves / 8 - 1 {
panic("too many registers to load")
} else {
self.Emit("MOVQ", jit.Ptr(_SP, _VD_fargs + int64(i) * 8), v)
}
}
}
func (self *_ValueDecoder) call(fn obj.Addr) {
self.Emit("MOVQ", fn, _R9) // MOVQ ${fn}, AX
self.Rjmp("CALL", _R9) // CALL AX
}
func (self *_ValueDecoder) call_go(fn obj.Addr) {
self.save(_REG_go...) // SAVE $REG_go
self.call(fn) // CALL ${fn}
self.load(_REG_go...) // LOAD $REG_go
}
func (self *_ValueDecoder) callc(fn obj.Addr) {
self.Emit("XCHGQ", _IP, _BP)
self.call(fn)
self.Emit("XCHGQ", _IP, _BP)
}
func (self *_ValueDecoder) call_c(fn obj.Addr) {
self.Emit("XCHGQ", _IC, _BX)
self.callc(fn)
self.Emit("XCHGQ", _IC, _BX)
}
/** Decoder Assembler **/
const (
_S_val = iota + 1
_S_arr
_S_arr_0
_S_obj
_S_obj_0
_S_obj_delim
_S_obj_sep
)
const (
_S_omask_key = (1 << _S_obj_0) | (1 << _S_obj_sep)
_S_omask_end = (1 << _S_obj_0) | (1 << _S_obj)
_S_vmask = (1 << _S_val) | (1 << _S_arr_0)
)
const (
_A_init_len = 1
_A_init_cap = 16
)
const (
_ST_Sp = 0
_ST_Vt = _PtrBytes
_ST_Vp = _PtrBytes * (types.MAX_RECURSE + 1)
)
var (
_V_true = jit.Imm(int64(pbool(true)))
_V_false = jit.Imm(int64(pbool(false)))
_F_value = jit.Imm(int64(native.S_value))
)
var (
_V_max = jit.Imm(int64(types.V_MAX))
_E_eof = jit.Imm(int64(types.ERR_EOF))
_E_invalid = jit.Imm(int64(types.ERR_INVALID_CHAR))
_E_recurse = jit.Imm(int64(types.ERR_RECURSE_EXCEED_MAX))
)
var (
_F_convTslice = jit.Func(convTslice)
_F_convTstring = jit.Func(convTstring)
_F_invalid_vtype = jit.Func(invalid_vtype)
)
var (
_T_map = jit.Type(reflect.TypeOf((map[string]interface{})(nil)))
_T_bool = jit.Type(reflect.TypeOf(false))
_T_int64 = jit.Type(reflect.TypeOf(int64(0)))
_T_eface = jit.Type(reflect.TypeOf((*interface{})(nil)).Elem())
_T_slice = jit.Type(reflect.TypeOf(([]interface{})(nil)))
_T_string = jit.Type(reflect.TypeOf(""))
_T_number = jit.Type(reflect.TypeOf(json.Number("")))
_T_float64 = jit.Type(reflect.TypeOf(float64(0)))
)
var _R_tab = map[int]string {
'[': "_decode_V_ARRAY",
'{': "_decode_V_OBJECT",
':': "_decode_V_KEY_SEP",
',': "_decode_V_ELEM_SEP",
']': "_decode_V_ARRAY_END",
'}': "_decode_V_OBJECT_END",
}
func (self *_ValueDecoder) compile() {
self.Emit("SUBQ", jit.Imm(_VD_size), _SP) // SUBQ $_VD_size, SP
self.Emit("MOVQ", _BP, jit.Ptr(_SP, _VD_offs)) // MOVQ BP, _VD_offs(SP)
self.Emit("LEAQ", jit.Ptr(_SP, _VD_offs), _BP) // LEAQ _VD_offs(SP), BP
/* initialize the state machine */
self.Emit("XORL", _CX, _CX) // XORL CX, CX
self.Emit("MOVQ", _DF, _VAR_df) // MOVQ DF, df
/* initialize digital buffer first */
self.Emit("MOVQ", jit.Imm(_MaxDigitNums), _VAR_ss_Dc) // MOVQ $_MaxDigitNums, ss.Dcap
self.Emit("LEAQ", jit.Ptr(_ST, _DbufOffset), _AX) // LEAQ _DbufOffset(ST), AX
self.Emit("MOVQ", _AX, _VAR_ss_Db) // MOVQ AX, ss.Dbuf
/* add ST offset */
self.Emit("ADDQ", jit.Imm(_FsmOffset), _ST) // ADDQ _FsmOffset, _ST
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WriteRecNotAX(0, _VP, jit.Ptr(_ST, _ST_Vp), false) // MOVQ VP, ST.Vp[0]
self.Emit("MOVQ", jit.Imm(_S_val), jit.Ptr(_ST, _ST_Vt)) // MOVQ _S_val, ST.Vt[0]
self.Sjmp("JMP" , "_next") // JMP _next
/* set the value from previous round */
self.Link("_set_value") // _set_value:
self.Emit("MOVL" , jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_vtype_error") // JNC _vtype_error
self.Emit("XORL" , _SI, _SI) // XORL SI, SI
self.Emit("SUBQ" , jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("XCHGQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // XCHGQ ST.Vp[CX], SI
self.Emit("MOVQ" , _R8, jit.Ptr(_SI, 0)) // MOVQ R8, (SI)
self.WriteRecNotAX(1, _R9, jit.Ptr(_SI, 8), false) // MOVQ R9, 8(SI)
/* check for value stack */
self.Link("_next") // _next:
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _AX) // MOVQ ST.Sp, AX
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_return") // JS _return
/* fast path: test up to 4 characters manually */
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
self.Emit("MOVQ" , jit.Imm(_BM_space), _DX) // MOVQ _BM_space, DX
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
/* at least 1 to 3 spaces */
for i := 0; i < 3; i++ {
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
self.Emit("CMPQ" , _AX, jit.Imm(' ')) // CMPQ AX, $' '
self.Sjmp("JA" , "_decode_fast") // JA _decode_fast
self.Emit("BTQ" , _AX, _DX) // BTQ _AX, _DX
self.Sjmp("JNC" , "_decode_fast") // JNC _decode_fast
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
}
/* at least 4 spaces */
self.Emit("CMPQ" , _IC, _IL) // CMPQ IC, IL
self.Sjmp("JAE" , "_decode_V_EOF") // JAE _decode_V_EOF
self.Emit("MOVBQZX", jit.Sib(_IP, _IC, 1, 0), _AX) // MOVBQZX (IP)(IC), AX
/* fast path: use lookup table to select decoder */
self.Link("_decode_fast") // _decode_fast:
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
self.Sref("_decode_tab", 4) // .... &_decode_tab
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, 0), _AX) // MOVLQSX (DI)(AX*4), AX
self.Emit("TESTQ" , _AX, _AX) // TESTQ AX, AX
self.Sjmp("JZ" , "_decode_native") // JZ _decode_native
self.Emit("ADDQ" , jit.Imm(1), _IC) // ADDQ $1, IC
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
self.Rjmp("JMP" , _AX) // JMP AX
/* decode with native decoder */
self.Link("_decode_native") // _decode_native:
self.Emit("MOVQ", _IP, _DI) // MOVQ IP, DI
self.Emit("MOVQ", _IL, _SI) // MOVQ IL, SI
self.Emit("MOVQ", _IC, _DX) // MOVQ IC, DX
self.Emit("LEAQ", _VAR_ss, _CX) // LEAQ ss, CX
self.Emit("MOVQ", _VAR_df, _R8) // MOVQ $df, R8
self.Emit("BTSQ", jit.Imm(_F_allow_control), _R8) // ANDQ $1<<_F_allow_control, R8
self.callc(_F_value) // CALL value
self.Emit("MOVQ", _AX, _IC) // MOVQ AX, IC
/* check for errors */
self.Emit("MOVQ" , _VAR_ss_Vt, _AX) // MOVQ ss.Vt, AX
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_parsing_error")
self.Sjmp("JZ" , "_invalid_vtype") // JZ _invalid_vtype
self.Emit("CMPQ" , _AX, _V_max) // CMPQ AX, _V_max
self.Sjmp("JA" , "_invalid_vtype") // JA _invalid_vtype
/* jump table selector */
self.Byte(0x48, 0x8d, 0x3d) // LEAQ ?(PC), DI
self.Sref("_switch_table", 4) // .... &_switch_table
self.Emit("MOVLQSX", jit.Sib(_DI, _AX, 4, -4), _AX) // MOVLQSX -4(DI)(AX*4), AX
self.Emit("ADDQ" , _DI, _AX) // ADDQ DI, AX
self.Rjmp("JMP" , _AX) // JMP AX
/** V_EOF **/
self.Link("_decode_V_EOF") // _decode_V_EOF:
self.Emit("MOVL", _E_eof, _EP) // MOVL _E_eof, EP
self.Sjmp("JMP" , "_error") // JMP _error
/** V_NULL **/
self.Link("_decode_V_NULL") // _decode_V_NULL:
self.Emit("XORL", _R8, _R8) // XORL R8, R8
self.Emit("XORL", _R9, _R9) // XORL R9, R9
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_TRUE **/
self.Link("_decode_V_TRUE") // _decode_V_TRUE:
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
// TODO: maybe modified by users?
self.Emit("MOVQ", _V_true, _R9) // MOVQ _V_true, R9
self.Emit("LEAQ", jit.Ptr(_IC, -4), _DI) // LEAQ -4(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_FALSE **/
self.Link("_decode_V_FALSE") // _decode_V_FALSE:
self.Emit("MOVQ", _T_bool, _R8) // MOVQ _T_bool, R8
self.Emit("MOVQ", _V_false, _R9) // MOVQ _V_false, R9
self.Emit("LEAQ", jit.Ptr(_IC, -5), _DI) // LEAQ -5(IC), DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_ARRAY **/
self.Link("_decode_V_ARRAY") // _decode_V_ARRAY
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
/* create a new array */
self.Emit("MOVQ", _T_eface, _AX) // MOVQ _T_eface, AX
self.Emit("MOVQ", jit.Imm(_A_init_len), _BX) // MOVQ _A_init_len, BX
self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX) // MOVQ _A_init_cap, CX
self.call_go(_F_makeslice) // CALL_GO runtime.makeslice
/* pack into an interface */
self.Emit("MOVQ", jit.Imm(_A_init_len), _BX) // MOVQ _A_init_len, BX
self.Emit("MOVQ", jit.Imm(_A_init_cap), _CX) // MOVQ _A_init_cap, CX
self.call_go(_F_convTslice) // CALL_GO runtime.convTslice
self.Emit("MOVQ", _AX, _R8) // MOVQ AX, R8
/* replace current state with an array */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Imm(_S_arr), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr, ST.Vt[CX]
self.Emit("MOVQ", _T_slice, _AX) // MOVQ _T_slice, AX
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 0)) // MOVQ AX, (SI)
self.WriteRecNotAX(2, _R8, jit.Ptr(_SI, 8), false) // MOVQ R8, 8(SI)
/* add a new slot for the first element */
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
self.Emit("MOVQ", jit.Ptr(_R8, 0), _AX) // MOVQ (R8), AX
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WritePtrAX(3, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
self.Emit("MOVQ", jit.Imm(_S_arr_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_arr_0, ST.Vt[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/** V_OBJECT **/
self.Link("_decode_V_OBJECT") // _decode_V_OBJECT:
self.Emit("MOVL", jit.Imm(_S_vmask), _DX) // MOVL _S_vmask, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DX) // BTQ AX, DX
self.Sjmp("JNC" , "_invalid_char") // JNC _invalid_char
self.call_go(_F_makemap_small) // CALL_GO runtime.makemap_small
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Imm(_S_obj_0), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_0, ST.Vt[CX]
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", _T_map, _DX) // MOVQ _T_map, DX
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 0)) // MOVQ DX, (SI)
self.WritePtrAX(4, jit.Ptr(_SI, 8), false) // MOVQ AX, 8(SI)
self.Sjmp("JMP" , "_next") // JMP _next
/** V_STRING **/
self.Link("_decode_V_STRING") // _decode_V_STRING:
self.Emit("MOVQ", _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
self.Emit("MOVQ", _IC, _AX) // MOVQ IC, AX
self.Emit("SUBQ", _CX, _AX) // SUBQ CX, AX
/* check for escapes */
self.Emit("CMPQ", _VAR_ss_Ep, jit.Imm(-1)) // CMPQ ss.Ep, $-1
self.Sjmp("JNE" , "_unquote") // JNE _unquote
self.Emit("SUBQ", jit.Imm(1), _AX) // SUBQ $1, AX
self.Emit("LEAQ", jit.Sib(_IP, _CX, 1, 0), _R8) // LEAQ (IP)(CX), R8
self.Byte(0x48, 0x8d, 0x3d) // LEAQ (PC), DI
self.Sref("_copy_string_end", 4)
self.Emit("BTQ", jit.Imm(_F_copy_string), _VAR_df)
self.Sjmp("JC", "copy_string")
self.Link("_copy_string_end")
self.Emit("XORL", _DX, _DX)
/* strings with no escape sequences */
self.Link("_noescape") // _noescape:
self.Emit("MOVL", jit.Imm(_S_omask_key), _DI) // MOVL _S_omask, DI
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _SI) // MOVQ ST.Vt[CX], SI
self.Emit("BTQ" , _SI, _DI) // BTQ SI, DI
self.Sjmp("JC" , "_object_key") // JC _object_key
/* check for pre-packed strings, avoid 1 allocation */
self.Emit("TESTQ", _DX, _DX) // TESTQ DX, DX
self.Sjmp("JNZ" , "_packed_str") // JNZ _packed_str
self.Emit("MOVQ" , _AX, _BX) // MOVQ AX, BX
self.Emit("MOVQ" , _R8, _AX) // MOVQ R8, AX
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
/* packed string already in R9 */
self.Link("_packed_str") // _packed_str:
self.Emit("MOVQ", _T_string, _R8) // MOVQ _T_string, R8
self.Emit("MOVQ", _VAR_ss_Iv, _DI) // MOVQ ss.Iv, DI
self.Emit("SUBQ", jit.Imm(1), _DI) // SUBQ $1, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* the string is an object key, get the map */
self.Link("_object_key")
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
/* add a new delimiter */
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.Emit("MOVQ", jit.Imm(_S_obj_delim), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_obj_delim, ST.Vt[CX]
/* add a new slot int the map */
self.Emit("MOVQ", _AX, _DI) // MOVQ AX, DI
self.Emit("MOVQ", _T_map, _AX) // MOVQ _T_map, AX
self.Emit("MOVQ", _SI, _BX) // MOVQ SI, BX
self.Emit("MOVQ", _R8, _CX) // MOVQ R9, CX
self.call_go(_F_mapassign_faststr) // CALL_GO runtime.mapassign_faststr
/* add to the pointer stack */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.WritePtrAX(6, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ AX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* allocate memory to store the string header and unquoted result */
self.Link("_unquote") // _unquote:
self.Emit("ADDQ", jit.Imm(15), _AX) // ADDQ $15, AX
self.Emit("MOVQ", _T_byte, _BX) // MOVQ _T_byte, BX
self.Emit("MOVB", jit.Imm(0), _CX) // MOVB $0, CX
self.call_go(_F_mallocgc) // CALL_GO runtime.mallocgc
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
/* prepare the unquoting parameters */
self.Emit("MOVQ" , _VAR_ss_Iv, _CX) // MOVQ ss.Iv, CX
self.Emit("LEAQ" , jit.Sib(_IP, _CX, 1, 0), _DI) // LEAQ (IP)(CX), DI
self.Emit("NEGQ" , _CX) // NEGQ CX
self.Emit("LEAQ" , jit.Sib(_IC, _CX, 1, -1), _SI) // LEAQ -1(IC)(CX), SI
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _DX) // LEAQ 16(R8), DX
self.Emit("LEAQ" , _VAR_ss_Ep, _CX) // LEAQ ss.Ep, CX
self.Emit("XORL" , _R8, _R8) // XORL R8, R8
self.Emit("BTQ" , jit.Imm(_F_disable_urc), _VAR_df) // BTQ ${_F_disable_urc}, fv
self.Emit("SETCC", _R8) // SETCC R8
self.Emit("SHLQ" , jit.Imm(types.B_UNICODE_REPLACE), _R8) // SHLQ ${types.B_UNICODE_REPLACE}, R8
/* unquote the string, with R9 been preserved */
self.Emit("MOVQ", _R9, _VAR_R9) // SAVE R9
self.call_c(_F_unquote) // CALL unquote
self.Emit("MOVQ", _VAR_R9, _R9) // LOAD R9
/* check for errors */
self.Emit("TESTQ", _AX, _AX) // TESTQ AX, AX
self.Sjmp("JS" , "_unquote_error") // JS _unquote_error
self.Emit("MOVL" , jit.Imm(1), _DX) // MOVL $1, DX
self.Emit("LEAQ" , jit.Ptr(_R9, 16), _R8) // ADDQ $16, R8
self.Emit("MOVQ" , _R8, jit.Ptr(_R9, 0)) // MOVQ R8, (R9)
self.Emit("MOVQ" , _AX, jit.Ptr(_R9, 8)) // MOVQ AX, 8(R9)
self.Sjmp("JMP" , "_noescape") // JMP _noescape
/** V_DOUBLE **/
self.Link("_decode_V_DOUBLE") // _decode_V_DOUBLE:
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
self.Sjmp("JC" , "_use_number") // JC _use_number
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
self.Sjmp("JMP" , "_use_float64") // JMP _use_float64
/** V_INTEGER **/
self.Link("_decode_V_INTEGER") // _decode_V_INTEGER:
self.Emit("BTQ" , jit.Imm(_F_use_number), _VAR_df) // BTQ _F_use_number, df
self.Sjmp("JC" , "_use_number") // JC _use_number
self.Emit("BTQ" , jit.Imm(_F_use_int64), _VAR_df) // BTQ _F_use_int64, df
self.Sjmp("JC" , "_use_int64") // JC _use_int64
//TODO: use ss.Dv directly
self.Emit("MOVSD", _VAR_ss_Dv, _X0) // MOVSD ss.Dv, X0
/* represent numbers as `float64` */
self.Link("_use_float64") // _use_float64:
self.Emit("MOVQ" , _X0, _AX) // MOVQ X0, AX
self.call_go(_F_convT64) // CALL_GO runtime.convT64
self.Emit("MOVQ" , _T_float64, _R8) // MOVQ _T_float64, R8
self.Emit("MOVQ" , _AX, _R9) // MOVQ AX, R9
self.Emit("MOVQ" , _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* represent numbers as `json.Number` */
self.Link("_use_number") // _use_number
self.Emit("MOVQ", _VAR_ss_Ep, _AX) // MOVQ ss.Ep, AX
self.Emit("LEAQ", jit.Sib(_IP, _AX, 1, 0), _SI) // LEAQ (IP)(AX), SI
self.Emit("MOVQ", _IC, _CX) // MOVQ IC, CX
self.Emit("SUBQ", _AX, _CX) // SUBQ AX, CX
self.Emit("MOVQ", _SI, _AX) // MOVQ SI, AX
self.Emit("MOVQ", _CX, _BX) // MOVQ CX, BX
self.call_go(_F_convTstring) // CALL_GO runtime.convTstring
self.Emit("MOVQ", _T_number, _R8) // MOVQ _T_number, R8
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/* represent numbers as `int64` */
self.Link("_use_int64") // _use_int64:
self.Emit("MOVQ", _VAR_ss_Iv, _AX) // MOVQ ss.Iv, AX
self.call_go(_F_convT64) // CALL_GO runtime.convT64
self.Emit("MOVQ", _T_int64, _R8) // MOVQ _T_int64, R8
self.Emit("MOVQ", _AX, _R9) // MOVQ AX, R9
self.Emit("MOVQ", _VAR_ss_Ep, _DI) // MOVQ ss.Ep, DI
self.Sjmp("JMP" , "_set_value") // JMP _set_value
/** V_KEY_SEP **/
self.Link("_decode_V_KEY_SEP") // _decode_V_KEY_SEP:
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ", _AX, jit.Imm(_S_obj_delim)) // CMPQ AX, _S_obj_delim
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX]
self.Emit("MOVQ", jit.Imm(_S_obj), jit.Sib(_ST, _CX, 8, _ST_Vt - 8)) // MOVQ _S_obj, ST.Vt[CX - 1]
self.Sjmp("JMP" , "_next") // JMP _next
/** V_ELEM_SEP **/
self.Link("_decode_V_ELEM_SEP") // _decode_V_ELEM_SEP:
self.Emit("MOVQ" , jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ" , jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ" , _AX, jit.Imm(_S_arr))
self.Sjmp("JE" , "_array_sep") // JZ _next
self.Emit("CMPQ" , _AX, jit.Imm(_S_obj)) // CMPQ _AX, _S_arr
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("MOVQ" , jit.Imm(_S_obj_sep), jit.Sib(_ST, _CX, 8, _ST_Vt))
self.Sjmp("JMP" , "_next") // JMP _next
/* arrays */
self.Link("_array_sep")
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _DX) // MOVQ 8(SI), DX
self.Emit("CMPQ", _DX, jit.Ptr(_SI, 16)) // CMPQ DX, 16(SI)
self.Sjmp("JAE" , "_array_more") // JAE _array_more
/* add a slot for the new element */
self.Link("_array_append") // _array_append:
self.Emit("ADDQ", jit.Imm(1), jit.Ptr(_SI, 8)) // ADDQ $1, 8(SI)
self.Emit("MOVQ", jit.Ptr(_SI, 0), _SI) // MOVQ (SI), SI
self.Emit("ADDQ", jit.Imm(1), _CX) // ADDQ $1, CX
self.Emit("CMPQ", _CX, jit.Imm(types.MAX_RECURSE)) // CMPQ CX, ${types.MAX_RECURSE}
self.Sjmp("JAE" , "_stack_overflow") // JA _stack_overflow
self.Emit("SHLQ", jit.Imm(1), _DX) // SHLQ $1, DX
self.Emit("LEAQ", jit.Sib(_SI, _DX, 8, 0), _SI) // LEAQ (SI)(DX*8), SI
self.Emit("MOVQ", _CX, jit.Ptr(_ST, _ST_Sp)) // MOVQ CX, ST.Sp
self.WriteRecNotAX(7 , _SI, jit.Sib(_ST, _CX, 8, _ST_Vp), false) // MOVQ SI, ST.Vp[CX]
self.Emit("MOVQ", jit.Imm(_S_val), jit.Sib(_ST, _CX, 8, _ST_Vt)) // MOVQ _S_val, ST.Vt[CX}
self.Sjmp("JMP" , "_next") // JMP _next
/** V_ARRAY_END **/
self.Link("_decode_V_ARRAY_END") // _decode_V_ARRAY_END:
self.Emit("XORL", _DX, _DX) // XORL DX, DX
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("CMPQ", _AX, jit.Imm(_S_arr_0)) // CMPQ AX, _S_arr_0
self.Sjmp("JE" , "_first_item") // JE _first_item
self.Emit("CMPQ", _AX, jit.Imm(_S_arr)) // CMPQ AX, _S_arr
self.Sjmp("JNE" , "_invalid_char") // JNE _invalid_char
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* first element of an array */
self.Link("_first_item") // _first_item:
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("SUBQ", jit.Imm(2), jit.Ptr(_ST, _ST_Sp)) // SUBQ $2, ST.Sp
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp - 8), _SI) // MOVQ ST.Vp[CX - 1], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp - 8)) // MOVQ DX, ST.Vp[CX - 1]
self.Emit("MOVQ", _DX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ DX, ST.Vp[CX]
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
self.Sjmp("JMP" , "_next") // JMP _next
/** V_OBJECT_END **/
self.Link("_decode_V_OBJECT_END") // _decode_V_OBJECT_END:
self.Emit("MOVL", jit.Imm(_S_omask_end), _DI) // MOVL _S_omask, DI
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vt), _AX) // MOVQ ST.Vt[CX], AX
self.Emit("BTQ" , _AX, _DI)
self.Sjmp("JNC" , "_invalid_char") // JNE _invalid_char
self.Emit("XORL", _AX, _AX) // XORL AX, AX
self.Emit("SUBQ", jit.Imm(1), jit.Ptr(_ST, _ST_Sp)) // SUBQ $1, ST.Sp
self.Emit("MOVQ", _AX, jit.Sib(_ST, _CX, 8, _ST_Vp)) // MOVQ AX, ST.Vp[CX]
self.Sjmp("JMP" , "_next") // JMP _next
/* return from decoder */
self.Link("_return") // _return:
self.Emit("XORL", _EP, _EP) // XORL EP, EP
self.Emit("MOVQ", _EP, jit.Ptr(_ST, _ST_Vp)) // MOVQ EP, ST.Vp[0]
self.Link("_epilogue") // _epilogue:
self.Emit("SUBQ", jit.Imm(_FsmOffset), _ST) // SUBQ _FsmOffset, _ST
self.Emit("MOVQ", jit.Ptr(_SP, _VD_offs), _BP) // MOVQ _VD_offs(SP), BP
self.Emit("ADDQ", jit.Imm(_VD_size), _SP) // ADDQ $_VD_size, SP
self.Emit("RET") // RET
/* array expand */
self.Link("_array_more") // _array_more:
self.Emit("MOVQ" , _T_eface, _AX) // MOVQ _T_eface, AX
self.Emit("MOVQ" , jit.Ptr(_SI, 0), _BX) // MOVQ (SI), BX
self.Emit("MOVQ" , jit.Ptr(_SI, 8), _CX) // MOVQ 8(SI), CX
self.Emit("MOVQ" , jit.Ptr(_SI, 16), _DI) // MOVQ 16(SI), DI
self.Emit("MOVQ" , _DI, _SI) // MOVQ DI, 24(SP)
self.Emit("SHLQ" , jit.Imm(1), _SI) // SHLQ $1, SI
self.call_go(_F_growslice) // CALL_GO runtime.growslice
self.Emit("MOVQ" , _AX, _DI) // MOVQ AX, DI
self.Emit("MOVQ" , _BX, _DX) // MOVQ BX, DX
self.Emit("MOVQ" , _CX, _AX) // MOVQ CX, AX
/* update the slice */
self.Emit("MOVQ", jit.Ptr(_ST, _ST_Sp), _CX) // MOVQ ST.Sp, CX
self.Emit("MOVQ", jit.Sib(_ST, _CX, 8, _ST_Vp), _SI) // MOVQ ST.Vp[CX], SI
self.Emit("MOVQ", jit.Ptr(_SI, 8), _SI) // MOVQ 8(SI), SI
self.Emit("MOVQ", _DX, jit.Ptr(_SI, 8)) // MOVQ DX, 8(SI)
self.Emit("MOVQ", _AX, jit.Ptr(_SI, 16)) // MOVQ AX, 16(AX)
self.WriteRecNotAX(8 , _DI, jit.Ptr(_SI, 0), false) // MOVQ R10, (SI)
self.Sjmp("JMP" , "_array_append") // JMP _array_append
/* copy string */
self.Link("copy_string") // pointer: R8, length: AX, return addr: DI
self.Emit("MOVQ", _R8, _VAR_cs_p)
self.Emit("MOVQ", _AX, _VAR_cs_n)
self.Emit("MOVQ", _DI, _VAR_cs_LR)
self.Emit("MOVQ", _AX, _BX)
self.Emit("MOVQ", _AX, _CX)
self.Emit("MOVQ", _T_byte, _AX)
self.call_go(_F_makeslice)
self.Emit("MOVQ", _AX, _VAR_cs_d)
self.Emit("MOVQ", _VAR_cs_p, _BX)
self.Emit("MOVQ", _VAR_cs_n, _CX)
self.call_go(_F_memmove)
self.Emit("MOVQ", _VAR_cs_d, _R8)
self.Emit("MOVQ", _VAR_cs_n, _AX)
self.Emit("MOVQ", _VAR_cs_LR, _DI)
self.Rjmp("JMP", _DI)
/* error handlers */
self.Link("_stack_overflow")
self.Emit("MOVL" , _E_recurse, _EP) // MOVQ _E_recurse, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_vtype_error") // _vtype_error:
self.Emit("MOVQ" , _DI, _IC) // MOVQ DI, IC
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_invalid_char") // _invalid_char:
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
self.Emit("MOVL" , _E_invalid, _EP) // MOVL _E_invalid, EP
self.Sjmp("JMP" , "_error") // JMP _error
self.Link("_unquote_error") // _unquote_error:
self.Emit("MOVQ" , _VAR_ss_Iv, _IC) // MOVQ ss.Iv, IC
self.Emit("SUBQ" , jit.Imm(1), _IC) // SUBQ $1, IC
self.Link("_parsing_error") // _parsing_error:
self.Emit("NEGQ" , _AX) // NEGQ AX
self.Emit("MOVQ" , _AX, _EP) // MOVQ AX, EP
self.Link("_error") // _error:
self.Emit("PXOR" , _X0, _X0) // PXOR X0, X0
self.Emit("MOVOU", _X0, jit.Ptr(_VP, 0)) // MOVOU X0, (VP)
self.Sjmp("JMP" , "_epilogue") // JMP _epilogue
/* invalid value type, never returns */
self.Link("_invalid_vtype")
self.call_go(_F_invalid_vtype) // CALL_GO invalid_vtype
self.Emit("UD2") // UD2
/* switch jump table */
self.Link("_switch_table") // _switch_table:
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
self.Sref("_decode_V_NULL", -4) // SREF &_decode_V_NULL, $-4
self.Sref("_decode_V_TRUE", -8) // SREF &_decode_V_TRUE, $-8
self.Sref("_decode_V_FALSE", -12) // SREF &_decode_V_FALSE, $-12
self.Sref("_decode_V_ARRAY", -16) // SREF &_decode_V_ARRAY, $-16
self.Sref("_decode_V_OBJECT", -20) // SREF &_decode_V_OBJECT, $-20
self.Sref("_decode_V_STRING", -24) // SREF &_decode_V_STRING, $-24
self.Sref("_decode_V_DOUBLE", -28) // SREF &_decode_V_DOUBLE, $-28
self.Sref("_decode_V_INTEGER", -32) // SREF &_decode_V_INTEGER, $-32
self.Sref("_decode_V_KEY_SEP", -36) // SREF &_decode_V_KEY_SEP, $-36
self.Sref("_decode_V_ELEM_SEP", -40) // SREF &_decode_V_ELEM_SEP, $-40
self.Sref("_decode_V_ARRAY_END", -44) // SREF &_decode_V_ARRAY_END, $-44
self.Sref("_decode_V_OBJECT_END", -48) // SREF &_decode_V_OBJECT_END, $-48
/* fast character lookup table */
self.Link("_decode_tab") // _decode_tab:
self.Sref("_decode_V_EOF", 0) // SREF &_decode_V_EOF, $0
/* generate rest of the tabs */
for i := 1; i < 256; i++ {
if to, ok := _R_tab[i]; ok {
self.Sref(to, -int64(i) * 4)
} else {
self.Byte(0x00, 0x00, 0x00, 0x00)
}
}
}
func (self *_ValueDecoder) WritePtrAX(i int, rec obj.Addr, saveDI bool) {
self.Emit("MOVQ", _V_writeBarrier, _R9)
self.Emit("CMPL", jit.Ptr(_R9, 0), jit.Imm(0))
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
if saveDI {
self.save(_DI)
}
self.Emit("LEAQ", rec, _DI)
self.call(_F_gcWriteBarrierAX)
if saveDI {
self.load(_DI)
}
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", _AX, rec)
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
func (self *_ValueDecoder) WriteRecNotAX(i int, ptr obj.Addr, rec obj.Addr, saveDI bool) {
if rec.Reg == x86.REG_AX || rec.Index == x86.REG_AX {
panic("rec contains AX!")
}
self.Emit("MOVQ", _V_writeBarrier, _AX)
self.Emit("CMPL", jit.Ptr(_AX, 0), jit.Imm(0))
self.Sjmp("JE", "_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", ptr, _AX)
if saveDI {
self.save(_DI)
}
self.Emit("LEAQ", rec, _DI)
self.call(_F_gcWriteBarrierAX)
if saveDI {
self.load(_DI)
}
self.Sjmp("JMP", "_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Link("_no_writeBarrier" + strconv.Itoa(i) + "_{n}")
self.Emit("MOVQ", ptr, rec)
self.Link("_end_writeBarrier" + strconv.Itoa(i) + "_{n}")
}
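/* NOTE: illustrative sketch, not emitted code. WritePtrAX and WriteRecNotAX
 * both wrap an 8-byte pointer store in the standard write-barrier guard:
 *
 *     if *(*uint32)(unsafe.Pointer(_V_writeBarrier)) != 0 {
 *         gcWriteBarrierStore(slot, ptr) // LEAQ rec, DI; CALL gcWriteBarrierAX
 *     } else {
 *         *slot = ptr                    // plain MOVQ
 *     }
 *
 * gcWriteBarrierStore, slot and ptr are hypothetical stand-ins for the runtime
 * barrier call and the rec/ptr (or AX) operands of the two helpers. */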
/** Generic Decoder **/
var (
_subr_decode_value = new(_ValueDecoder).build()
)
//go:nosplit
func invalid_vtype(vt types.ValueType) {
throw(fmt.Sprintf("invalid value type: %d", vt))
}

View File

@@ -1,37 +0,0 @@
// +build go1.17,!go1.21
//
// Copyright 2021 ByteDance Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "go_asm.h"
#include "funcdata.h"
#include "textflag.h"
TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
NO_LOCAL_POINTERS
PXOR X0, X0
MOVOU X0, rv+48(FP)
MOVQ st+0(FP) , R13
MOVQ sp+8(FP) , R10
MOVQ sn+16(FP), R12
MOVQ ic+24(FP), R11
MOVQ vp+32(FP), R15
MOVQ df+40(FP), AX
MOVQ ·_subr_decode_value(SB), BX
CALL BX
MOVQ R11, rp+48(FP)
MOVQ BX, ex+56(FP)
RET

View File

@@ -1,37 +0,0 @@
// +build go1.15,!go1.17
//
// Copyright 2021 ByteDance Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "go_asm.h"
#include "funcdata.h"
#include "textflag.h"
TEXT ·decodeValueStub(SB), NOSPLIT, $0 - 72
NO_LOCAL_POINTERS
PXOR X0, X0
MOVOU X0, rv+48(FP)
MOVQ st+0(FP), BX
MOVQ sp+8(FP), R12
MOVQ sn+16(FP), R13
MOVQ ic+24(FP), R14
MOVQ vp+32(FP), R15
MOVQ df+40(FP), R10
MOVQ ·_subr_decode_value(SB), AX
CALL AX
MOVQ R14, rp+48(FP)
MOVQ R11, ex+56(FP)
RET

View File

@@ -29,7 +29,7 @@ const (
_MinSlice = 2
_MaxStack = 4096 // 4k slots
_MaxStackBytes = _MaxStack * _PtrBytes
_MaxDigitNums = 800 // used in atof fallback algorithm
_MaxDigitNums = types.MaxDigitNums // used in atof fallback algorithm
)
const (
@@ -140,4 +140,4 @@ func findOrCompile(vt *rt.GoType) (_Decoder, error) {
} else {
return nil, err
}
}
}

View File

@@ -21,8 +21,9 @@ import (
`io`
`sync`
`github.com/bytedance/sonic/option`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/option`
)
var (
@@ -71,6 +72,7 @@ func (self *StreamDecoder) Decode(val interface{}) (err error) {
var first = true
var repeat = true
read_more:
for {
l := len(buf)
@@ -97,11 +99,20 @@ read_more:
l := len(buf)
if l > 0 {
self.Decoder.Reset(string(buf))
var x int
if ret := native.SkipOneFast(&self.s, &x); ret < 0 {
if repeat {
goto read_more
} else {
err = SyntaxError{x, self.s, types.ParsingError(-ret), ""}
self.err = err
return
}
}
err = self.Decoder.Decode(val)
if err != nil {
if repeat && self.repeatable(err) {
goto read_more
}
self.err = err
}
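A minimal usage sketch for the streaming path touched above (the NewStreamDecoder constructor and the multi-document input are assumptions for illustration; only Decode and the SkipOneFast pre-check appear in this hunk):

    dec := decoder.NewStreamDecoder(strings.NewReader(`{"a":1} {"b":2}`))
    for {
        var v map[string]interface{}
        if err := dec.Decode(&v); err != nil {
            break // io.EOF, or the SyntaxError surfaced by the skip-ahead check
        }
    }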

View File

@@ -1,111 +0,0 @@
// +build go1.15,!go1.20
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`unsafe`
`reflect`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
var _subr__b64decode uintptr
// runtime.maxElementSize
const _max_map_element_size uintptr = 128
func mapfast(vt reflect.Type) bool {
return vt.Elem().Size() <= _max_map_element_size
}
//go:nosplit
//go:linkname throw runtime.throw
//goland:noinspection GoUnusedParameter
func throw(s string)
//go:linkname convT64 runtime.convT64
//goland:noinspection GoUnusedParameter
func convT64(v uint64) unsafe.Pointer
//go:linkname convTslice runtime.convTslice
//goland:noinspection GoUnusedParameter
func convTslice(v []byte) unsafe.Pointer
//go:linkname convTstring runtime.convTstring
//goland:noinspection GoUnusedParameter
func convTstring(v string) unsafe.Pointer
//go:noescape
//go:linkname memequal runtime.memequal
//goland:noinspection GoUnusedParameter
func memequal(a unsafe.Pointer, b unsafe.Pointer, size uintptr) bool
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname mallocgc runtime.mallocgc
//goland:noinspection GoUnusedParameter
func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
//go:linkname makeslice runtime.makeslice
//goland:noinspection GoUnusedParameter
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
//go:noescape
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname makemap_small runtime.makemap_small
func makemap_small() unsafe.Pointer
//go:linkname mapassign runtime.mapassign
//goland:noinspection GoUnusedParameter
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_fast32 runtime.mapassign_fast32
//goland:noinspection GoUnusedParameter
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
//go:linkname mapassign_fast64 runtime.mapassign_fast64
//goland:noinspection GoUnusedParameter
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
//goland:noinspection GoUnusedParameter
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_faststr runtime.mapassign_faststr
//goland:noinspection GoUnusedParameter
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
//go:nosplit
//go:linkname memclrHasPointers runtime.memclrHasPointers
//goland:noinspection GoUnusedParameter
func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)

View File

@@ -82,23 +82,23 @@ func makemap_small() unsafe.Pointer
//go:linkname mapassign runtime.mapassign
//goland:noinspection GoUnusedParameter
func mapassign(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_fast32 runtime.mapassign_fast32
//goland:noinspection GoUnusedParameter
func mapassign_fast32(t *rt.GoType, h unsafe.Pointer, k uint32) unsafe.Pointer
func mapassign_fast32(t *rt.GoMapType, h unsafe.Pointer, k uint32) unsafe.Pointer
//go:linkname mapassign_fast64 runtime.mapassign_fast64
//goland:noinspection GoUnusedParameter
func mapassign_fast64(t *rt.GoType, h unsafe.Pointer, k uint64) unsafe.Pointer
func mapassign_fast64(t *rt.GoMapType, h unsafe.Pointer, k uint64) unsafe.Pointer
//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
//goland:noinspection GoUnusedParameter
func mapassign_fast64ptr(t *rt.GoType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
func mapassign_fast64ptr(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_faststr runtime.mapassign_faststr
//goland:noinspection GoUnusedParameter
func mapassign_faststr(t *rt.GoType, h unsafe.Pointer, s string) unsafe.Pointer
func mapassign_faststr(t *rt.GoMapType, h unsafe.Pointer, s string) unsafe.Pointer
//go:nosplit
//go:linkname memclrHasPointers runtime.memclrHasPointers
@@ -108,4 +108,4 @@ func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17
/*
* Copyright 2021 ByteDance Inc.

View File

@@ -1,4 +1,4 @@
// +build go1.17,!go1.21
// +build go1.17,!go1.22
/*
* Copyright 2021 ByteDance Inc.
@@ -202,4 +202,4 @@ func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
self.Emit("MOVQ", _R10, _BX)
self.dcall(_F_printptr)
self.dload(_REG_debug...)
}
}

View File

@@ -292,7 +292,6 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
cfg := option.DefaultCompileOptions()
for _, opt := range opts {
opt(&cfg)
break
}
return pretouchRec(map[reflect.Type]uint8{vt: 0}, cfg)
}
@@ -325,4 +324,4 @@ func Valid(data []byte) (ok bool, start int) {
}
return true, ret
}
}

View File

@@ -62,4 +62,4 @@ func goPanic(code int, val unsafe.Pointer) {
default:
panic("encoder error!")
}
}
}

View File

@@ -190,4 +190,4 @@ func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
}
opts.RecursiveDepth -= 1
return pretouchRec(next, opts)
}
}

View File

@@ -165,4 +165,4 @@ func asJson(v unsafe.Pointer) (string, error) {
text := assertI2I(_T_json_Marshaler, *(*rt.GoIface)(v))
r, e := (*(*json.Marshaler)(unsafe.Pointer(&text))).MarshalJSON()
return rt.Mem2Str(r), e
}
}

View File

@@ -81,4 +81,4 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
free_bytes:
freeBytes(out)
return err
}
}

View File

@@ -1,4 +1,4 @@
// +build go1.15,!go1.17
// +build go1.16,!go1.17
/*
* Copyright 2021 ByteDance Inc.

View File

@@ -1,4 +1,4 @@
// +build go1.20
// +build go1.20,!go1.21
/*
* Copyright 2021 ByteDance Inc.
@@ -63,4 +63,4 @@ func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
var _runtime_writeBarrier uintptr
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()
func gcWriteBarrierAX()

View File

@@ -1,124 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`reflect`
`sync`
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname lastmoduledatap runtime.lastmoduledatap
//goland:noinspection GoUnusedGlobalVariable
var lastmoduledatap *_ModuleData
//go:linkname moduledataverify1 runtime.moduledataverify1
func moduledataverify1(_ *_ModuleData)
// PCDATA and FUNCDATA table indexes.
//
// See funcdata.h and $GROOT/src/cmd/internal/objabi/funcdata.go.
const (
_FUNCDATA_ArgsPointerMaps = 0
_FUNCDATA_LocalsPointerMaps = 1
)
type funcInfo struct {
*_Func
datap *_ModuleData
}
//go:linkname findfunc runtime.findfunc
func findfunc(pc uintptr) funcInfo
//go:linkname funcdata runtime.funcdata
func funcdata(f funcInfo, i uint8) unsafe.Pointer
var (
modLock sync.Mutex
modList []*_ModuleData
)
var emptyByte byte
func encodeVariant(v int) []byte {
var u int
var r []byte
/* split every 7 bits */
for v > 127 {
u = v & 0x7f
v = v >> 7
r = append(r, byte(u) | 0x80)
}
/* check for last one */
if v == 0 {
return r
}
/* add the last one */
r = append(r, byte(v))
return r
}
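/* Worked example for reference: encodeVariant(300) emits 300 & 0x7f = 0x2c with
 * the continuation bit set (0xac), then 300 >> 7 = 2 as the final byte, giving
 * []byte{0xac, 0x02} -- the usual little-endian base-128 (varint) encoding. */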
func registerModule(mod *_ModuleData) {
modLock.Lock()
modList = append(modList, mod)
lastmoduledatap.next = mod
lastmoduledatap = mod
modLock.Unlock()
}
func stackMap(f interface{}) (args uintptr, locals uintptr) {
fv := reflect.ValueOf(f)
if fv.Kind() != reflect.Func {
panic("f must be reflect.Func kind!")
}
fi := findfunc(fv.Pointer())
return uintptr(funcdata(fi, uint8(_FUNCDATA_ArgsPointerMaps))), uintptr(funcdata(fi, uint8(_FUNCDATA_LocalsPointerMaps)))
}
var moduleCache = struct{
m map[*_ModuleData][]byte
l sync.Mutex
}{
m : make(map[*_ModuleData][]byte),
}
func cacheStackmap(argPtrs []bool, localPtrs []bool, mod *_ModuleData) (argptrs uintptr, localptrs uintptr) {
as := rt.StackMapBuilder{}
for _, b := range argPtrs {
as.AddField(b)
}
ab, _ := as.Build().MarshalBinary()
ls := rt.StackMapBuilder{}
for _, b := range localPtrs {
ls.AddField(b)
}
lb, _ := ls.Build().MarshalBinary()
cache := make([]byte, len(ab) + len(lb))
copy(cache, ab)
copy(cache[len(ab):], lb)
moduleCache.l.Lock()
moduleCache.m[mod] = cache
moduleCache.l.Unlock()
return uintptr(rt.IndexByte(cache, 0)), uintptr(rt.IndexByte(cache, len(ab)))
}

View File

@@ -1,169 +0,0 @@
// +build go1.15,!go1.16
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
type _Func struct {
entry uintptr // start pc
nameoff int32 // function name
args int32 // in/out args size
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
pcsp int32
pcfile int32
pcln int32
npcdata int32
funcID uint8 // set for certain special runtime functions
_ [2]int8 // unused
nfuncdata uint8 // must be last
argptrs uintptr
localptrs uintptr
}
type _FuncTab struct {
entry uintptr
funcoff uintptr
}
type _BitVector struct {
n int32 // # of bits
bytedata *uint8
}
type _PtabEntry struct {
name int32
typ int32
}
type _TextSection struct {
vaddr uintptr // prelinked section vaddr
length uintptr // section length
baseaddr uintptr // relocated section address
}
type _ModuleData struct {
pclntable []byte
ftab []_FuncTab
filetab []uint32
findfunctab *_FindFuncBucket
minpc, maxpc uintptr
text, etext uintptr
noptrdata, enoptrdata uintptr
data, edata uintptr
bss, ebss uintptr
noptrbss, enoptrbss uintptr
end, gcdata, gcbss uintptr
types, etypes uintptr
textsectmap []_TextSection
typelinks []int32 // offsets from types
itablinks []*rt.GoItab
ptab []_PtabEntry
pluginpath string
pkghashes []byte
modulename string
modulehashes []byte
hasmain uint8 // 1 if module contains the main function, 0 otherwise
gcdatamask, gcbssmask _BitVector
typemap map[int32]*rt.GoType // offset to *_rtype in previous module
bad bool // module failed to load and should be ignored
next *_ModuleData
}
type _FindFuncBucket struct {
idx uint32
subbuckets [16]byte
}
var findFuncTab = &_FindFuncBucket {
idx: 1,
}
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
mod := new(_ModuleData)
minpc := pc
maxpc := pc + size
/* build the PC & line table */
pclnt := []byte {
0xfb, 0xff, 0xff, 0xff, // magic : 0xfffffffb
0, // pad1 : 0
0, // pad2 : 0
1, // minLC : 1
4 << (^uintptr(0) >> 63), // ptrSize : 4 << (^uintptr(0) >> 63)
}
// cache arg and local stackmap
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
/* add the function name */
noff := len(pclnt)
pclnt = append(append(pclnt, name...), 0)
/* add PCDATA */
pcsp := len(pclnt)
pclnt = append(pclnt, encodeVariant((fp + 1) << 1)...)
pclnt = append(pclnt, encodeVariant(int(size))...)
/* function entry */
fnv := _Func {
entry : pc,
nameoff : int32(noff),
args : int32(args),
pcsp : int32(pcsp),
nfuncdata : 2,
argptrs : uintptr(argptrs),
localptrs : uintptr(localptrs),
}
/* align the func to 8 bytes */
if p := len(pclnt) % 8; p != 0 {
pclnt = append(pclnt, make([]byte, 8 - p)...)
}
/* add the function descriptor */
foff := len(pclnt)
pclnt = append(pclnt, (*(*[unsafe.Sizeof(_Func{})]byte)(unsafe.Pointer(&fnv)))[:]...)
/* function table */
tab := []_FuncTab {
{entry: pc, funcoff: uintptr(foff)},
{entry: pc, funcoff: uintptr(foff)},
{entry: maxpc},
}
/* module data */
*mod = _ModuleData {
pclntable : pclnt,
ftab : tab,
findfunctab : findFuncTab,
minpc : minpc,
maxpc : maxpc,
modulename : name,
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
}
/* verify and register the new module */
moduledataverify1(mod)
registerModule(mod)
}

View File

@@ -1,175 +0,0 @@
//go:build go1.16 && !go1.18
// +build go1.16,!go1.18
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`unsafe`
)
type _Func struct {
entry uintptr // start pc
nameoff int32 // function name
args int32 // in/out args size
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
pcsp uint32
pcfile uint32
pcln uint32
npcdata uint32
cuOffset uint32 // runtime.cutab offset of this function's CU
funcID uint8 // set for certain special runtime functions
_ [2]byte // pad
nfuncdata uint8 // must be last
argptrs uintptr
localptrs uintptr
}
type _FuncTab struct {
entry uintptr
funcoff uintptr
}
type _PCHeader struct {
magic uint32 // 0xFFFFFFFA
pad1, pad2 uint8 // 0,0
minLC uint8 // min instruction size
ptrSize uint8 // size of a ptr in bytes
nfunc int // number of functions in the module
nfiles uint // number of entries in the file tab.
funcnameOffset uintptr // offset to the funcnametab variable from _PCHeader
cuOffset uintptr // offset to the cutab variable from _PCHeader
filetabOffset uintptr // offset to the filetab variable from _PCHeader
pctabOffset uintptr // offset to the pctab variable from _PCHeader
pclnOffset uintptr // offset to the pclntab variable from _PCHeader
}
type _BitVector struct {
n int32 // # of bits
bytedata *uint8
}
type _PtabEntry struct {
name int32
typ int32
}
type _TextSection struct {
vaddr uintptr // prelinked section vaddr
length uintptr // section length
baseaddr uintptr // relocated section address
}
type _ModuleData struct {
pcHeader *_PCHeader
funcnametab []byte
cutab []uint32
filetab []byte
pctab []byte
pclntable []_Func
ftab []_FuncTab
findfunctab *_FindFuncBucket
minpc, maxpc uintptr
text, etext uintptr
noptrdata, enoptrdata uintptr
data, edata uintptr
bss, ebss uintptr
noptrbss, enoptrbss uintptr
end, gcdata, gcbss uintptr
types, etypes uintptr
textsectmap []_TextSection
typelinks []int32
itablinks []unsafe.Pointer
ptab []_PtabEntry
pluginpath string
pkghashes []struct{}
modulename string
modulehashes []struct{}
hasmain uint8
gcdatamask, gcbssmask _BitVector
typemap map[int32]unsafe.Pointer
bad bool
next *_ModuleData
}
type _FindFuncBucket struct {
idx uint32
subbuckets [16]byte
}
var modHeader = &_PCHeader {
magic : 0xfffffffa,
minLC : 1,
nfunc : 1,
ptrSize : 4 << (^uintptr(0) >> 63),
}
var findFuncTab = &_FindFuncBucket {
idx: 1,
}
func makePCtab(fp int) []byte {
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
}
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
mod := new(_ModuleData)
minpc := pc
maxpc := pc + size
// cache arg and local stackmap
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
/* function entry */
lnt := []_Func {{
entry : pc,
nameoff : 1,
args : int32(args),
pcsp : 1,
nfuncdata : 2,
argptrs : uintptr(argptrs),
localptrs : uintptr(localptrs),
}}
/* function table */
tab := []_FuncTab {
{entry: pc},
{entry: pc},
{entry: maxpc},
}
/* module data */
*mod = _ModuleData {
pcHeader : modHeader,
funcnametab : append(append([]byte{0}, name...), 0),
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
pclntable : lnt,
ftab : tab,
findfunctab : findFuncTab,
minpc : minpc,
maxpc : maxpc,
modulename : name,
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
}
/* verify and register the new module */
moduledataverify1(mod)
registerModule(mod)
}

View File

@@ -1,201 +0,0 @@
// +build go1.18,!go1.20
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
// A FuncFlag holds bits about a function.
// This list must match the list in cmd/internal/objabi/funcid.go.
type funcFlag uint8
type _Func struct {
entryOff uint32 // start pc
nameoff int32 // function name
args int32 // in/out args size
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
pcsp uint32
pcfile uint32
pcln uint32
npcdata uint32
cuOffset uint32 // runtime.cutab offset of this function's CU
funcID uint8 // set for certain special runtime functions
flag funcFlag
_ [1]byte // pad
nfuncdata uint8 // must be last
argptrs uint32
localptrs uint32
}
type _FuncTab struct {
entry uint32
funcoff uint32
}
type _PCHeader struct {
magic uint32 // 0xFFFFFFF0
pad1, pad2 uint8 // 0,0
minLC uint8 // min instruction size
ptrSize uint8 // size of a ptr in bytes
nfunc int // number of functions in the module
nfiles uint // number of entries in the file tab
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
cuOffset uintptr // offset to the cutab variable from pcHeader
filetabOffset uintptr // offset to the filetab variable from pcHeader
pctabOffset uintptr // offset to the pctab variable from pcHeader
pclnOffset uintptr // offset to the pclntab variable from pcHeader
}
type _BitVector struct {
n int32 // # of bits
bytedata *uint8
}
type _PtabEntry struct {
name int32
typ int32
}
type _TextSection struct {
vaddr uintptr // prelinked section vaddr
length uintptr // section length
baseaddr uintptr // relocated section address
}
type _ModuleData struct {
pcHeader *_PCHeader
funcnametab []byte
cutab []uint32
filetab []byte
pctab []byte
pclntable []byte
ftab []_FuncTab
findfunctab *_FindFuncBucket
minpc, maxpc uintptr
text, etext uintptr
noptrdata, enoptrdata uintptr
data, edata uintptr
bss, ebss uintptr
noptrbss, enoptrbss uintptr
end, gcdata, gcbss uintptr
types, etypes uintptr
rodata uintptr
gofunc uintptr
textsectmap []_TextSection
typelinks []int32
itablinks []unsafe.Pointer
ptab []_PtabEntry
pluginpath string
pkghashes []struct{}
modulename string
modulehashes []struct{}
hasmain uint8
gcdatamask, gcbssmask _BitVector
typemap map[int32]unsafe.Pointer
bad bool
next *_ModuleData
}
type _FindFuncBucket struct {
idx uint32
subbuckets [16]byte
}
func makePCtab(fp int) []byte {
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
}
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
mod := new(_ModuleData)
minpc := pc
maxpc := pc + size
findFuncTab := make([]_FindFuncBucket, textSize/4096 + 1)
modHeader := &_PCHeader {
magic : 0xfffffff0,
minLC : 1,
nfunc : 1,
ptrSize : 4 << (^uintptr(0) >> 63),
textStart: minpc,
}
// cache arg and local stackmap
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
base := argptrs
if argptrs > localptrs {
base = localptrs
}
/* function entry */
lnt := []_Func {{
entryOff : 0,
nameoff : 1,
args : int32(args),
pcsp : 1,
nfuncdata : 2,
argptrs: uint32(argptrs - base),
localptrs: uint32(localptrs - base),
}}
nlnt := len(lnt)*int(unsafe.Sizeof(_Func{}))
plnt := unsafe.Pointer(&lnt[0])
/* function table */
ftab := []_FuncTab {
{entry : 0, funcoff : 16},
{entry : uint32(size)},
}
nftab := len(ftab)*int(unsafe.Sizeof(_FuncTab{}))
pftab := unsafe.Pointer(&ftab[0])
pclntab := make([]byte, 0, nftab + nlnt)
pclntab = append(pclntab, rt.BytesFrom(pftab, nftab, nftab)...)
pclntab = append(pclntab, rt.BytesFrom(plnt, nlnt, nlnt)...)
/* module data */
*mod = _ModuleData {
pcHeader : modHeader,
funcnametab : append(append([]byte{0}, name...), 0),
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
pclntable : pclntab,
ftab : ftab,
text : minpc,
etext : pc + textSize,
findfunctab : &findFuncTab[0],
minpc : minpc,
maxpc : maxpc,
modulename : name,
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
gofunc: base,
}
/* verify and register the new module */
moduledataverify1(mod)
registerModule(mod)
}

View File

@@ -1,201 +0,0 @@
// +build go1.20
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
// A FuncFlag holds bits about a function.
// This list must match the list in cmd/internal/objabi/funcid.go.
type funcFlag uint8
type _Func struct {
entryOff uint32 // start pc
nameoff int32 // function name
args int32 // in/out args size
deferreturn uint32 // offset of start of a deferreturn call instruction from entry, if any.
pcsp uint32
pcfile uint32
pcln uint32
npcdata uint32
cuOffset uint32 // runtime.cutab offset of this function's CU
funcID uint8 // set for certain special runtime functions
flag funcFlag
_ [1]byte // pad
nfuncdata uint8 // must be last
argptrs uint32
localptrs uint32
}
type _FuncTab struct {
entry uint32
funcoff uint32
}
type _PCHeader struct {
magic uint32 // 0xFFFFFFF0
pad1, pad2 uint8 // 0,0
minLC uint8 // min instruction size
ptrSize uint8 // size of a ptr in bytes
nfunc int // number of functions in the module
nfiles uint // number of entries in the file tab
textStart uintptr // base for function entry PC offsets in this module, equal to moduledata.text
funcnameOffset uintptr // offset to the funcnametab variable from pcHeader
cuOffset uintptr // offset to the cutab variable from pcHeader
filetabOffset uintptr // offset to the filetab variable from pcHeader
pctabOffset uintptr // offset to the pctab variable from pcHeader
pclnOffset uintptr // offset to the pclntab variable from pcHeader
}
type _BitVector struct {
n int32 // # of bits
bytedata *uint8
}
type _PtabEntry struct {
name int32
typ int32
}
type _TextSection struct {
vaddr uintptr // prelinked section vaddr
length uintptr // section length
baseaddr uintptr // relocated section address
}
type _ModuleData struct {
pcHeader *_PCHeader
funcnametab []byte
cutab []uint32
filetab []byte
pctab []byte
pclntable []byte
ftab []_FuncTab
findfunctab *_FindFuncBucket
minpc, maxpc uintptr
text, etext uintptr
noptrdata, enoptrdata uintptr
data, edata uintptr
bss, ebss uintptr
noptrbss, enoptrbss uintptr
end, gcdata, gcbss uintptr
types, etypes uintptr
rodata uintptr
gofunc uintptr
textsectmap []_TextSection
typelinks []int32
itablinks []unsafe.Pointer
ptab []_PtabEntry
pluginpath string
pkghashes []struct{}
modulename string
modulehashes []struct{}
hasmain uint8
gcdatamask, gcbssmask _BitVector
typemap map[int32]unsafe.Pointer
bad bool
next *_ModuleData
}
type _FindFuncBucket struct {
idx uint32
subbuckets [16]byte
}
func makePCtab(fp int) []byte {
return append([]byte{0}, encodeVariant((fp + 1) << 1)...)
}
func registerFunction(name string, pc uintptr, textSize uintptr, fp int, args int, size uintptr, argPtrs []bool, localPtrs []bool) {
mod := new(_ModuleData)
minpc := pc
maxpc := pc + size
findFuncTab := make([]_FindFuncBucket, textSize/4096 + 1)
modHeader := &_PCHeader {
magic : 0xfffffff0,
minLC : 1,
nfunc : 1,
ptrSize : 4 << (^uintptr(0) >> 63),
textStart: minpc,
}
// cache arg and local stackmap
argptrs, localptrs := cacheStackmap(argPtrs, localPtrs, mod)
base := argptrs
if argptrs > localptrs {
base = localptrs
}
/* function entry */
lnt := []_Func {{
entryOff : 0,
nameoff : 1,
args : int32(args),
pcsp : 1,
nfuncdata : 2,
argptrs: uint32(argptrs - base),
localptrs: uint32(localptrs - base),
}}
nlnt := len(lnt)*int(unsafe.Sizeof(_Func{}))
plnt := unsafe.Pointer(&lnt[0])
/* function table */
ftab := []_FuncTab {
{entry : 0, funcoff : 16},
{entry : uint32(size)},
}
nftab := len(ftab)*int(unsafe.Sizeof(_FuncTab{}))
pftab := unsafe.Pointer(&ftab[0])
pclntab := make([]byte, 0, nftab + nlnt)
pclntab = append(pclntab, rt.BytesFrom(pftab, nftab, nftab)...)
pclntab = append(pclntab, rt.BytesFrom(plnt, nlnt, nlnt)...)
/* module data */
*mod = _ModuleData {
pcHeader : modHeader,
funcnametab : append(append([]byte{0}, name...), 0),
pctab : append(makePCtab(fp), encodeVariant(int(size))...),
pclntable : pclntab,
ftab : ftab,
text : minpc,
etext : pc + textSize,
findfunctab : &findFuncTab[0],
minpc : minpc,
maxpc : maxpc,
modulename : name,
gcdata: uintptr(unsafe.Pointer(&emptyByte)),
gcbss: uintptr(unsafe.Pointer(&emptyByte)),
gofunc: base,
}
/* verify and register the new module */
moduledataverify1(mod)
registerModule(mod)
}

View File

@@ -1,74 +0,0 @@
//go:build darwin || linux
// +build darwin linux
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`fmt`
`os`
`reflect`
`syscall`
`unsafe`
)
const (
_AP = syscall.MAP_ANON | syscall.MAP_PRIVATE
_RX = syscall.PROT_READ | syscall.PROT_EXEC
_RW = syscall.PROT_READ | syscall.PROT_WRITE
)
type Loader []byte
type Function unsafe.Pointer
func (self Loader) Load(fn string, fp int, args int, argPtrs []bool, localPtrs []bool) (f Function) {
p := os.Getpagesize()
n := (((len(self) - 1) / p) + 1) * p
/* register the function */
m := mmap(n)
v := fmt.Sprintf("runtime.__%s_%x", fn, m)
registerFunction(v, m, uintptr(n), fp, args, uintptr(len(self)), argPtrs, localPtrs)
/* reference as a slice */
s := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader {
Data : m,
Cap : n,
Len : len(self),
}))
/* copy the machine code, and make it executable */
copy(s, self)
mprotect(m, n)
return Function(&m)
}
func mmap(nb int) uintptr {
if m, _, e := syscall.RawSyscall6(syscall.SYS_MMAP, 0, uintptr(nb), _RW, _AP, 0, 0); e != 0 {
panic(e)
} else {
return m
}
}
func mprotect(p uintptr, nb int) {
if _, _, err := syscall.RawSyscall(syscall.SYS_MPROTECT, p, uintptr(nb), _RX); err != 0 {
panic(err)
}
}
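A usage sketch for the loader above (every name other than Loader and Load is a placeholder): given JIT-generated machine code plus its frame and argument layout, Load maps page-aligned RW memory, registers a synthetic runtime function so tracebacks and stack scanning can resolve it, copies the code in, and flips the pages to RX:

    code := Loader(machineCode) // machineCode []byte, placeholder
    fn := code.Load("decoder", frameSize, argSize, argPtrs, localPtrs)
    // fn now wraps a pointer to executable memory, invoked via an assembly stub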

View File

@@ -1,111 +0,0 @@
//go:build windows
// +build windows
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package loader
import (
`fmt`
`os`
`reflect`
`syscall`
`unsafe`
)
const (
MEM_COMMIT = 0x00001000
MEM_RESERVE = 0x00002000
)
var (
libKernel32 = syscall.NewLazyDLL("KERNEL32.DLL")
libKernel32_VirtualAlloc = libKernel32.NewProc("VirtualAlloc")
libKernel32_VirtualProtect = libKernel32.NewProc("VirtualProtect")
)
type Loader []byte
type Function unsafe.Pointer
func (self Loader) Load(fn string, fp int, args int, argPtrs []bool, localPtrs []bool) (f Function) {
p := os.Getpagesize()
n := (((len(self) - 1) / p) + 1) * p
/* register the function */
m := mmap(n)
v := fmt.Sprintf("runtime.__%s_%x", fn, m)
registerFunction(v, m, uintptr(n), fp, args, uintptr(len(self)), argPtrs, localPtrs)
/* reference as a slice */
s := *(*[]byte)(unsafe.Pointer(&reflect.SliceHeader {
Data : m,
Cap : n,
Len : len(self),
}))
/* copy the machine code, and make it executable */
copy(s, self)
mprotect(m, n)
return Function(&m)
}
func mmap(nb int) uintptr {
addr, err := winapi_VirtualAlloc(0, nb, MEM_COMMIT|MEM_RESERVE, syscall.PAGE_READWRITE)
if err != nil {
panic(err)
}
return addr
}
func mprotect(p uintptr, nb int) (oldProtect int) {
err := winapi_VirtualProtect(p, nb, syscall.PAGE_EXECUTE_READ, &oldProtect)
if err != nil {
panic(err)
}
return
}
// winapi_VirtualAlloc allocate memory
// Doc: https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-virtualalloc
func winapi_VirtualAlloc(lpAddr uintptr, dwSize int, flAllocationType int, flProtect int) (uintptr, error) {
r1, _, err := libKernel32_VirtualAlloc.Call(
lpAddr,
uintptr(dwSize),
uintptr(flAllocationType),
uintptr(flProtect),
)
if r1 == 0 {
return 0, err
}
return r1, nil
}
// winapi_VirtualProtect change memory protection
// Doc: https://docs.microsoft.com/en-us/windows/win32/api/memoryapi/nf-memoryapi-virtualprotect
func winapi_VirtualProtect(lpAddr uintptr, dwSize int, flNewProtect int, lpflOldProtect *int) error {
r1, _, err := libKernel32_VirtualProtect.Call(
lpAddr,
uintptr(dwSize),
uintptr(flNewProtect),
uintptr(unsafe.Pointer(lpflOldProtect)),
)
if r1 == 0 {
return err
}
return nil
}

View File

@@ -1,5 +1,7 @@
// Code generated by Makefile, DO NOT EDIT.
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -22,114 +24,168 @@ import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __i64toa(out *byte, val int64) (ret int)
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __u64toa(out *byte, val uint64) (ret int)
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f64toa(out *byte, val float64) (ret int)
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f32toa(out *byte, val float32) (ret int)
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vnumber(s *string, p *int, v *types.JsonState)
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vsigned(s *string, p *int, v *types.JsonState)
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vunsigned(s *string, p *int, v *types.JsonState)
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one_fast(s *string, p *int) (ret int)
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_number(s *string, p *int) (ret int)
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8_fast(s *string) (ret int)
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}
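A quick usage sketch for the wrappers above (the buffer size and reading ret as "bytes written" are assumptions based on how callers use these routines; rt.NoEscape only hides the pointers from escape analysis so the buffers can stay on the stack):

    var buf [32]byte
    n := i64toa(&buf[0], -1234567890)
    out := string(buf[:n]) // "-1234567890"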

File diff suppressed because it is too large Load Diff

View File

@@ -1,49 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package avx
var (
S_f64toa = _subr__f64toa
S_f32toa = _subr__f32toa
S_i64toa = _subr__i64toa
S_u64toa = _subr__u64toa
S_lspace = _subr__lspace
)
var (
S_quote = _subr__quote
S_unquote = _subr__unquote
)
var (
S_value = _subr__value
S_vstring = _subr__vstring
S_vnumber = _subr__vnumber
S_vsigned = _subr__vsigned
S_vunsigned = _subr__vunsigned
)
var (
S_skip_one = _subr__skip_one
S_skip_one_fast = _subr__skip_one_fast
S_skip_array = _subr__skip_array
S_skip_object = _subr__skip_object
S_skip_number = _subr__skip_number
S_get_by_path = _subr__get_by_path
)

View File

@@ -3,107 +3,601 @@
package avx
//go:nosplit
//go:noescape
//goland:noinspection ALL
func __native_entry__() uintptr
import (
`github.com/bytedance/sonic/loader`
)
var (
_subr__f32toa = __native_entry__() + 31264
_subr__f64toa = __native_entry__() + 192
_subr__get_by_path = __native_entry__() + 25856
_subr__html_escape = __native_entry__() + 9040
_subr__i64toa = __native_entry__() + 3488
_subr__lspace = __native_entry__() + 16
_subr__quote = __native_entry__() + 4880
_subr__skip_array = __native_entry__() + 17952
_subr__skip_number = __native_entry__() + 21952
_subr__skip_object = __native_entry__() + 20368
_subr__skip_one = __native_entry__() + 22112
_subr__skip_one_fast = __native_entry__() + 22352
_subr__u64toa = __native_entry__() + 3600
_subr__unquote = __native_entry__() + 6672
_subr__validate_one = __native_entry__() + 22176
_subr__validate_utf8 = __native_entry__() + 30000
_subr__validate_utf8_fast = __native_entry__() + 30672
_subr__value = __native_entry__() + 12224
_subr__vnumber = __native_entry__() + 15616
_subr__vsigned = __native_entry__() + 17232
_subr__vstring = __native_entry__() + 14064
_subr__vunsigned = __native_entry__() + 17600
const (
_entry__f32toa = 31136
_entry__f64toa = 192
_entry__format_significand = 35248
_entry__format_integer = 3040
_entry__fsm_exec = 17920
_entry__advance_string = 14320
_entry__advance_string_default = 36640
_entry__do_skip_number = 20528
_entry__get_by_path = 25680
_entry__skip_one_fast = 22160
_entry__html_escape = 8912
_entry__i64toa = 3472
_entry__u64toa = 3584
_entry__lspace = 16
_entry__quote = 4864
_entry__skip_array = 17872
_entry__skip_number = 21792
_entry__skip_object = 20160
_entry__skip_one = 21936
_entry__unquote = 6576
_entry__validate_one = 21984
_entry__validate_utf8 = 29888
_entry__validate_utf8_fast = 30560
_entry__value = 12320
_entry__vnumber = 15648
_entry__atof_eisel_lemire64 = 10160
_entry__atof_native = 11712
_entry__decimal_to_f64 = 10528
_entry__right_shift = 36208
_entry__left_shift = 35712
_entry__vsigned = 17200
_entry__vstring = 14144
_entry__vunsigned = 17520
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__get_by_path = 304
_stack__html_escape = 64
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 168
_stack__advance_string = 64
_stack__advance_string_default = 64
_stack__do_skip_number = 48
_stack__get_by_path = 280
_stack__skip_one_fast = 176
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 56
_stack__skip_array = 128
_stack__skip_number = 72
_stack__skip_object = 128
_stack__skip_one = 128
_stack__skip_one_fast = 200
_stack__u64toa = 8
_stack__skip_array = 176
_stack__skip_number = 88
_stack__skip_object = 176
_stack__skip_one = 176
_stack__unquote = 88
_stack__validate_one = 128
_stack__validate_one = 176
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 24
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 136
_stack__vunsigned = 16
)
var (
_ = _subr__f32toa
_ = _subr__f64toa
_ = _subr__get_by_path
_ = _subr__html_escape
_ = _subr__i64toa
_ = _subr__lspace
_ = _subr__quote
_ = _subr__skip_array
_ = _subr__skip_number
_ = _subr__skip_object
_ = _subr__skip_one
_ = _subr__skip_one_fast
_ = _subr__u64toa
_ = _subr__unquote
_ = _subr__validate_one
_ = _subr__validate_utf8
_ = _subr__validate_utf8_fast
_ = _subr__value
_ = _subr__vnumber
_ = _subr__vsigned
_ = _subr__vstring
_ = _subr__vunsigned
_stack__vstring = 120
_stack__vunsigned = 8
)
const (
_ = _stack__f32toa
_ = _stack__f64toa
_ = _stack__get_by_path
_ = _stack__html_escape
_ = _stack__i64toa
_ = _stack__lspace
_ = _stack__quote
_ = _stack__skip_array
_ = _stack__skip_number
_ = _stack__skip_object
_ = _stack__skip_one
_ = _stack__skip_one_fast
_ = _stack__u64toa
_ = _stack__unquote
_ = _stack__validate_one
_ = _stack__validate_utf8
_ = _stack__validate_utf8_fast
_ = _stack__value
_ = _stack__vnumber
_ = _stack__vsigned
_ = _stack__vstring
_ = _stack__vunsigned
_size__f32toa = 3392
_size__f64toa = 2848
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1692
_size__advance_string = 1280
_size__advance_string_default = 944
_size__do_skip_number = 924
_size__get_by_path = 4208
_size__skip_one_fast = 3016
_size__html_escape = 1248
_size__i64toa = 48
_size__u64toa = 1232
_size__lspace = 144
_size__quote = 1696
_size__skip_array = 48
_size__skip_number = 144
_size__skip_object = 48
_size__skip_one = 48
_size__unquote = 2272
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 544
_size__value = 1316
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1184
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 320
_size__vstring = 128
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3350, 48},
{3351, 40},
{3353, 32},
{3355, 24},
{3357, 16},
{3359, 8},
{3363, 0},
{3385, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2788, 56},
{2792, 48},
{2793, 40},
{2795, 32},
{2797, 24},
{2799, 16},
{2801, 8},
{2805, 0},
{2843, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1342, 104},
{1346, 48},
{1347, 40},
{1349, 32},
{1351, 24},
{1353, 16},
{1355, 8},
{1356, 0},
{1692, 104},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{557, 56},
{561, 48},
{562, 40},
{564, 32},
{566, 24},
{568, 16},
{570, 8},
{571, 0},
{1268, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{552, 64},
{556, 48},
{557, 40},
{559, 32},
{561, 24},
{563, 16},
{565, 8},
{566, 0},
{931, 64},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{849, 48},
{850, 40},
{852, 32},
{854, 24},
{856, 16},
{858, 8},
{859, 0},
{924, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{4012, 104},
{4016, 48},
{4017, 40},
{4019, 32},
{4021, 24},
{4023, 16},
{4025, 8},
{4026, 0},
{4194, 104},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{658, 176},
{659, 168},
{661, 160},
{663, 152},
{665, 144},
{667, 136},
{671, 128},
{3016, 176},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1224, 72},
{1228, 48},
{1229, 40},
{1231, 32},
{1233, 24},
{1235, 16},
{1237, 8},
{1239, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{756, 8},
{757, 0},
{1221, 8},
{1223, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{89, 8},
{90, 0},
{103, 8},
{104, 0},
{111, 8},
{113, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1649, 56},
{1653, 48},
{1654, 40},
{1656, 32},
{1658, 24},
{1660, 16},
{1662, 8},
{1663, 0},
{1690, 56},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1684, 88},
{1688, 48},
{1689, 40},
{1691, 32},
{1693, 24},
{1695, 16},
{1697, 8},
{1698, 0},
{2270, 88},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{247, 24},
{251, 16},
{252, 8},
{253, 0},
{527, 24},
{531, 16},
{532, 8},
{534, 0},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{495, 88},
{499, 48},
{500, 40},
{502, 32},
{504, 24},
{506, 16},
{508, 8},
{509, 0},
{1316, 88},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1547, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1144, 56},
{1148, 48},
{1149, 40},
{1151, 32},
{1153, 24},
{1155, 16},
{1157, 8},
{1158, 0},
{1169, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{112, 16},
{113, 8},
{114, 0},
{125, 16},
{126, 8},
{127, 0},
{260, 16},
{261, 8},
{262, 0},
{266, 16},
{267, 8},
{268, 0},
{306, 16},
{307, 8},
{308, 0},
{316, 16},
{317, 8},
{319, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{71, 8},
{72, 0},
{83, 8},
{84, 0},
{107, 8},
{108, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}
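// NOTE (editorial sketch, not part of the generated file): each loader.CFunc
// above appears to bundle a routine's entry offset, text size, maximum stack
// usage and its PC-SP table, so the loader can register enough unwind metadata
// with the Go runtime. Assuming the usual pcvalue convention (a {pc, sp} pair
// means "the stack delta is sp for program counters below pc"), a lookup could
// be sketched as the hypothetical helper below; it is not a sonic/loader API.
func spDeltaAt(pcsp [][2]uint32, pc uint32) uint32 {
    // entries are sorted by pc; return the delta recorded for the range containing pc
    for _, e := range pcsp {
        if pc < e[0] {
            return e[1]
        }
    }
    return 0 // past the last recorded pc: assume the epilogue has restored the stack
}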

View File

@@ -1,5 +1,7 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -22,114 +24,168 @@ import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __i64toa(out *byte, val int64) (ret int)
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __u64toa(out *byte, val uint64) (ret int)
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f64toa(out *byte, val float64) (ret int)
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f32toa(out *byte, val float32) (ret int)
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vnumber(s *string, p *int, v *types.JsonState)
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vsigned(s *string, p *int, v *types.JsonState)
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vunsigned(s *string, p *int, v *types.JsonState)
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one_fast(s *string, p *int) (ret int)
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_number(s *string, p *int) (ret int)
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8_fast(s *string) (ret int)
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}
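// NOTE (editorial sketch, not part of the generated file): the wrappers above
// all follow one pattern: a package-level function variable (__i64toa,
// __u64toa, ...) that is presumably bound to the matching native routine by
// the loader, plus a thin Go wrapper that routes pointers through rt.NoEscape.
// A hedged usage sketch, assuming it lives in this package after the loader
// has run (formatInt64 is a hypothetical helper, not part of sonic); it
// mirrors the buf[:i64toa(&buf[0], v)] idiom used in the tests further below.
func formatInt64(v int64) string {
    var buf [32]byte        // an int64 needs at most 20 bytes including the sign; 32 is safe
    n := i64toa(&buf[0], v) // writes the decimal text into buf and returns its length
    return string(buf[:n])
}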

File diff suppressed because it is too large Load Diff

View File

@@ -1,49 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package avx2
var (
S_f64toa = _subr__f64toa
S_f32toa = _subr__f32toa
S_i64toa = _subr__i64toa
S_u64toa = _subr__u64toa
S_lspace = _subr__lspace
)
var (
S_quote = _subr__quote
S_unquote = _subr__unquote
)
var (
S_value = _subr__value
S_vstring = _subr__vstring
S_vnumber = _subr__vnumber
S_vsigned = _subr__vsigned
S_vunsigned = _subr__vunsigned
)
var (
S_skip_one = _subr__skip_one
S_skip_one_fast = _subr__skip_one_fast
S_skip_array = _subr__skip_array
S_skip_object = _subr__skip_object
S_skip_number = _subr__skip_number
S_get_by_path = _subr__get_by_path
)

View File

@@ -3,107 +3,608 @@
package avx2
//go:nosplit
//go:noescape
//goland:noinspection ALL
func __native_entry__() uintptr
import (
`github.com/bytedance/sonic/loader`
)
var (
_subr__f32toa = __native_entry__() + 33888
_subr__f64toa = __native_entry__() + 288
_subr__get_by_path = __native_entry__() + 28336
_subr__html_escape = __native_entry__() + 10496
_subr__i64toa = __native_entry__() + 3584
_subr__lspace = __native_entry__() + 64
_subr__quote = __native_entry__() + 5072
_subr__skip_array = __native_entry__() + 20688
_subr__skip_number = __native_entry__() + 24912
_subr__skip_object = __native_entry__() + 22736
_subr__skip_one = __native_entry__() + 25072
_subr__skip_one_fast = __native_entry__() + 25488
_subr__u64toa = __native_entry__() + 3696
_subr__unquote = __native_entry__() + 7888
_subr__validate_one = __native_entry__() + 25136
_subr__validate_utf8 = __native_entry__() + 30320
_subr__validate_utf8_fast = __native_entry__() + 31280
_subr__value = __native_entry__() + 15024
_subr__vnumber = __native_entry__() + 18352
_subr__vsigned = __native_entry__() + 19968
_subr__vstring = __native_entry__() + 17024
_subr__vunsigned = __native_entry__() + 20336
const (
_entry__f32toa = 34624
_entry__f64toa = 320
_entry__format_significand = 38736
_entry__format_integer = 3168
_entry__fsm_exec = 21072
_entry__advance_ns = 16928
_entry__advance_string = 17664
_entry__advance_string_default = 40160
_entry__do_skip_number = 23696
_entry__get_by_path = 28864
_entry__skip_one_fast = 25936
_entry__html_escape = 10560
_entry__i64toa = 3600
_entry__u64toa = 3712
_entry__lspace = 64
_entry__quote = 5104
_entry__skip_array = 21024
_entry__skip_number = 25392
_entry__skip_object = 23088
_entry__skip_one = 25536
_entry__unquote = 7888
_entry__validate_one = 25584
_entry__validate_utf8 = 31040
_entry__validate_utf8_fast = 31984
_entry__value = 15376
_entry__vnumber = 18800
_entry__atof_eisel_lemire64 = 12624
_entry__atof_native = 14768
_entry__decimal_to_f64 = 13056
_entry__right_shift = 39696
_entry__left_shift = 39200
_entry__vsigned = 20352
_entry__vstring = 17424
_entry__vunsigned = 20672
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__get_by_path = 296
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 144
_stack__advance_ns = 8
_stack__advance_string = 56
_stack__advance_string_default = 48
_stack__do_skip_number = 48
_stack__get_by_path = 272
_stack__skip_one_fast = 184
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 56
_stack__skip_array = 128
_stack__skip_number = 72
_stack__skip_object = 128
_stack__skip_one = 128
_stack__skip_one_fast = 208
_stack__u64toa = 8
_stack__skip_array = 152
_stack__skip_number = 88
_stack__skip_object = 152
_stack__skip_one = 152
_stack__unquote = 72
_stack__validate_one = 128
_stack__validate_one = 152
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 176
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 112
_stack__vunsigned = 16
)
var (
_ = _subr__f32toa
_ = _subr__f64toa
_ = _subr__get_by_path
_ = _subr__html_escape
_ = _subr__i64toa
_ = _subr__lspace
_ = _subr__quote
_ = _subr__skip_array
_ = _subr__skip_number
_ = _subr__skip_object
_ = _subr__skip_one
_ = _subr__skip_one_fast
_ = _subr__u64toa
_ = _subr__unquote
_ = _subr__validate_one
_ = _subr__validate_utf8
_ = _subr__validate_utf8_fast
_ = _subr__value
_ = _subr__vnumber
_ = _subr__vsigned
_ = _subr__vstring
_ = _subr__vunsigned
_stack__vunsigned = 8
)
const (
_ = _stack__f32toa
_ = _stack__f64toa
_ = _stack__get_by_path
_ = _stack__html_escape
_ = _stack__i64toa
_ = _stack__lspace
_ = _stack__quote
_ = _stack__skip_array
_ = _stack__skip_number
_ = _stack__skip_object
_ = _stack__skip_one
_ = _stack__skip_one_fast
_ = _stack__u64toa
_ = _stack__unquote
_ = _stack__validate_one
_ = _stack__validate_utf8
_ = _stack__validate_utf8_fast
_ = _stack__value
_ = _stack__vnumber
_ = _stack__vsigned
_ = _stack__vstring
_ = _stack__vunsigned
_size__f32toa = 3392
_size__f64toa = 2848
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1468
_size__advance_ns = 496
_size__advance_string = 1088
_size__advance_string_default = 768
_size__do_skip_number = 1360
_size__get_by_path = 2176
_size__skip_one_fast = 2428
_size__html_escape = 2064
_size__i64toa = 48
_size__u64toa = 1248
_size__lspace = 224
_size__quote = 2736
_size__skip_array = 48
_size__skip_number = 144
_size__skip_object = 48
_size__skip_one = 48
_size__unquote = 2480
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 2608
_size__value = 1004
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1712
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 320
_size__vstring = 144
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3350, 48},
{3351, 40},
{3353, 32},
{3355, 24},
{3357, 16},
{3359, 8},
{3363, 0},
{3385, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2788, 56},
{2792, 48},
{2793, 40},
{2795, 32},
{2797, 24},
{2799, 16},
{2801, 8},
{2805, 0},
{2843, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1157, 88},
{1161, 48},
{1162, 40},
{1164, 32},
{1166, 24},
{1168, 16},
{1170, 8},
{1171, 0},
{1468, 88},
}
_pcsp__advance_ns = [][2]uint32{
{1, 0},
{453, 8},
{457, 0},
{481, 8},
{486, 0},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{433, 56},
{437, 48},
{438, 40},
{440, 32},
{442, 24},
{444, 16},
{446, 8},
{450, 0},
{1078, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{332, 48},
{333, 40},
{335, 32},
{337, 24},
{339, 16},
{341, 8},
{345, 0},
{757, 48},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{1274, 48},
{1275, 40},
{1277, 32},
{1279, 24},
{1281, 16},
{1283, 8},
{1287, 0},
{1360, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2049, 88},
{2053, 48},
{2054, 40},
{2056, 32},
{2058, 24},
{2060, 16},
{2062, 8},
{2063, 0},
{2170, 88},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{418, 176},
{419, 168},
{421, 160},
{423, 152},
{425, 144},
{427, 136},
{431, 128},
{2428, 176},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2045, 72},
{2049, 48},
{2050, 40},
{2052, 32},
{2054, 24},
{2056, 16},
{2058, 8},
{2063, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{758, 8},
{759, 0},
{1225, 8},
{1227, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{184, 8},
{188, 0},
{204, 8},
{208, 0},
{215, 8},
{220, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2687, 56},
{2691, 48},
{2692, 40},
{2694, 32},
{2696, 24},
{2698, 16},
{2700, 8},
{2704, 0},
{2731, 56},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{79, 72},
{83, 48},
{84, 40},
{86, 32},
{88, 24},
{90, 16},
{92, 8},
{96, 0},
{2464, 72},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{1738, 176},
{1739, 168},
{1743, 160},
{2018, 176},
{2019, 168},
{2023, 160},
{2600, 176},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{988, 88},
{992, 48},
{993, 40},
{995, 32},
{997, 24},
{999, 16},
{1001, 8},
{1004, 0},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1547, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1673, 56},
{1677, 48},
{1678, 40},
{1680, 32},
{1682, 24},
{1684, 16},
{1686, 8},
{1690, 0},
{1702, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{112, 16},
{113, 8},
{114, 0},
{125, 16},
{126, 8},
{127, 0},
{260, 16},
{261, 8},
{262, 0},
{266, 16},
{267, 8},
{268, 0},
{306, 16},
{307, 8},
{308, 0},
{316, 16},
{317, 8},
{319, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{71, 8},
{72, 0},
{83, 8},
{84, 0},
{107, 8},
{108, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_ns", _entry__advance_ns, _size__advance_ns, _stack__advance_ns, _pcsp__advance_ns},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}

View File

@@ -24,6 +24,8 @@ import (
`github.com/bytedance/sonic/internal/native/avx2`
`github.com/bytedance/sonic/internal/native/sse`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/loader`
)
const (
@@ -40,8 +42,9 @@ var (
)
var (
S_quote uintptr
S_unquote uintptr
S_quote uintptr
S_unquote uintptr
S_html_escape uintptr
)
var (
@@ -53,140 +56,148 @@ var (
)
var (
S_skip_one uintptr
S_skip_one_fast uintptr
S_get_by_path uintptr
S_skip_array uintptr
S_skip_object uintptr
S_skip_number uintptr
S_skip_one uintptr
S_skip_one_fast uintptr
S_get_by_path uintptr
S_skip_array uintptr
S_skip_object uintptr
S_skip_number uintptr
)
var (
S_validate_one uintptr
S_validate_utf8 uintptr
S_validate_utf8_fast uintptr
)
var (
__Quote func(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) int
__Unquote func(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) int
__HTMLEscape func(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) int
__Value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) int
__SkipOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) int
__SkipOneFast func(s unsafe.Pointer, p unsafe.Pointer) int
__GetByPath func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) int
__ValidateOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) int
__I64toa func(out unsafe.Pointer, val int64) (ret int)
__U64toa func(out unsafe.Pointer, val uint64) (ret int)
__F64toa func(out unsafe.Pointer, val float64) (ret int)
__ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__ValidateUTF8Fast func(s unsafe.Pointer) (ret int)
)
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func Quote(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) int
func Quote(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) int {
return __Quote(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func Unquote(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) int
func Unquote(s unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) int {
return __Unquote(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func HTMLEscape(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) int
func HTMLEscape(s unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) int {
return __HTMLEscape(rt.NoEscape(unsafe.Pointer(s)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func Value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) int
func Value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) int {
return __Value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func SkipOne(s *string, p *int, m *types.StateMachine, flags uint64) int
func SkipOne(s *string, p *int, m *types.StateMachine, flags uint64) int {
return __SkipOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func SkipOneFast(s *string, p *int) int
func SkipOneFast(s *string, p *int) int {
return __SkipOneFast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func GetByPath(s *string, p *int, path *[]interface{}, m *types.StateMachine) int
func GetByPath(s *string, p *int, path *[]interface{}, m *types.StateMachine) int {
return __GetByPath(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func ValidateOne(s *string, p *int, m *types.StateMachine) int
func ValidateOne(s *string, p *int, m *types.StateMachine) int {
return __ValidateOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func I64toa(out *byte, val int64) (ret int)
func I64toa(out *byte, val int64) (ret int) {
return __I64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func U64toa(out *byte, val uint64) (ret int)
func U64toa(out *byte, val uint64) (ret int) {
return __U64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func F64toa(out *byte, val float64) (ret int)
func F64toa(out *byte, val float64) (ret int) {
return __F64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func ValidateUTF8(s *string, p *int, m *types.StateMachine) (ret int)
func ValidateUTF8(s *string, p *int, m *types.StateMachine) (ret int) {
return __ValidateUTF8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func ValidateUTF8Fast(s *string) (ret int)
func ValidateUTF8Fast(s *string) (ret int) {
return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s)))
}
var stubs = []loader.GoC{
{"_f64toa", &S_f64toa, &__F64toa},
{"_f32toa", &S_f32toa, nil},
{"_i64toa", &S_i64toa, &__I64toa},
{"_u64toa", &S_u64toa, &__U64toa},
{"_lspace", &S_lspace, nil},
{"_quote", &S_quote, &__Quote},
{"_unquote", &S_unquote, &__Unquote},
{"_html_escape", &S_html_escape, &__HTMLEscape},
{"_value", &S_value, &__Value},
{"_vstring", &S_vstring, nil},
{"_vnumber", &S_vnumber, nil},
{"_vsigned", &S_vsigned, nil},
{"_vunsigned", &S_vunsigned, nil},
{"_skip_one", &S_skip_one, &__SkipOne},
{"_skip_one_fast", &S_skip_one_fast, &__SkipOneFast},
{"_get_by_path", &S_get_by_path, &__GetByPath},
{"_skip_array", &S_skip_array, nil},
{"_skip_object", &S_skip_object, nil},
{"_skip_number", &S_skip_number, nil},
{"_validate_one", &S_validate_one, &__ValidateOne},
{"_validate_utf8", &S_validate_utf8, &__ValidateUTF8},
{"_validate_utf8_fast", &S_validate_utf8_fast, &__ValidateUTF8Fast},
}
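// NOTE (editorial, not generated): judging from how stubs is consumed by
// loader.WrapGoC below, each triple appears to pair a native symbol name with
// a *uintptr that receives the symbol's address (the S_* variables) and an
// optional pointer to a Go function variable (the __* hooks) that the loader
// binds to it; entries with a nil third field only export the address.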
func useAVX() {
S_f64toa = avx.S_f64toa
S_f32toa = avx.S_f32toa
S_i64toa = avx.S_i64toa
S_u64toa = avx.S_u64toa
S_lspace = avx.S_lspace
S_quote = avx.S_quote
S_unquote = avx.S_unquote
S_value = avx.S_value
S_vstring = avx.S_vstring
S_vnumber = avx.S_vnumber
S_vsigned = avx.S_vsigned
S_vunsigned = avx.S_vunsigned
S_skip_one = avx.S_skip_one
S_skip_one_fast = avx.S_skip_one_fast
S_skip_array = avx.S_skip_array
S_skip_object = avx.S_skip_object
S_skip_number = avx.S_skip_number
S_get_by_path = avx.S_get_by_path
loader.WrapGoC(avx.Text__native_entry__, avx.Funcs, stubs, "avx", "avx/native.c")
}
func useAVX2() {
S_f64toa = avx2.S_f64toa
S_f32toa = avx2.S_f32toa
S_i64toa = avx2.S_i64toa
S_u64toa = avx2.S_u64toa
S_lspace = avx2.S_lspace
S_quote = avx2.S_quote
S_unquote = avx2.S_unquote
S_value = avx2.S_value
S_vstring = avx2.S_vstring
S_vnumber = avx2.S_vnumber
S_vsigned = avx2.S_vsigned
S_vunsigned = avx2.S_vunsigned
S_skip_one = avx2.S_skip_one
S_skip_one_fast = avx2.S_skip_one_fast
S_skip_array = avx2.S_skip_array
S_skip_object = avx2.S_skip_object
S_skip_number = avx2.S_skip_number
S_get_by_path = avx2.S_get_by_path
loader.WrapGoC(avx2.Text__native_entry__, avx2.Funcs, stubs, "avx2", "avx2/native.c")
}
func useSSE() {
S_f64toa = sse.S_f64toa
S_f32toa = sse.S_f32toa
S_i64toa = sse.S_i64toa
S_u64toa = sse.S_u64toa
S_lspace = sse.S_lspace
S_quote = sse.S_quote
S_unquote = sse.S_unquote
S_value = sse.S_value
S_vstring = sse.S_vstring
S_vnumber = sse.S_vnumber
S_vsigned = sse.S_vsigned
S_vunsigned = sse.S_vunsigned
S_skip_one = sse.S_skip_one
S_skip_one_fast = sse.S_skip_one_fast
S_skip_array = sse.S_skip_array
S_skip_object = sse.S_skip_object
S_skip_number = sse.S_skip_number
S_get_by_path = sse.S_get_by_path
loader.WrapGoC(sse.Text__native_entry__, sse.Funcs, stubs, "sse", "sse/native.c")
}
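// NOTE (editorial, not generated): useAVX, useAVX2 and useSSE each rebind the
// S_* symbols to one instruction-set build and wrap that build's text via
// loader.WrapGoC. The body of init is not shown in this diff; a typical
// feature-gated selection (an assumption, not the actual code) would be:
//
//     if cpu.HasAVX2 {
//         useAVX2()
//     } else if cpu.HasAVX {
//         useAVX()
//     } else {
//         useSSE()
//     }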
func init() {

View File

@@ -1,137 +0,0 @@
//
// Copyright 2021 ByteDance Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "go_asm.h"
#include "funcdata.h"
#include "textflag.h"
TEXT ·Quote(SB), NOSPLIT, $0 - 48
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__quote(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__quote(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__quote(SB)
TEXT ·Unquote(SB), NOSPLIT, $0 - 48
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__unquote(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__unquote(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__unquote(SB)
TEXT ·HTMLEscape(SB), NOSPLIT, $0 - 40
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__html_escape(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__html_escape(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__html_escape(SB)
TEXT ·Value(SB), NOSPLIT, $0 - 48
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__value(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__value(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__value(SB)
TEXT ·SkipOne(SB), NOSPLIT, $0 - 40
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__skip_one(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__skip_one(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__skip_one(SB)
TEXT ·SkipOneFast(SB), NOSPLIT, $0 - 24
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__skip_one_fast(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__skip_one_fast(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__skip_one_fast(SB)
TEXT ·GetByPath(SB), NOSPLIT, $0 - 40
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__get_by_path(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__get_by_path(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__get_by_path(SB)
TEXT ·ValidateOne(SB), NOSPLIT, $0 - 32
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_one(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_one(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_one(SB)
TEXT ·ValidateUTF8(SB), NOSPLIT, $0 - 40
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_utf8(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_utf8(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_utf8(SB)
TEXT ·ValidateUTF8Fast(SB), NOSPLIT, $0 - 16
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__validate_utf8_fast(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__validate_utf8_fast(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__validate_utf8_fast(SB)
TEXT ·I64toa(SB), NOSPLIT, $0 - 32
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__i64toa(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__i64toa(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__i64toa(SB)
TEXT ·U64toa(SB), NOSPLIT, $0 - 32
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__u64toa(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__u64toa(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__u64toa(SB)
TEXT ·F64toa(SB), NOSPLIT, $0 - 32
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX2(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx2·__f64toa(SB)
CMPB github·com∕bytedance∕sonic∕internal∕cpu·HasAVX(SB), $0
JE 2(PC)
JMP github·com∕bytedance∕sonic∕internal∕native∕avx·__f64toa(SB)
JMP github·com∕bytedance∕sonic∕internal∕native∕sse·__f64toa(SB)
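// NOTE (editorial, not generated): every exported entry point in this file
// follows the same three-way dispatch: compare the package-level HasAVX2 and
// HasAVX byte flags against zero and tail-jump (JMP) into the matching
// avx2/avx/sse implementation, falling through to the SSE build when neither
// flag is set. JE 2(PC) skips the following JMP when the flag is zero.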

View File

@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -17,35 +19,35 @@
package {{PACKAGE}}
import (
`encoding/json`
`math`
`math/rand`
`strconv`
`testing`
`math/rand`
`encoding/json`
`github.com/stretchr/testify/assert`
)
func TestFastFloat_Encode(t *testing.T) {
var buf [64]byte
assert.Equal(t, "0" , string(buf[:__f64toa(&buf[0], 0)]))
assert.Equal(t, "-0" , string(buf[:__f64toa(&buf[0], math.Float64frombits(0x8000000000000000))]))
assert.Equal(t, "12340000000" , string(buf[:__f64toa(&buf[0], 1234e7)]))
assert.Equal(t, "12.34" , string(buf[:__f64toa(&buf[0], 1234e-2)]))
assert.Equal(t, "0.001234" , string(buf[:__f64toa(&buf[0], 1234e-6)]))
assert.Equal(t, "1e+30" , string(buf[:__f64toa(&buf[0], 1e30)]))
assert.Equal(t, "1.234e+33" , string(buf[:__f64toa(&buf[0], 1234e30)]))
assert.Equal(t, "1.234e+308" , string(buf[:__f64toa(&buf[0], 1234e305)]))
assert.Equal(t, "1.234e-317" , string(buf[:__f64toa(&buf[0], 1234e-320)]))
assert.Equal(t, "1.7976931348623157e+308" , string(buf[:__f64toa(&buf[0], 1.7976931348623157e308)]))
assert.Equal(t, "-12340000000" , string(buf[:__f64toa(&buf[0], -1234e7)]))
assert.Equal(t, "-12.34" , string(buf[:__f64toa(&buf[0], -1234e-2)]))
assert.Equal(t, "-0.001234" , string(buf[:__f64toa(&buf[0], -1234e-6)]))
assert.Equal(t, "-1e+30" , string(buf[:__f64toa(&buf[0], -1e30)]))
assert.Equal(t, "-1.234e+33" , string(buf[:__f64toa(&buf[0], -1234e30)]))
assert.Equal(t, "-1.234e+308" , string(buf[:__f64toa(&buf[0], -1234e305)]))
assert.Equal(t, "-1.234e-317" , string(buf[:__f64toa(&buf[0], -1234e-320)]))
assert.Equal(t, "-2.2250738585072014e-308" , string(buf[:__f64toa(&buf[0], -2.2250738585072014e-308)]))
assert.Equal(t, "0" , string(buf[:f64toa(&buf[0], 0)]))
assert.Equal(t, "-0" , string(buf[:f64toa(&buf[0], math.Float64frombits(0x8000000000000000))]))
assert.Equal(t, "12340000000" , string(buf[:f64toa(&buf[0], 1234e7)]))
assert.Equal(t, "12.34" , string(buf[:f64toa(&buf[0], 1234e-2)]))
assert.Equal(t, "0.001234" , string(buf[:f64toa(&buf[0], 1234e-6)]))
assert.Equal(t, "1e+30" , string(buf[:f64toa(&buf[0], 1e30)]))
assert.Equal(t, "1.234e+33" , string(buf[:f64toa(&buf[0], 1234e30)]))
assert.Equal(t, "1.234e+308" , string(buf[:f64toa(&buf[0], 1234e305)]))
assert.Equal(t, "1.234e-317" , string(buf[:f64toa(&buf[0], 1234e-320)]))
assert.Equal(t, "1.7976931348623157e+308" , string(buf[:f64toa(&buf[0], 1.7976931348623157e308)]))
assert.Equal(t, "-12340000000" , string(buf[:f64toa(&buf[0], -1234e7)]))
assert.Equal(t, "-12.34" , string(buf[:f64toa(&buf[0], -1234e-2)]))
assert.Equal(t, "-0.001234" , string(buf[:f64toa(&buf[0], -1234e-6)]))
assert.Equal(t, "-1e+30" , string(buf[:f64toa(&buf[0], -1e30)]))
assert.Equal(t, "-1.234e+33" , string(buf[:f64toa(&buf[0], -1234e30)]))
assert.Equal(t, "-1.234e+308" , string(buf[:f64toa(&buf[0], -1234e305)]))
assert.Equal(t, "-1.234e-317" , string(buf[:f64toa(&buf[0], -1234e-320)]))
assert.Equal(t, "-2.2250738585072014e-308" , string(buf[:f64toa(&buf[0], -2.2250738585072014e-308)]))
}
func TestFastFloat_Random(t *testing.T) {
@@ -56,7 +58,7 @@ func TestFastFloat_Random(t *testing.T) {
f64 := math.Float64frombits(b64)
jout, jerr := json.Marshal(f64)
n := __f64toa(&buf[0], f64)
n := f64toa(&buf[0], f64)
if jerr == nil {
assert.Equal(t, jout, buf[:n])
} else {
@@ -65,7 +67,7 @@ func TestFastFloat_Random(t *testing.T) {
f32 := math.Float32frombits(rand.Uint32())
jout, jerr = json.Marshal(f32)
n = __f32toa(&buf[0], f32)
n = f32toa(&buf[0], f32)
if jerr == nil {
assert.Equal(t, jout, buf[:n])
} else {
@@ -97,7 +99,7 @@ func BenchmarkParseFloat64(b *testing.B) {
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], c.float, 'g', -1, 64) }},
}, {
name: "FastFloat",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { __f64toa(&buf[0], c.float) }},
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f64toa(&buf[0], c.float) }},
}}
for _, bm := range f64bench {
name := bm.name + "_" + c.name
@@ -128,11 +130,11 @@ func BenchmarkParseFloat32(b *testing.B) {
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], float64(c.float), 'g', -1, 32) }},
}, {
name: "FastFloat32",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { __f32toa(&buf[0], c.float) }},
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f32toa(&buf[0], c.float) }},
}}
for _, bm := range bench {
name := bm.name + "_" + c.name
b.Run(name, bm.test)
}
}
}
}

View File

@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -26,126 +28,126 @@ import (
func TestFastInt_IntToString(t *testing.T) {
var buf [32]byte
assert.Equal(t, "0" , string(buf[:__i64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:__i64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:__i64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:__i64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:__i64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:__i64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:__i64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:__i64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:__i64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:__i64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:__i64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:__i64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:__i64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:__i64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:__i64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:__i64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:__i64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:__i64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:__i64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:__i64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "9223372036854775807" , string(buf[:__i64toa(&buf[0], 9223372036854775807)]))
assert.Equal(t, "-1" , string(buf[:__i64toa(&buf[0], -1)]))
assert.Equal(t, "-12" , string(buf[:__i64toa(&buf[0], -12)]))
assert.Equal(t, "-123" , string(buf[:__i64toa(&buf[0], -123)]))
assert.Equal(t, "-1234" , string(buf[:__i64toa(&buf[0], -1234)]))
assert.Equal(t, "-12345" , string(buf[:__i64toa(&buf[0], -12345)]))
assert.Equal(t, "-123456" , string(buf[:__i64toa(&buf[0], -123456)]))
assert.Equal(t, "-1234567" , string(buf[:__i64toa(&buf[0], -1234567)]))
assert.Equal(t, "-12345678" , string(buf[:__i64toa(&buf[0], -12345678)]))
assert.Equal(t, "-123456789" , string(buf[:__i64toa(&buf[0], -123456789)]))
assert.Equal(t, "-1234567890" , string(buf[:__i64toa(&buf[0], -1234567890)]))
assert.Equal(t, "-12345678901" , string(buf[:__i64toa(&buf[0], -12345678901)]))
assert.Equal(t, "-123456789012" , string(buf[:__i64toa(&buf[0], -123456789012)]))
assert.Equal(t, "-1234567890123" , string(buf[:__i64toa(&buf[0], -1234567890123)]))
assert.Equal(t, "-12345678901234" , string(buf[:__i64toa(&buf[0], -12345678901234)]))
assert.Equal(t, "-123456789012345" , string(buf[:__i64toa(&buf[0], -123456789012345)]))
assert.Equal(t, "-1234567890123456" , string(buf[:__i64toa(&buf[0], -1234567890123456)]))
assert.Equal(t, "-12345678901234567" , string(buf[:__i64toa(&buf[0], -12345678901234567)]))
assert.Equal(t, "-123456789012345678" , string(buf[:__i64toa(&buf[0], -123456789012345678)]))
assert.Equal(t, "-1234567890123456789" , string(buf[:__i64toa(&buf[0], -1234567890123456789)]))
assert.Equal(t, "-9223372036854775808" , string(buf[:__i64toa(&buf[0], -9223372036854775808)]))
assert.Equal(t, "0" , string(buf[:i64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:i64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:i64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:i64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:i64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:i64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:i64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:i64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:i64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:i64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:i64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:i64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:i64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:i64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:i64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:i64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:i64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:i64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:i64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:i64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "9223372036854775807" , string(buf[:i64toa(&buf[0], 9223372036854775807)]))
assert.Equal(t, "-1" , string(buf[:i64toa(&buf[0], -1)]))
assert.Equal(t, "-12" , string(buf[:i64toa(&buf[0], -12)]))
assert.Equal(t, "-123" , string(buf[:i64toa(&buf[0], -123)]))
assert.Equal(t, "-1234" , string(buf[:i64toa(&buf[0], -1234)]))
assert.Equal(t, "-12345" , string(buf[:i64toa(&buf[0], -12345)]))
assert.Equal(t, "-123456" , string(buf[:i64toa(&buf[0], -123456)]))
assert.Equal(t, "-1234567" , string(buf[:i64toa(&buf[0], -1234567)]))
assert.Equal(t, "-12345678" , string(buf[:i64toa(&buf[0], -12345678)]))
assert.Equal(t, "-123456789" , string(buf[:i64toa(&buf[0], -123456789)]))
assert.Equal(t, "-1234567890" , string(buf[:i64toa(&buf[0], -1234567890)]))
assert.Equal(t, "-12345678901" , string(buf[:i64toa(&buf[0], -12345678901)]))
assert.Equal(t, "-123456789012" , string(buf[:i64toa(&buf[0], -123456789012)]))
assert.Equal(t, "-1234567890123" , string(buf[:i64toa(&buf[0], -1234567890123)]))
assert.Equal(t, "-12345678901234" , string(buf[:i64toa(&buf[0], -12345678901234)]))
assert.Equal(t, "-123456789012345" , string(buf[:i64toa(&buf[0], -123456789012345)]))
assert.Equal(t, "-1234567890123456" , string(buf[:i64toa(&buf[0], -1234567890123456)]))
assert.Equal(t, "-12345678901234567" , string(buf[:i64toa(&buf[0], -12345678901234567)]))
assert.Equal(t, "-123456789012345678" , string(buf[:i64toa(&buf[0], -123456789012345678)]))
assert.Equal(t, "-1234567890123456789" , string(buf[:i64toa(&buf[0], -1234567890123456789)]))
assert.Equal(t, "-9223372036854775808" , string(buf[:i64toa(&buf[0], -9223372036854775808)]))
}
func TestFastInt_UintToString(t *testing.T) {
var buf [32]byte
assert.Equal(t, "0" , string(buf[:__u64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:__u64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:__u64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:__u64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:__u64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:__u64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:__u64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:__u64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:__u64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:__u64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:__u64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:__u64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:__u64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:__u64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:__u64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:__u64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:__u64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:__u64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:__u64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:__u64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "12345678901234567890" , string(buf[:__u64toa(&buf[0], 12345678901234567890)]))
assert.Equal(t, "18446744073709551615" , string(buf[:__u64toa(&buf[0], 18446744073709551615)]))
assert.Equal(t, "0" , string(buf[:u64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:u64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:u64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:u64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:u64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:u64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:u64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:u64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:u64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:u64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:u64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:u64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:u64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:u64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:u64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:u64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:u64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:u64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:u64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:u64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "12345678901234567890" , string(buf[:u64toa(&buf[0], 12345678901234567890)]))
assert.Equal(t, "18446744073709551615" , string(buf[:u64toa(&buf[0], 18446744073709551615)]))
}
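// A minimal usage sketch of the contract the assertions above exercise: each itoa
// helper writes digits into a caller-supplied buffer and returns how many bytes it
// wrote, so callers slice the buffer by the return value; 32 bytes is enough for any
// 64-bit integer. (Illustrative only; formatInt64 is not part of this package.)
func formatInt64(v int64) string {
    var buf [32]byte
    n := i64toa(&buf[0], v)
    return string(buf[:n])
}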
func BenchmarkFastInt_IntToString(b *testing.B) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], int64(i), 10) }},
}, {
name: "StdLib-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], -int64(i), 10) }},
}, {
name: "FastInt-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __i64toa(&buf[0], int64(i)) }},
}, {
name: "FastInt-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __i64toa(&buf[0], -int64(i)) }},
}}
for _, bm := range benchmarks {
b.Run(bm.name, bm.test)
}
}
type utoaBench struct {
name string
num uint64
}
func BenchmarkFastInt_UintToString(b *testing.B) {
maxUint := "18446744073709551615"
benchs := make([]utoaBench, len(maxUint) + 1)
benchs[0].name = "Zero"
benchs[0].num = 0
for i := 1; i <= len(maxUint); i++ {
benchs[i].name = strconv.FormatInt(int64(i), 10) + "-Digs"
benchs[i].num, _ = strconv.ParseUint(string(maxUint[:i]), 10, 64)
}
for _, t := range(benchs) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendUint(buf[:0], t.num, 10) }},
}, {
name: "FastInt",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { __u64toa(&buf[0], t.num) }},
}}
for _, bm := range benchmarks {
name := fmt.Sprintf("%s_%s", bm.name, t.name)
b.Run(name, bm.test)
}
}
}
func BenchmarkFastInt_IntToString(b *testing.B) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], int64(i), 10) }},
}, {
name: "StdLib-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], -int64(i), 10) }},
}, {
name: "FastInt-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], int64(i)) }},
}, {
name: "FastInt-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], -int64(i)) }},
}}
for _, bm := range benchmarks {
b.Run(bm.name, bm.test)
}
}
type utoaBench struct {
name string
num uint64
}
func BenchmarkFastInt_UintToString(b *testing.B) {
maxUint := "18446744073709551615"
benchs := make([]utoaBench, len(maxUint) + 1)
benchs[0].name = "Zero"
benchs[0].num = 0
for i := 1; i <= len(maxUint); i++ {
benchs[i].name = strconv.FormatInt(int64(i), 10) + "-Digs"
benchs[i].num, _ = strconv.ParseUint(string(maxUint[:i]), 10, 64)
}
for _, t := range(benchs) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendUint(buf[:0], t.num, 10) }},
}, {
name: "FastInt",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { u64toa(&buf[0], t.num) }},
}}
for _, bm := range benchmarks {
name := fmt.Sprintf("%s_%s", bm.name, t.name)
b.Run(name, bm.test)
}
}
}
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -20,114 +22,168 @@ import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __i64toa(out *byte, val int64) (ret int)
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __u64toa(out *byte, val uint64) (ret int)
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f64toa(out *byte, val float64) (ret int)
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f32toa(out *byte, val float32) (ret int)
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vnumber(s *string, p *int, v *types.JsonState)
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vsigned(s *string, p *int, v *types.JsonState)
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vunsigned(s *string, p *int, v *types.JsonState)
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one_fast(s *string, p *int) (ret int)
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_number(s *string, p *int) (ret int)
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8_fast(s *string) (ret int)
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}
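// All wrappers above share one pattern: typed Go pointers are laundered through
// rt.NoEscape(unsafe.Pointer(...)) before reaching the loader-bound native entry, so
// escape analysis does not force the caller's scratch buffers onto the heap. The usual
// implementation of such a helper looks roughly like the sketch below (the real one
// lives in internal/rt; this is illustrative only):
//go:nosplit
func noEscapeSketch(p unsafe.Pointer) unsafe.Pointer {
    x := uintptr(p)
    return unsafe.Pointer(x ^ 0) // identity in value, but opaque to escape analysis
}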
@@ -1,3 +1,5 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -20,6 +22,7 @@ import (
`encoding/hex`
`fmt`
`math`
`strings`
`testing`
`unsafe`
@@ -34,7 +37,7 @@ func TestNative_Value(t *testing.T) {
var v types.JsonState
s := ` -12345`
p := (*rt.GoString)(unsafe.Pointer(&s))
x := __value(p.Ptr, p.Len, 0, &v, 0)
x := value(p.Ptr, p.Len, 0, &v, 0)
assert.Equal(t, 9, x)
assert.Equal(t, types.V_INTEGER, v.Vt)
assert.Equal(t, int64(-12345), v.Iv)
@@ -46,7 +49,7 @@ func TestNative_Value_OutOfBound(t *testing.T) {
mem := []byte{'"', '"'}
s := rt.Mem2Str(mem[:1])
p := (*rt.GoString)(unsafe.Pointer(&s))
x := __value(p.Ptr, p.Len, 0, &v, 0)
x := value(p.Ptr, p.Len, 0, &v, 0)
assert.Equal(t, 1, x)
assert.Equal(t, -int(types.ERR_EOF), int(v.Vt))
}
@@ -56,7 +59,7 @@ func TestNative_Quote(t *testing.T) {
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -70,7 +73,7 @@ func TestNative_QuoteNoMem(t *testing.T) {
d := make([]byte, 10)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
assert.Equal(t, -6, rv)
assert.Equal(t, 5, len(d))
assert.Equal(t, `hello`, string(d))
@@ -81,7 +84,7 @@ func TestNative_DoubleQuote(t *testing.T) {
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, types.F_DOUBLE_UNQUOTE)
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, types.F_DOUBLE_UNQUOTE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -96,7 +99,7 @@ func TestNative_Unquote(t *testing.T) {
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -111,7 +114,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_EOF), rv)
assert.Equal(t, 5, ep)
s = `asdf\gqwer`
@@ -119,7 +122,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_ESCAPE), rv)
assert.Equal(t, 5, ep)
s = `asdf\u1gggqwer`
@@ -127,7 +130,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_CHAR), rv)
assert.Equal(t, 7, ep)
s = `asdf\ud800qwer`
@@ -135,7 +138,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 6, ep)
s = `asdf\\ud800qwer`
@@ -143,7 +146,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 7, ep)
s = `asdf\ud800\ud800qwer`
@@ -151,7 +154,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 12, ep)
s = `asdf\\ud800\\ud800qwer`
@@ -159,7 +162,7 @@ func TestNative_UnquoteError(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 14, ep)
}
@@ -170,7 +173,7 @@ func TestNative_DoubleUnquote(t *testing.T) {
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -185,7 +188,7 @@ func TestNative_UnquoteUnicodeReplacement(t *testing.T) {
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -197,7 +200,7 @@ func TestNative_UnquoteUnicodeReplacement(t *testing.T) {
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = __unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -211,7 +214,7 @@ func TestNative_HTMLEscape(t *testing.T) {
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
@@ -225,7 +228,7 @@ func TestNative_HTMLEscapeNoMem(t *testing.T) {
d := make([]byte, 10)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := __html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
assert.Equal(t, -6, rv)
assert.Equal(t, 5, len(d))
assert.Equal(t, `hello`, string(d))
@@ -235,11 +238,11 @@ func TestNative_Vstring(t *testing.T) {
var v types.JsonState
i := 0
s := `test"test\n2"`
__vstring(&s, &i, &v, 0)
vstring(&s, &i, &v, 0)
assert.Equal(t, 5, i)
assert.Equal(t, -1, v.Ep)
assert.Equal(t, int64(0), v.Iv)
__vstring(&s, &i, &v, 0)
vstring(&s, &i, &v, 0)
assert.Equal(t, 13, i)
assert.Equal(t, 9, v.Ep)
assert.Equal(t, int64(5), v.Iv)
@@ -250,7 +253,7 @@ func TestNative_Vstring_ValidUnescapedChars(t *testing.T) {
valid := uint64(types.F_VALIDATE_STRING)
i := 0
s := "test\x1f\""
__vstring(&s, &i, &v, valid)
vstring(&s, &i, &v, valid)
assert.Equal(t, -int(types.ERR_INVALID_CHAR), int(v.Vt))
}
@@ -258,7 +261,7 @@ func TestNative_VstringEscapeEOF(t *testing.T) {
var v types.JsonState
i := 0
s := `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"x`
__vstring(&s, &i, &v, 0)
vstring(&s, &i, &v, 0)
assert.Equal(t, 95, i)
assert.Equal(t, 63, v.Ep)
assert.Equal(t, int64(0), v.Iv)
@@ -274,7 +277,7 @@ func TestNative_VstringHangUpOnRandomData(t *testing.T) {
p := 1
s := rt.Mem2Str(v)
var js types.JsonState
__vstring(&s, &p, &js, 0)
vstring(&s, &p, &js, 0)
fmt.Printf("js: %s\n", spew.Sdump(js))
}
@@ -282,49 +285,49 @@ func TestNative_Vnumber(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "1.234"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 1.234, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "1.234e5"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 7, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 1.234e5, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "0.0125"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 6, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 0.0125, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "100000000000000000000"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 21, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 100000000000000000000.0, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "999999999999999900000"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 21, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 999999999999999900000.0, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "-1.234"
__vnumber(&s, &i, &v)
vnumber(&s, &i, &v)
assert.Equal(t, 6, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, -1.234, v.Dv)
@@ -335,65 +338,65 @@ func TestNative_Vsigned(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "-1234"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(-1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "9223372036854775807"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(math.MaxInt64), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "-9223372036854775808"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 20, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(math.MinInt64), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "9223372036854775808"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 18, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "-9223372036854775809"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "1.234"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "0.0125"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e5"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e-5"
__vsigned(&s, &i, &v)
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
@@ -403,63 +406,63 @@ func TestNative_Vunsigned(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "18446744073709551615"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 20, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, ^int64(0), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "18446744073709551616"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "-1234"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "1.234"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "0.0125"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "1234e5"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e5"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1.234e5"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1.234e-5"
__vunsigned(&s, &i, &v)
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
@@ -468,36 +471,36 @@ func TestNative_Vunsigned(t *testing.T) {
func TestNative_SkipOne(t *testing.T) {
p := 0
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
q := __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 42, p)
assert.Equal(t, 1, q)
p = 0
s = `1 2.5 -3 "asdf\nqwer" true false null {} []`
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 1, p)
assert.Equal(t, 0, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 5, p)
assert.Equal(t, 2, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 8, p)
assert.Equal(t, 6, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 21, p)
assert.Equal(t, 9, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 26, p)
assert.Equal(t, 22, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 32, p)
assert.Equal(t, 27, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 37, p)
assert.Equal(t, 33, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 40, p)
assert.Equal(t, 38, q)
q = __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 43, p)
assert.Equal(t, 41, q)
}
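// A hedged sketch of the convention these assertions exercise: skip_one advances p past
// exactly one JSON value (leading whitespace included) and returns the offset where that
// value starts, while a negative return encodes a types.ParsingError (ERR_EOF once the
// input runs out). Walking every value of a whitespace-separated input, as above:
func eachValue(s string, visit func(raw string)) {
    p := 0
    for {
        start := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
        if start < 0 {
            return // input exhausted (ERR_EOF) or malformed
        }
        visit(s[start:p])
    }
}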
@@ -509,7 +512,7 @@ func TestNative_SkipOne_Error(t *testing.T) {
`"asdf`, `"\\\"`,
}) {
p := 0
q := __skip_one(&s, &p, &types.StateMachine{}, uint64(0))
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.True(t, q < 0)
}
}
@@ -517,21 +520,21 @@ func TestNative_SkipOne_Error(t *testing.T) {
func TestNative_SkipArray(t *testing.T) {
p := 0
s := `null, true, false, 1, 2.0, -3, {"asdf": "wqer"}],`
__skip_array(&s, &p, &types.StateMachine{}, uint64(0))
skip_array(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, p, 48)
}
func TestNative_SkipObject(t *testing.T) {
p := 0
s := `"asdf": "wqer"},`
__skip_object(&s, &p, &types.StateMachine{}, uint64(0))
skip_object(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, p, 15)
}
func TestNative_SkipNumber(t *testing.T) {
p := 0
s := `-1.23e+12`
q := __skip_number(&s, &p)
q := skip_number(&s, &p)
assert.Equal(t, 9, p)
assert.Equal(t, 0, q)
}
@@ -539,44 +542,44 @@ func TestNative_SkipNumber(t *testing.T) {
func TestNative_SkipOneFast(t *testing.T) {
p := 0
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
q := __skip_one_fast(&s, &p)
q := skip_one_fast(&s, &p)
assert.Equal(t, 42, p)
assert.Equal(t, 1, q)
p = 0
s = `1, 2.5, -3, "asdf\nqwer", true, false, null, {}, [],`
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 1, p)
assert.Equal(t, 0, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 6, p)
assert.Equal(t, 3, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 10, p)
assert.Equal(t, 8, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 24, p)
assert.Equal(t, 12, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 30, p)
assert.Equal(t, 26, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 37, p)
assert.Equal(t, 32, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 43, p)
assert.Equal(t, 39, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 47, p)
assert.Equal(t, 45, q)
p += 1
q = __skip_one_fast(&s, &p)
q = skip_one_fast(&s, &p)
assert.Equal(t, 51, p)
assert.Equal(t, 49, q)
}
@@ -587,7 +590,36 @@ func TestNative_SkipOneFast_Error(t *testing.T) {
`"asdf`, `"\\\"`,
}) {
p := 0
q := __skip_one_fast(&s, &p)
q := skip_one_fast(&s, &p)
assert.True(t, q < 0)
}
}
func TestNative_GetByPath(t *testing.T) {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
path := []interface{}{"asdf", 4}
ret := get_by_path(&s, &p, &path, types.NewStateMachine())
assert.Equal(t, strings.Index(s, "2.0"), ret)
}
func BenchmarkNative_SkipOneFast(b *testing.B) {
b.ResetTimer()
for i:=0; i<b.N; i++ {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
_ = skip_one_fast(&s, &p)
}
}
func BenchmarkNative_GetByPath(b *testing.B) {
b.ResetTimer()
for i:=0; i<b.N; i++ {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
path := []interface{}{"asdf", 3}
sm := types.NewStateMachine()
_ = get_by_path(&s, &p, &path, sm)
types.FreeStateMachine(sm)
}
}
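// A hedged usage sketch of the lookup benchmarked above: get_by_path returns the byte
// offset at which the addressed value starts (the test asserts it equals
// strings.Index(s, "2.0")) and negative returns encode a types.ParsingError; slicing
// s[ret:p] assumes p is left just past the located value, mirroring the skip_* convention.
func rawByPath(s string, path []interface{}) (string, error) {
    p := 0
    sm := types.NewStateMachine()
    defer types.FreeStateMachine(sm)
    ret := get_by_path(&s, &p, &path, sm)
    if ret < 0 {
        return "", types.ParsingError(-ret)
    }
    return s[ret:p], nil
}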
@@ -1,47 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {{PACKAGE}}
var (
S_f64toa = _subr__f64toa
S_f32toa = _subr__f32toa
S_i64toa = _subr__i64toa
S_u64toa = _subr__u64toa
S_lspace = _subr__lspace
)
var (
S_quote = _subr__quote
S_unquote = _subr__unquote
)
var (
S_value = _subr__value
S_vstring = _subr__vstring
S_vnumber = _subr__vnumber
S_vsigned = _subr__vsigned
S_vunsigned = _subr__vunsigned
)
var (
S_skip_one = _subr__skip_one
S_skip_one_fast = _subr__skip_one_fast
S_skip_array = _subr__skip_array
S_skip_object = _subr__skip_object
S_skip_number = _subr__skip_number
S_get_by_path = _subr__get_by_path
)
@@ -1,5 +1,7 @@
// Code generated by Makefile, DO NOT EDIT.
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
@@ -22,114 +24,168 @@ import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __i64toa(out *byte, val int64) (ret int)
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __u64toa(out *byte, val uint64) (ret int)
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f64toa(out *byte, val float64) (ret int)
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __f32toa(out *byte, val float32) (ret int)
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __lspace(sp unsafe.Pointer, nb int, off int) (ret int)
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int)
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int)
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int)
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int)
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vstring(s *string, p *int, v *types.JsonState, flags uint64)
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vnumber(s *string, p *int, v *types.JsonState)
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vsigned(s *string, p *int, v *types.JsonState)
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __vunsigned(s *string, p *int, v *types.JsonState)
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_one_fast(s *string, p *int) (ret int)
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int)
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __skip_number(s *string, p *int) (ret int)
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_one(s *string, p *int, m *types.StateMachine) (ret int)
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int)
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8(s *string, p *int, m *types.StateMachine) (ret int)
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
//go:noescape
//goland:noinspection GoUnusedParameter
func __validate_utf8_fast(s *string) (ret int)
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}
File diff suppressed because it is too large
@@ -1,49 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sse
var (
S_f64toa = _subr__f64toa
S_f32toa = _subr__f32toa
S_i64toa = _subr__i64toa
S_u64toa = _subr__u64toa
S_lspace = _subr__lspace
)
var (
S_quote = _subr__quote
S_unquote = _subr__unquote
)
var (
S_value = _subr__value
S_vstring = _subr__vstring
S_vnumber = _subr__vnumber
S_vsigned = _subr__vsigned
S_vunsigned = _subr__vunsigned
)
var (
S_skip_one = _subr__skip_one
S_skip_one_fast = _subr__skip_one_fast
S_skip_array = _subr__skip_array
S_skip_object = _subr__skip_object
S_skip_number = _subr__skip_number
S_get_by_path = _subr__get_by_path
)
@@ -3,107 +3,602 @@
package sse
//go:nosplit
//go:noescape
//goland:noinspection ALL
func __native_entry__() uintptr
import (
`github.com/bytedance/sonic/loader`
)
var (
_subr__f32toa = __native_entry__() + 31760
_subr__f64toa = __native_entry__() + 160
_subr__get_by_path = __native_entry__() + 26384
_subr__html_escape = __native_entry__() + 9072
_subr__i64toa = __native_entry__() + 3424
_subr__lspace = __native_entry__() + 16
_subr__quote = __native_entry__() + 4864
_subr__skip_array = __native_entry__() + 18112
_subr__skip_number = __native_entry__() + 22128
_subr__skip_object = __native_entry__() + 20512
_subr__skip_one = __native_entry__() + 22288
_subr__skip_one_fast = __native_entry__() + 22512
_subr__u64toa = __native_entry__() + 3552
_subr__unquote = __native_entry__() + 6704
_subr__validate_one = __native_entry__() + 22336
_subr__validate_utf8 = __native_entry__() + 30528
_subr__validate_utf8_fast = __native_entry__() + 31200
_subr__value = __native_entry__() + 12272
_subr__vnumber = __native_entry__() + 15728
_subr__vsigned = __native_entry__() + 17376
_subr__vstring = __native_entry__() + 14112
_subr__vunsigned = __native_entry__() + 17760
const (
_entry__f32toa = 31616
_entry__f64toa = 160
_entry__format_significand = 35888
_entry__format_integer = 2960
_entry__fsm_exec = 18016
_entry__advance_string = 14352
_entry__advance_string_default = 37280
_entry__do_skip_number = 20608
_entry__get_by_path = 26176
_entry__skip_one_fast = 22272
_entry__html_escape = 8912
_entry__i64toa = 3392
_entry__u64toa = 3520
_entry__lspace = 16
_entry__quote = 4832
_entry__skip_array = 17984
_entry__skip_number = 21904
_entry__skip_object = 20256
_entry__skip_one = 22048
_entry__unquote = 6576
_entry__validate_one = 22096
_entry__validate_utf8 = 30384
_entry__validate_utf8_fast = 31056
_entry__value = 12352
_entry__vnumber = 15744
_entry__atof_eisel_lemire64 = 10192
_entry__atof_native = 11744
_entry__decimal_to_f64 = 10560
_entry__right_shift = 36848
_entry__left_shift = 36352
_entry__vsigned = 17296
_entry__vstring = 14176
_entry__vunsigned = 17632
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__get_by_path = 240
_stack__html_escape = 64
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 168
_stack__advance_string = 64
_stack__advance_string_default = 64
_stack__do_skip_number = 48
_stack__get_by_path = 272
_stack__skip_one_fast = 136
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 64
_stack__skip_array = 128
_stack__skip_number = 72
_stack__skip_object = 128
_stack__skip_one = 128
_stack__skip_one_fast = 136
_stack__u64toa = 8
_stack__skip_array = 176
_stack__skip_number = 88
_stack__skip_object = 176
_stack__skip_one = 176
_stack__unquote = 88
_stack__validate_one = 128
_stack__validate_one = 176
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 24
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 136
_stack__vunsigned = 16
)
var (
_ = _subr__f32toa
_ = _subr__f64toa
_ = _subr__get_by_path
_ = _subr__html_escape
_ = _subr__i64toa
_ = _subr__lspace
_ = _subr__quote
_ = _subr__skip_array
_ = _subr__skip_number
_ = _subr__skip_object
_ = _subr__skip_one
_ = _subr__skip_one_fast
_ = _subr__u64toa
_ = _subr__unquote
_ = _subr__validate_one
_ = _subr__validate_utf8
_ = _subr__validate_utf8_fast
_ = _subr__value
_ = _subr__vnumber
_ = _subr__vsigned
_ = _subr__vstring
_ = _subr__vunsigned
_stack__vstring = 120
_stack__vunsigned = 8
)
const (
_ = _stack__f32toa
_ = _stack__f64toa
_ = _stack__get_by_path
_ = _stack__html_escape
_ = _stack__i64toa
_ = _stack__lspace
_ = _stack__quote
_ = _stack__skip_array
_ = _stack__skip_number
_ = _stack__skip_object
_ = _stack__skip_one
_ = _stack__skip_one_fast
_ = _stack__u64toa
_ = _stack__unquote
_ = _stack__validate_one
_ = _stack__validate_utf8
_ = _stack__validate_utf8_fast
_ = _stack__value
_ = _stack__vnumber
_ = _stack__vsigned
_ = _stack__vstring
_ = _stack__vunsigned
_size__f32toa = 3328
_size__f64toa = 2800
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1692
_size__advance_string = 1344
_size__advance_string_default = 960
_size__do_skip_number = 956
_size__get_by_path = 4208
_size__skip_one_fast = 3404
_size__html_escape = 1280
_size__i64toa = 48
_size__u64toa = 1264
_size__lspace = 128
_size__quote = 1728
_size__skip_array = 32
_size__skip_number = 144
_size__skip_object = 32
_size__skip_one = 48
_size__unquote = 2272
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 544
_size__value = 1316
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1184
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 336
_size__vstring = 128
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3286, 48},
{3287, 40},
{3289, 32},
{3291, 24},
{3293, 16},
{3295, 8},
{3296, 0},
{3318, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2740, 56},
{2744, 48},
{2745, 40},
{2747, 32},
{2749, 24},
{2751, 16},
{2753, 8},
{2754, 0},
{2792, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1342, 104},
{1346, 48},
{1347, 40},
{1349, 32},
{1351, 24},
{1353, 16},
{1355, 8},
{1356, 0},
{1692, 104},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{614, 56},
{618, 48},
{619, 40},
{621, 32},
{623, 24},
{625, 16},
{627, 8},
{628, 0},
{1339, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{576, 64},
{580, 48},
{581, 40},
{583, 32},
{585, 24},
{587, 16},
{589, 8},
{590, 0},
{955, 64},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{881, 48},
{882, 40},
{884, 32},
{886, 24},
{888, 16},
{890, 8},
{891, 0},
{956, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{4012, 104},
{4016, 48},
{4017, 40},
{4019, 32},
{4021, 24},
{4023, 16},
{4025, 8},
{4026, 0},
{4194, 104},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{658, 136},
{662, 48},
{663, 40},
{665, 32},
{667, 24},
{669, 16},
{671, 8},
{672, 0},
{3404, 136},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1256, 72},
{1260, 48},
{1261, 40},
{1263, 32},
{1265, 24},
{1267, 16},
{1269, 8},
{1271, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{772, 8},
{773, 0},
{1249, 8},
{1251, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{89, 8},
{90, 0},
{103, 8},
{104, 0},
{111, 8},
{113, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1681, 64},
{1685, 48},
{1686, 40},
{1688, 32},
{1690, 24},
{1692, 16},
{1694, 8},
{1695, 0},
{1722, 64},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{26, 8},
{32, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{26, 8},
{32, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1684, 88},
{1688, 48},
{1689, 40},
{1691, 32},
{1693, 24},
{1695, 16},
{1697, 8},
{1698, 0},
{2270, 88},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{247, 24},
{251, 16},
{252, 8},
{253, 0},
{527, 24},
{531, 16},
{532, 8},
{534, 0},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{495, 88},
{499, 48},
{500, 40},
{502, 32},
{504, 24},
{506, 16},
{508, 8},
{509, 0},
{1316, 88},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1551, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1144, 56},
{1148, 48},
{1149, 40},
{1151, 32},
{1153, 24},
{1155, 16},
{1157, 8},
{1158, 0},
{1169, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{119, 16},
{120, 8},
{121, 0},
{132, 16},
{133, 8},
{134, 0},
{276, 16},
{277, 8},
{278, 0},
{282, 16},
{283, 8},
{284, 0},
{322, 16},
{323, 8},
{324, 0},
{332, 16},
{333, 8},
{335, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{78, 8},
{79, 0},
{90, 8},
{91, 0},
{114, 8},
{115, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}
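
Each Funcs entry appears to bundle a routine's name, its entry offset into the native text segment, its code size, its maximum stack usage, and the pcsp table above. A small consistency check over entries of that shape might look like the sketch below (the struct is a hypothetical stand-in for loader.CFunc, whose real field names and types are not shown in this diff; the demo numbers other than the pcsp data are placeholders):

package main

import "fmt"

// cfunc is a hypothetical mirror of the {name, entry, size, stack, pcsp}
// tuple used in the Funcs table above; it is not the real loader.CFunc.
type cfunc struct {
    name     string
    entry    uint32
    textSize uint32
    maxStack uint32
    pcsp     [][2]uint32
}

// check verifies that every pcsp offset stays inside the routine's code and
// that no recorded stack delta exceeds the declared maximum.
func check(f cfunc) error {
    for _, e := range f.pcsp {
        if e[0] > f.textSize {
            return fmt.Errorf("%s: pc offset %d beyond text size %d", f.name, e[0], f.textSize)
        }
        if e[1] > f.maxStack {
            return fmt.Errorf("%s: stack delta %d exceeds declared max %d", f.name, e[1], f.maxStack)
        }
    }
    return nil
}

func main() {
    demo := cfunc{
        name:     "_validate_one",
        entry:    0,  // placeholder
        textSize: 48, // placeholder
        maxStack: 8,  // placeholder
        pcsp:     [][2]uint32{{1, 0}, {35, 8}, {41, 0}},
    }
    fmt.Println(check(demo)) // <nil>
}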

View File

@@ -19,6 +19,7 @@ package types
import (
`fmt`
`sync`
`unsafe`
)

type ValueType int
@@ -46,15 +47,23 @@ const (
)

const (
    // for native.Unquote() flags
    B_DOUBLE_UNQUOTE  = 0
    B_UNICODE_REPLACE = 1

    // for native.Value() flags
    B_USE_NUMBER      = 1
    B_VALIDATE_STRING = 5
    B_ALLOW_CONTROL   = 31
)

const (
    F_DOUBLE_UNQUOTE  = 1 << B_DOUBLE_UNQUOTE
    F_UNICODE_REPLACE = 1 << B_UNICODE_REPLACE

    F_USE_NUMBER      = 1 << B_USE_NUMBER
    F_VALIDATE_STRING = 1 << B_VALIDATE_STRING
    F_ALLOW_CONTROL   = 1 << B_ALLOW_CONTROL
)

const (
@@ -136,3 +145,18 @@ func FreeStateMachine(fsm *StateMachine) {
    stackPool.Put(fsm)
}

const MaxDigitNums = 800

var digitPool = sync.Pool{
    New: func() interface{} {
        return (*byte)(unsafe.Pointer(&[MaxDigitNums]byte{}))
    },
}

func NewDbuf() *byte {
    return digitPool.Get().(*byte)
}

func FreeDbuf(p *byte) {
    digitPool.Put(p)
}

View File

@@ -1,46 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package resolver
import (
_ `encoding/json`
`reflect`
_ `unsafe`
)
type StdField struct {
name string
nameBytes []byte
equalFold func()
nameNonEsc string
nameEscHTML string
tag bool
index []int
typ reflect.Type
omitEmpty bool
quoted bool
encoder func()
}
type StdStructFields struct {
list []StdField
nameIndex map[string]int
}
//go:noescape
//go:linkname typeFields encoding/json.typeFields
func typeFields(_ reflect.Type) StdStructFields
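
The file removed above leaned on //go:linkname to call encoding/json's unexported field resolver while mirroring its internal struct layout field for field, a technique that breaks whenever the standard library's layout changes. For reference, the general shape of a pull-style linkname looks like the sketch below (the target symbol runtime.nanotime, the empty .s file requirement, and the behaviour of newer toolchains' linkname checks are stated as assumptions, not guarantees):

package clock

import (
    _ "unsafe" // required so the compiler accepts //go:linkname
)

// nanotime is pulled from the runtime's unexported monotonic clock.
// The declaring package also needs a (possibly empty) .s file so the
// bodiless declaration compiles, and Go 1.23+ may reject the pull unless
// the symbol is on the runtime's compatibility list or -checklinkname=0
// is passed to the linker.
//
//go:linkname nanotime runtime.nanotime
func nanotime() int64

// Nanotime exposes the borrowed clock reading.
func Nanotime() int64 { return nanotime() }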

View File

@@ -109,4 +109,16 @@ func StrFrom(p unsafe.Pointer, n int64) (s string) {
(*GoString)(unsafe.Pointer(&s)).Ptr = p
(*GoString)(unsafe.Pointer(&s)).Len = int(n)
return
}

// NoEscape hides a pointer from escape analysis. NoEscape is
// the identity function but escape analysis doesn't think the
// output depends on the input. NoEscape is inlined and currently
// compiles down to zero instructions.
// USE CAREFULLY!
//go:nosplit
//goland:noinspection GoVetUnsafePointer
func NoEscape(p unsafe.Pointer) unsafe.Pointer {
x := uintptr(p)
return unsafe.Pointer(x ^ 0)
}
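
A quick illustration of why a helper like this exists: when a pointer flows somewhere escape analysis cannot see through, the pointed-to variable is forced onto the heap; hiding the pointer behind NoEscape stops that tracking, which keeps the variable on the stack as long as the caller can guarantee the callee does not retain the pointer. A minimal sketch (the helper is re-declared locally so the snippet is self-contained; whether the buffer actually stays on the stack depends on the compiler's escape analysis):

package main

import (
    "fmt"
    "unsafe"
)

// noEscape re-declares rt.NoEscape above so this sketch is self-contained:
// it returns its argument unchanged but hides it from escape analysis.
func noEscape(p unsafe.Pointer) unsafe.Pointer {
    x := uintptr(p)
    return unsafe.Pointer(x ^ 0)
}

// sumBytes reads n bytes starting at p and never retains the pointer,
// which is the precondition for hiding it with noEscape.
func sumBytes(p unsafe.Pointer, n int) (s int) {
    for i := 0; i < n; i++ {
        s += int(*(*byte)(unsafe.Add(p, i)))
    }
    return
}

func main() {
    var buf [8]byte // intended to stay on the caller's stack
    for i := range buf {
        buf[i] = byte(i)
    }
    fmt.Println(sumBytes(noEscape(unsafe.Pointer(&buf[0])), len(buf))) // 28
}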