Clean up the mess from careless AI-generated code

2025-09-07 20:36:02 +08:00
parent ba513e0827
commit c4522b974b
403 changed files with 22915 additions and 44424 deletions

View File

@@ -23,16 +23,12 @@ import (
)
var (
V_strhash = rt.UnpackEface(strhash)
V_strhash = rt.UnpackEface(rt.Strhash)
S_strhash = *(*uintptr)(V_strhash.Value)
)
//go:noescape
//go:linkname strhash runtime.strhash
func strhash(_ unsafe.Pointer, _ uintptr) uintptr
func StrHash(s string) uint64 {
if v := strhash(unsafe.Pointer(&s), 0); v == 0 {
if v := rt.Strhash(unsafe.Pointer(&s), 0); v == 0 {
return 1
} else {
return uint64(v)

View File

@@ -24,7 +24,6 @@ import (
)
var (
HasAVX = cpuid.CPU.Has(cpuid.AVX)
HasAVX2 = cpuid.CPU.Has(cpuid.AVX2)
HasSSE = cpuid.CPU.Has(cpuid.SSE)
)
@@ -33,7 +32,8 @@ func init() {
switch v := os.Getenv("SONIC_MODE"); v {
case "" : break
case "auto" : break
case "noavx" : HasAVX = false; fallthrough
case "noavx" : HasAVX2 = false
// also disables AVX (acts as `noavx`); kept for forward compatibility
case "noavx2" : HasAVX2 = false
default : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v))
}

File diff suppressed because it is too large

View File

@@ -1,70 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`os`
`runtime`
`runtime/debug`
`strings`
`github.com/bytedance/sonic/internal/jit`
)
var (
debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
)
var (
_Instr_End _Instr = newInsOp(_OP_nil_1)
_F_gc = jit.Func(runtime.GC)
_F_force_gc = jit.Func(debug.FreeOSMemory)
_F_println = jit.Func(println_wrapper)
_F_print = jit.Func(print)
)
func println_wrapper(i int, op1 int, op2 int){
println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}
func print(i int){
println(i)
}
func (self *_Assembler) force_gc() {
self.call_go(_F_gc)
self.call_go(_F_force_gc)
}
func (self *_Assembler) debug_instr(i int, v *_Instr) {
if debugSyncGC {
if (i+1 == len(self.p)) {
self.print_gc(i, v, &_Instr_End)
} else {
next := &(self.p[i+1])
self.print_gc(i, v, next)
name := _OpNames[next.op()]
if strings.Contains(name, "save") {
return
}
}
self.force_gc()
}
}

View File

@@ -1,255 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`unsafe`
`encoding/json`
`reflect`
`runtime`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/option`
`github.com/bytedance/sonic/utf8`
)
const (
_F_use_int64 = 0
_F_disable_urc = 2
_F_disable_unknown = 3
_F_copy_string = 4
_F_use_number = types.B_USE_NUMBER
_F_validate_string = types.B_VALIDATE_STRING
_F_allow_control = types.B_ALLOW_CONTROL
)
type Options uint64
const (
OptionUseInt64 Options = 1 << _F_use_int64
OptionUseNumber Options = 1 << _F_use_number
OptionUseUnicodeErrors Options = 1 << _F_disable_urc
OptionDisableUnknown Options = 1 << _F_disable_unknown
OptionCopyString Options = 1 << _F_copy_string
OptionValidateString Options = 1 << _F_validate_string
)
func (self *Decoder) SetOptions(opts Options) {
if (opts & OptionUseNumber != 0) && (opts & OptionUseInt64 != 0) {
panic("can't set OptionUseInt64 and OptionUseNumber both!")
}
self.f = uint64(opts)
}
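For illustration, a minimal in-package sketch of combining these bit-flag options (nothing beyond the API in this file is assumed); OptionUseInt64 and OptionUseNumber are mutually exclusive, as the panic above enforces:

func exampleSetOptions() {
    dec := NewDecoder(`{"id": 9007199254740993}`)
    dec.SetOptions(OptionUseInt64 | OptionCopyString) // flags combine with bitwise OR
    // dec.SetOptions(OptionUseInt64 | OptionUseNumber) // would panic: mutually exclusive

    var v interface{}
    _ = dec.Decode(&v)
    // with OptionUseInt64, the integer lands in the interface{} as int64, not float64
}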
// Decoder is the decoder context object
type Decoder struct {
i int
f uint64
s string
}
// NewDecoder creates a new decoder instance.
func NewDecoder(s string) *Decoder {
return &Decoder{s: s}
}
// Pos returns the current decoding position.
func (self *Decoder) Pos() int {
return self.i
}
func (self *Decoder) Reset(s string) {
self.s = s
self.i = 0
// self.f = 0
}
func (self *Decoder) CheckTrailings() error {
pos := self.i
buf := self.s
/* skip all the trailing spaces */
if pos != len(buf) {
for pos < len(buf) && (types.SPACE_MASK & (1 << buf[pos])) != 0 {
pos++
}
}
/* then it must be at EOF */
if pos == len(buf) {
return nil
}
/* junk after JSON value */
return SyntaxError {
Src : buf,
Pos : pos,
Code : types.ERR_INVALID_CHAR,
}
}
// Decode parses the JSON-encoded data from current position and stores the result
// in the value pointed to by val.
func (self *Decoder) Decode(val interface{}) error {
/* validate json if needed */
if (self.f & (1 << _F_validate_string)) != 0 && !utf8.ValidateString(self.s){
dbuf := utf8.CorrectWith(nil, rt.Str2Mem(self.s), "\ufffd")
self.s = rt.Mem2Str(dbuf)
}
vv := rt.UnpackEface(val)
vp := vv.Value
/* check for nil type */
if vv.Type == nil {
return &json.InvalidUnmarshalError{}
}
/* must be a non-nil pointer */
if vp == nil || vv.Type.Kind() != reflect.Ptr {
return &json.InvalidUnmarshalError{Type: vv.Type.Pack()}
}
etp := rt.PtrElem(vv.Type)
/* check the defined pointer type for issue 379 */
if vv.Type.IsNamed() {
newp := vp
etp = vv.Type
vp = unsafe.Pointer(&newp)
}
/* create a new stack, and call the decoder */
sb := newStack()
nb, err := decodeTypedPointer(self.s, self.i, etp, vp, sb, self.f)
/* return the stack back */
self.i = nb
freeStack(sb)
/* avoid GC ahead */
runtime.KeepAlive(vv)
return err
}
// UseInt64 causes the Decoder to unmarshal an integer into an interface{} as an
// int64 instead of as a float64.
func (self *Decoder) UseInt64() {
self.f |= 1 << _F_use_int64
self.f &^= 1 << _F_use_number
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// json.Number instead of as a float64.
func (self *Decoder) UseNumber() {
self.f &^= 1 << _F_use_int64
self.f |= 1 << _F_use_number
}
// UseUnicodeErrors causes the Decoder to return an error when it encounters invalid
// UTF-8 escape sequences.
func (self *Decoder) UseUnicodeErrors() {
self.f |= 1 << _F_disable_urc
}
// DisallowUnknownFields causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (self *Decoder) DisallowUnknownFields() {
self.f |= 1 << _F_disable_unknown
}
// CopyString causes the Decoder to decode string values by copying them instead of referencing the input buffer.
func (self *Decoder) CopyString() {
self.f |= 1 << _F_copy_string
}
// ValidateString causes the Decoder to validate string values while decoding.
// An error is returned when a JSON string value contains unescaped control
// characters (0x00-0x1f) or invalid UTF-8.
func (self *Decoder) ValidateString() {
self.f |= 1 << _F_validate_string
}
// Pretouch compiles vt ahead-of-time to avoid JIT compilation on-the-fly, in
// order to reduce the first-hit latency.
//
// Opts are the compile options, for example, "option.WithCompileRecursiveDepth" is
// a compile option to set the depth of recursive compile for the nested struct type.
func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
cfg := option.DefaultCompileOptions()
for _, opt := range opts {
opt(&cfg)
}
return pretouchRec(map[reflect.Type]bool{vt:true}, cfg)
}
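A hedged usage sketch of Pretouch together with option.WithCompileRecursiveDepth from the comment above; the struct types are invented for illustration:

func examplePretouch() {
    type Inner struct{ Values []int }
    type Outer struct {
        Name  string
        Items map[string]Inner
    }
    // pre-compile the decoder for Outer and, up to the given depth, its nested types
    if err := Pretouch(reflect.TypeOf(Outer{}), option.WithCompileRecursiveDepth(2)); err != nil {
        panic(err)
    }
}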
func pretouchType(_vt reflect.Type, opts option.CompileOptions) (map[reflect.Type]bool, error) {
/* compile function */
compiler := newCompiler().apply(opts)
decoder := func(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
if pp, err := compiler.compile(_vt); err != nil {
return nil, err
} else {
as := newAssembler(pp)
as.name = _vt.String()
return as.Load(), nil
}
}
/* find or compile */
vt := rt.UnpackType(_vt)
if val := programCache.Get(vt); val != nil {
return nil, nil
} else if _, err := programCache.Compute(vt, decoder); err == nil {
return compiler.rec, nil
} else {
return nil, err
}
}
func pretouchRec(vtm map[reflect.Type]bool, opts option.CompileOptions) error {
if opts.RecursiveDepth < 0 || len(vtm) == 0 {
return nil
}
next := make(map[reflect.Type]bool)
for vt := range(vtm) {
sub, err := pretouchType(vt, opts)
if err != nil {
return err
}
for svt := range(sub) {
next[svt] = true
}
}
opts.RecursiveDepth -= 1
return pretouchRec(next, opts)
}
// Skip skips exactly one JSON value and, if the value is valid, returns the position of
// its first non-blank character (start) and its end position (end).
// Otherwise it returns a negative error code in start and the invalid character's position in end.
func Skip(data []byte) (start int, end int) {
s := rt.Mem2Str(data)
p := 0
m := types.NewStateMachine()
ret := native.SkipOne(&s, &p, m, uint64(0))
types.FreeStateMachine(m)
return ret, p
}
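A small sketch of Skip's return convention as documented above; the positions in the comments are illustrative, not asserted test values:

func exampleSkip() {
    start, end := Skip([]byte(`  {"a": 1}   `))
    // valid value: start is the index of the first non-blank byte (the '{' at 2),
    // end is one position past the skipped value; trailing blanks stay unread
    _, _ = start, end

    start, end = Skip([]byte(`{"a": `)) // truncated input
    // invalid value: start carries a negative error code and end the offending position
    _, _ = start, end
}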

View File

@@ -1,191 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`encoding/json`
`errors`
`fmt`
`reflect`
`strconv`
`strings`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
type SyntaxError struct {
Pos int
Src string
Code types.ParsingError
Msg string
}
func (self SyntaxError) Error() string {
return fmt.Sprintf("%q", self.Description())
}
func (self SyntaxError) Description() string {
return "Syntax error " + self.description()
}
func (self SyntaxError) description() string {
/* check for empty source */
if self.Src == "" {
return fmt.Sprintf("no sources available: %#v", self)
}
p, x, q, y := calcBounds(len(self.Src), self.Pos)
/* compose the error description */
return fmt.Sprintf(
"at index %d: %s\n\n\t%s\n\t%s^%s\n",
self.Pos,
self.Message(),
self.Src[p:q],
strings.Repeat(".", x),
strings.Repeat(".", y),
)
}
func calcBounds(size int, pos int) (lbound int, lwidth int, rbound int, rwidth int) {
if pos >= size || pos < 0 {
return 0, 0, size, 0
}
i := 16
lbound = pos - i
rbound = pos + i
/* prevent slicing before the beginning */
if lbound < 0 {
lbound, rbound, i = 0, rbound - lbound, i + lbound
}
/* prevent slicing beyond the end */
if n := size; rbound > n {
n = rbound - n
rbound = size
/* move the left bound if possible */
if lbound > n {
i += n
lbound -= n
}
}
/* left and right length */
lwidth = clamp_zero(i)
rwidth = clamp_zero(rbound - lbound - i - 1)
return
}
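A worked illustration of the window calcBounds produces, computed from the code above:

// Worked example: for a 100-byte source with the error at position 50,
//     lbound, lwidth, rbound, rwidth := calcBounds(100, 50)   // -> 34, 16, 66, 15
// description() then prints Src[34:66] on one line and, beneath it,
// 16 dots, a caret and 15 dots, so the caret sits exactly under byte 50.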
func (self SyntaxError) Message() string {
if self.Msg == "" {
return self.Code.Message()
}
return self.Msg
}
func clamp_zero(v int) int {
if v < 0 {
return 0
} else {
return v
}
}
/** JIT Error Helpers **/
var stackOverflow = &json.UnsupportedValueError {
Str : "Value nesting too deep",
Value : reflect.ValueOf("..."),
}
func error_wrap(src string, pos int, code types.ParsingError) error {
return *error_wrap_heap(src, pos, code)
}
//go:noinline
func error_wrap_heap(src string, pos int, code types.ParsingError) *SyntaxError {
return &SyntaxError {
Pos : pos,
Src : src,
Code : code,
}
}
func error_type(vt *rt.GoType) error {
return &json.UnmarshalTypeError{Type: vt.Pack()}
}
type MismatchTypeError struct {
Pos int
Src string
Type reflect.Type
}
func swithchJSONType(src string, pos int) string {
var val string
switch src[pos] {
case 'f': fallthrough
case 't': val = "bool"
case '"': val = "string"
case '{': val = "object"
case '[': val = "array"
case '-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': val = "number"
}
return val
}
func (self MismatchTypeError) Error() string {
se := SyntaxError {
Pos : self.Pos,
Src : self.Src,
Code : types.ERR_MISMATCH,
}
return fmt.Sprintf("Mismatch type %s with value %s %q", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}
func (self MismatchTypeError) Description() string {
se := SyntaxError {
Pos : self.Pos,
Src : self.Src,
Code : types.ERR_MISMATCH,
}
return fmt.Sprintf("Mismatch type %s with value %s %s", self.Type.String(), swithchJSONType(self.Src, self.Pos), se.description())
}
func error_mismatch(src string, pos int, vt *rt.GoType) error {
return &MismatchTypeError {
Pos : pos,
Src : src,
Type : vt.Pack(),
}
}
func error_field(name string) error {
return errors.New("json: unknown field " + strconv.Quote(name))
}
func error_value(value string, vtype reflect.Type) error {
return &json.UnmarshalTypeError {
Type : vtype,
Value : value,
}
}

View File

@@ -1,143 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`sync`
`unsafe`
`github.com/bytedance/sonic/internal/caching`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
const (
_MinSlice = 2
_MaxStack = 4096 // 4k slots
_MaxStackBytes = _MaxStack * _PtrBytes
_MaxDigitNums = types.MaxDigitNums // used in atof fallback algorithm
)
const (
_PtrBytes = _PTR_SIZE / 8
_FsmOffset = (_MaxStack + 1) * _PtrBytes
_DbufOffset = _FsmOffset + int64(unsafe.Sizeof(types.StateMachine{})) + types.MAX_RECURSE * _PtrBytes
_StackSize = unsafe.Sizeof(_Stack{})
)
var (
stackPool = sync.Pool{}
valueCache = []unsafe.Pointer(nil)
fieldCache = []*caching.FieldMap(nil)
fieldCacheMux = sync.Mutex{}
programCache = caching.CreateProgramCache()
)
type _Stack struct {
sp uintptr
sb [_MaxStack]unsafe.Pointer
mm types.StateMachine
vp [types.MAX_RECURSE]unsafe.Pointer
dp [_MaxDigitNums]byte
}
type _Decoder func(
s string,
i int,
vp unsafe.Pointer,
sb *_Stack,
fv uint64,
sv string, // DO NOT pass a value to this argument, since it is only used for the local _VAR_sv
vk unsafe.Pointer, // DO NOT pass a value to this argument, since it is only used for the local _VAR_vk
) (int, error)
var _KeepAlive struct {
s string
i int
vp unsafe.Pointer
sb *_Stack
fv uint64
sv string
vk unsafe.Pointer
ret int
err error
frame_decoder [_FP_offs]byte
frame_generic [_VD_offs]byte
}
var (
argPtrs = []bool{true, false, false, true, true, false, true, false, true}
localPtrs = []bool{}
)
var (
argPtrs_generic = []bool{true}
localPtrs_generic = []bool{}
)
func newStack() *_Stack {
if ret := stackPool.Get(); ret == nil {
return new(_Stack)
} else {
return ret.(*_Stack)
}
}
func resetStack(p *_Stack) {
memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
}
func freeStack(p *_Stack) {
p.sp = 0
stackPool.Put(p)
}
func freezeValue(v unsafe.Pointer) uintptr {
valueCache = append(valueCache, v)
return uintptr(v)
}
func freezeFields(v *caching.FieldMap) int64 {
fieldCacheMux.Lock()
fieldCache = append(fieldCache, v)
fieldCacheMux.Unlock()
return referenceFields(v)
}
func referenceFields(v *caching.FieldMap) int64 {
return int64(uintptr(unsafe.Pointer(v)))
}
func makeDecoder(vt *rt.GoType, _ ...interface{}) (interface{}, error) {
if pp, err := newCompiler().compile(vt.Pack()); err != nil {
return nil, err
} else {
return newAssembler(pp).Load(), nil
}
}
func findOrCompile(vt *rt.GoType) (_Decoder, error) {
if val := programCache.Get(vt); val != nil {
return val.(_Decoder), nil
} else if ret, err := programCache.Compute(vt, makeDecoder); err == nil {
return ret.(_Decoder), nil
} else {
return nil, err
}
}

View File

@@ -1,46 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`encoding`
`encoding/json`
`unsafe`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/rt`
)
func decodeTypedPointer(s string, i int, vt *rt.GoType, vp unsafe.Pointer, sb *_Stack, fv uint64) (int, error) {
if fn, err := findOrCompile(vt); err != nil {
return 0, err
} else {
rt.MoreStack(_FP_size + _VD_size + native.MaxFrameSize)
rt.StopProf()
ret, err := fn(s, i, vp, sb, fv, "", nil)
rt.StartProf()
return ret, err
}
}
func decodeJsonUnmarshaler(vv interface{}, s string) error {
return vv.(json.Unmarshaler).UnmarshalJSON(rt.Str2Mem(s))
}
func decodeTextUnmarshaler(vv interface{}, s string) error {
return vv.(encoding.TextUnmarshaler).UnmarshalText(rt.Str2Mem(s))
}

View File

@@ -1,228 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`bytes`
`io`
`sync`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/option`
)
var (
minLeftBufferShift uint = 1
)
// StreamDecoder is the decoder context object for streaming input.
type StreamDecoder struct {
r io.Reader
buf []byte
scanp int
scanned int64
err error
Decoder
}
var bufPool = sync.Pool{
New: func () interface{} {
return make([]byte, 0, option.DefaultDecoderBufferSize)
},
}
// NewStreamDecoder adapts to encoding/json.NewDecoder API.
//
// NewStreamDecoder returns a new decoder that reads from r.
func NewStreamDecoder(r io.Reader) *StreamDecoder {
return &StreamDecoder{r : r}
}
// Decode decodes the next JSON value from the input stream into val.
// Extra bytes may be read and kept in the buffer for use by the next call.
// Any I/O error from the underlying io.Reader (except io.EOF)
// or syntax error in the data is recorded and stops subsequent decoding.
func (self *StreamDecoder) Decode(val interface{}) (err error) {
if self.err != nil {
return self.err
}
var buf = self.buf[self.scanp:]
var p = 0
var recycle bool
if cap(buf) == 0 {
buf = bufPool.Get().([]byte)
recycle = true
}
var first = true
var repeat = true
read_more:
for {
l := len(buf)
realloc(&buf)
n, err := self.r.Read(buf[l:cap(buf)])
buf = buf[:l+n]
if err != nil {
repeat = false
if err == io.EOF {
if len(buf) == 0 {
return err
}
break
}
self.err = err
return err
}
if n > 0 || first {
break
}
}
first = false
l := len(buf)
if l > 0 {
self.Decoder.Reset(string(buf))
var x int
if ret := native.SkipOneFast(&self.s, &x); ret < 0 {
if repeat {
goto read_more
} else {
err = SyntaxError{x, self.s, types.ParsingError(-ret), ""}
self.err = err
return
}
}
err = self.Decoder.Decode(val)
if err != nil {
self.err = err
}
p = self.Decoder.Pos()
self.scanned += int64(p)
self.scanp = 0
}
if l > p {
// undecoded bytes remain, so copy them into self.buf
self.buf = append(self.buf[:0], buf[p:]...)
} else {
self.buf = nil
recycle = true
}
if recycle {
buf = buf[:0]
bufPool.Put(buf)
}
return err
}
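A usage sketch of the streaming decoder reading several top-level values in an encoding/json-style loop (illustrative only):

func exampleStreamDecode(r io.Reader) error {
    dec := NewStreamDecoder(r)
    for dec.More() {
        var v map[string]interface{}
        if err := dec.Decode(&v); err != nil {
            return err // the first I/O or syntax error stops subsequent decoding
        }
        // use v; any extra bytes already read stay buffered for the next Decode
    }
    return nil
}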
func (self StreamDecoder) repeatable(err error) bool {
if ee, ok := err.(SyntaxError); ok &&
(ee.Code == types.ERR_EOF || (ee.Code == types.ERR_INVALID_CHAR && self.i >= len(self.s)-1)) {
return true
}
return false
}
// InputOffset returns the input stream byte offset of the current decoder position.
// The offset gives the location of the end of the most recently returned token and the beginning of the next token.
func (self *StreamDecoder) InputOffset() int64 {
return self.scanned + int64(self.scanp)
}
// Buffered returns a reader of the data remaining in the Decoder's buffer.
// The reader is valid until the next call to Decode.
func (self *StreamDecoder) Buffered() io.Reader {
return bytes.NewReader(self.buf[self.scanp:])
}
// More reports whether there is another element in the
// current array or object being parsed.
func (self *StreamDecoder) More() bool {
if self.err != nil {
return false
}
c, err := self.peek()
return err == nil && c != ']' && c != '}'
}
func (self *StreamDecoder) peek() (byte, error) {
var err error
for {
for i := self.scanp; i < len(self.buf); i++ {
c := self.buf[i]
if isSpace(c) {
continue
}
self.scanp = i
return c, nil
}
// buffer has been scanned, now report any error
if err != nil {
if err != io.EOF {
self.err = err
}
return 0, err
}
err = self.refill()
}
}
func isSpace(c byte) bool {
return types.SPACE_MASK & (1 << c) != 0
}
func (self *StreamDecoder) refill() error {
// Make room to read more into the buffer.
// First slide down data already consumed.
if self.scanp > 0 {
self.scanned += int64(self.scanp)
n := copy(self.buf, self.buf[self.scanp:])
self.buf = self.buf[:n]
self.scanp = 0
}
// Grow buffer if not large enough.
realloc(&self.buf)
// Read. Delay error for next iteration (after scan).
n, err := self.r.Read(self.buf[len(self.buf):cap(self.buf)])
self.buf = self.buf[0 : len(self.buf)+n]
return err
}
func realloc(buf *[]byte) {
l := uint(len(*buf))
c := uint(cap(*buf))
if c - l <= c >> minLeftBufferShift {
e := l+(l>>minLeftBufferShift)
if e < option.DefaultDecoderBufferSize {
e = option.DefaultDecoderBufferSize
}
tmp := make([]byte, l, e)
copy(tmp, *buf)
*buf = tmp
}
}

View File

@@ -1,111 +0,0 @@
// +build go1.20
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`unsafe`
`reflect`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64decode github.com/chenzhuoyu/base64x._subr__b64decode
var _subr__b64decode uintptr
// runtime.maxElementSize
const _max_map_element_size uintptr = 128
func mapfast(vt reflect.Type) bool {
return vt.Elem().Size() <= _max_map_element_size
}
//go:nosplit
//go:linkname throw runtime.throw
//goland:noinspection GoUnusedParameter
func throw(s string)
//go:linkname convT64 runtime.convT64
//goland:noinspection GoUnusedParameter
func convT64(v uint64) unsafe.Pointer
//go:linkname convTslice runtime.convTslice
//goland:noinspection GoUnusedParameter
func convTslice(v []byte) unsafe.Pointer
//go:linkname convTstring runtime.convTstring
//goland:noinspection GoUnusedParameter
func convTstring(v string) unsafe.Pointer
//go:noescape
//go:linkname memequal runtime.memequal
//goland:noinspection GoUnusedParameter
func memequal(a unsafe.Pointer, b unsafe.Pointer, size uintptr) bool
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname mallocgc runtime.mallocgc
//goland:noinspection GoUnusedParameter
func mallocgc(size uintptr, typ *rt.GoType, needzero bool) unsafe.Pointer
//go:linkname makeslice runtime.makeslice
//goland:noinspection GoUnusedParameter
func makeslice(et *rt.GoType, len int, cap int) unsafe.Pointer
//go:noescape
//go:linkname growslice reflect.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname makemap_small runtime.makemap_small
func makemap_small() unsafe.Pointer
//go:linkname mapassign runtime.mapassign
//goland:noinspection GoUnusedParameter
func mapassign(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_fast32 runtime.mapassign_fast32
//goland:noinspection GoUnusedParameter
func mapassign_fast32(t *rt.GoMapType, h unsafe.Pointer, k uint32) unsafe.Pointer
//go:linkname mapassign_fast64 runtime.mapassign_fast64
//goland:noinspection GoUnusedParameter
func mapassign_fast64(t *rt.GoMapType, h unsafe.Pointer, k uint64) unsafe.Pointer
//go:linkname mapassign_fast64ptr runtime.mapassign_fast64ptr
//goland:noinspection GoUnusedParameter
func mapassign_fast64ptr(t *rt.GoMapType, h unsafe.Pointer, k unsafe.Pointer) unsafe.Pointer
//go:linkname mapassign_faststr runtime.mapassign_faststr
//goland:noinspection GoUnusedParameter
func mapassign_faststr(t *rt.GoMapType, h unsafe.Pointer, s string) unsafe.Pointer
//go:nosplit
//go:linkname memclrHasPointers runtime.memclrHasPointers
//goland:noinspection GoUnusedParameter
func memclrHasPointers(ptr unsafe.Pointer, n uintptr)
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)

View File

@@ -1,58 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`encoding`
`encoding/base64`
`encoding/json`
`reflect`
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
var (
byteType = reflect.TypeOf(byte(0))
intType = reflect.TypeOf(int(0))
int8Type = reflect.TypeOf(int8(0))
int16Type = reflect.TypeOf(int16(0))
int32Type = reflect.TypeOf(int32(0))
int64Type = reflect.TypeOf(int64(0))
uintType = reflect.TypeOf(uint(0))
uint8Type = reflect.TypeOf(uint8(0))
uint16Type = reflect.TypeOf(uint16(0))
uint32Type = reflect.TypeOf(uint32(0))
uint64Type = reflect.TypeOf(uint64(0))
float32Type = reflect.TypeOf(float32(0))
float64Type = reflect.TypeOf(float64(0))
stringType = reflect.TypeOf("")
bytesType = reflect.TypeOf([]byte(nil))
jsonNumberType = reflect.TypeOf(json.Number(""))
base64CorruptInputError = reflect.TypeOf(base64.CorruptInputError(0))
)
var (
errorType = reflect.TypeOf((*error)(nil)).Elem()
jsonUnmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
encodingTextUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
)
func rtype(t reflect.Type) (*rt.GoItab, *rt.GoType) {
p := (*rt.GoIface)(unsafe.Pointer(&t))
return p.Itab, (*rt.GoType)(p.Value)
}

View File

@@ -1,39 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package decoder
import (
`unsafe`
`github.com/bytedance/sonic/loader`
)
//go:nosplit
func pbool(v bool) uintptr {
return freezeValue(unsafe.Pointer(&v))
}
//go:nosplit
func ptodec(p loader.Function) _Decoder {
return *(*_Decoder)(unsafe.Pointer(&p))
}
func assert_eq(v int64, exp int64, msg string) {
if v != exp {
panic(msg)
}
}

File diff suppressed because it is too large

View File

@@ -1,66 +0,0 @@
// +build go1.16,!go1.17
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`os`
`strings`
`runtime`
`runtime/debug`
`github.com/bytedance/sonic/internal/jit`
)
var (
debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
)
var (
_Instr_End _Instr = newInsOp(_OP_null)
_F_gc = jit.Func(runtime.GC)
_F_force_gc = jit.Func(debug.FreeOSMemory)
_F_println = jit.Func(println_wrapper)
)
func println_wrapper(i int, op1 int, op2 int){
println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}
func (self *_Assembler) force_gc() {
self.call_go(_F_gc)
self.call_go(_F_force_gc)
}
func (self *_Assembler) debug_instr(i int, v *_Instr) {
if debugSyncGC {
if (i+1 == len(self.p)) {
self.print_gc(i, v, &_Instr_End)
} else {
next := &(self.p[i+1])
self.print_gc(i, v, next)
name := _OpNames[next.op()]
if strings.Contains(name, "save") {
return
}
}
self.force_gc()
}
}

View File

@@ -1,205 +0,0 @@
// +build go1.17,!go1.22
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`fmt`
`os`
`runtime`
`strings`
`unsafe`
`github.com/bytedance/sonic/internal/jit`
`github.com/twitchyliquid64/golang-asm/obj`
)
const _FP_debug = 128
var (
debugSyncGC = os.Getenv("SONIC_SYNC_GC") != ""
debugAsyncGC = os.Getenv("SONIC_NO_ASYNC_GC") == ""
debugCheckPtr = os.Getenv("SONIC_CHECK_POINTER") != ""
)
var (
_Instr_End = newInsOp(_OP_is_nil)
_F_gc = jit.Func(gc)
_F_println = jit.Func(println_wrapper)
_F_print = jit.Func(print)
)
func (self *_Assembler) dsave(r ...obj.Addr) {
for i, v := range r {
if i > _FP_debug / 8 - 1 {
panic("too many registers to save")
} else {
self.Emit("MOVQ", v, jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8))
}
}
}
func (self *_Assembler) dload(r ...obj.Addr) {
for i, v := range r {
if i > _FP_debug / 8 - 1 {
panic("too many registers to load")
} else {
self.Emit("MOVQ", jit.Ptr(_SP, _FP_fargs + _FP_saves + _FP_locals + int64(i) * 8), v)
}
}
}
func println_wrapper(i int, op1 int, op2 int){
println(i, " Intrs ", op1, _OpNames[op1], "next: ", op2, _OpNames[op2])
}
func print(i int){
println(i)
}
func gc() {
if !debugSyncGC {
return
}
runtime.GC()
// debug.FreeOSMemory()
}
func (self *_Assembler) dcall(fn obj.Addr) {
self.Emit("MOVQ", fn, _R10) // MOVQ ${fn}, R10
self.Rjmp("CALL", _R10) // CALL R10
}
func (self *_Assembler) debug_gc() {
if !debugSyncGC {
return
}
self.dsave(_REG_debug...)
self.dcall(_F_gc)
self.dload(_REG_debug...)
}
func (self *_Assembler) debug_instr(i int, v *_Instr) {
if debugSyncGC {
if i+1 == len(self.p) {
self.print_gc(i, v, &_Instr_End)
} else {
next := &(self.p[i+1])
self.print_gc(i, v, next)
name := _OpNames[next.op()]
if strings.Contains(name, "save") {
return
}
}
// self.debug_gc()
}
}
//go:noescape
//go:linkname checkptrBase runtime.checkptrBase
func checkptrBase(p unsafe.Pointer) uintptr
//go:noescape
//go:linkname findObject runtime.findObject
func findObject(p, refBase, refOff uintptr) (base uintptr, s unsafe.Pointer, objIndex uintptr)
var (
_F_checkptr = jit.Func(checkptr)
_F_printptr = jit.Func(printptr)
)
var (
_R10 = jit.Reg("R10")
)
var _REG_debug = []obj.Addr {
jit.Reg("AX"),
jit.Reg("BX"),
jit.Reg("CX"),
jit.Reg("DX"),
jit.Reg("DI"),
jit.Reg("SI"),
jit.Reg("BP"),
jit.Reg("SP"),
jit.Reg("R8"),
jit.Reg("R9"),
jit.Reg("R10"),
jit.Reg("R11"),
jit.Reg("R12"),
jit.Reg("R13"),
jit.Reg("R14"),
jit.Reg("R15"),
}
func checkptr(ptr uintptr) {
if ptr == 0 {
return
}
fmt.Printf("pointer: %x\n", ptr)
f := checkptrBase(unsafe.Pointer(uintptr(ptr)))
if f == 0 {
fmt.Printf("! unknown-based pointer: %x\n", ptr)
} else if f == 1 {
fmt.Printf("! stack pointer: %x\n", ptr)
} else {
fmt.Printf("base: %x\n", f)
}
findobj(ptr)
}
func findobj(ptr uintptr) {
base, s, objIndex := findObject(ptr, 0, 0)
if s != nil && base == 0 {
fmt.Printf("! invalid pointer: %x\n", ptr)
}
fmt.Printf("objIndex: %d\n", objIndex)
}
func (self *_Assembler) check_ptr(ptr obj.Addr, lea bool) {
if !debugCheckPtr {
return
}
self.dsave(_REG_debug...)
if lea {
self.Emit("LEAQ", ptr, _R10)
} else {
self.Emit("MOVQ", ptr, _R10)
}
self.Emit("MOVQ", _R10, jit.Ptr(_SP, 0))
self.dcall(_F_checkptr)
self.dload(_REG_debug...)
}
func printptr(i int, ptr uintptr) {
fmt.Printf("[%d] ptr: %x\n", i, ptr)
}
func (self *_Assembler) print_ptr(i int, ptr obj.Addr, lea bool) {
self.dsave(_REG_debug...)
if lea {
self.Emit("LEAQ", ptr, _R10)
} else {
self.Emit("MOVQ", ptr, _R10)
}
self.Emit("MOVQ", jit.Imm(int64(i)), _AX)
self.Emit("MOVQ", _R10, _BX)
self.dcall(_F_printptr)
self.dload(_REG_debug...)
}

View File

@@ -17,63 +17,63 @@
package encoder
import (
`bytes`
`encoding/json`
`reflect`
`runtime`
`unsafe`
"bytes"
"encoding/json"
"reflect"
"runtime"
"unsafe"
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/utf8`
`github.com/bytedance/sonic/option`
"github.com/bytedance/sonic/utf8"
"github.com/bytedance/sonic/internal/encoder/alg"
"github.com/bytedance/sonic/internal/encoder/vars"
"github.com/bytedance/sonic/internal/rt"
"github.com/bytedance/sonic/option"
"github.com/bytedance/gopkg/lang/dirtmake"
)
// Options is a set of encoding options.
type Options uint64
const (
bitSortMapKeys = iota
bitEscapeHTML
bitCompactMarshaler
bitNoQuoteTextMarshaler
bitNoNullSliceOrMap
bitValidateString
// used for recursive compile
bitPointerValue = 63
)
const (
// SortMapKeys indicates that the keys of a map need to be sorted
// before serializing into JSON.
// WARNING: This hurts performance A LOT, USE WITH CARE.
SortMapKeys Options = 1 << bitSortMapKeys
SortMapKeys Options = 1 << alg.BitSortMapKeys
// EscapeHTML indicates that the encoder should escape all HTML characters
// after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
// WARNING: This hurts performance A LOT, USE WITH CARE.
EscapeHTML Options = 1 << bitEscapeHTML
EscapeHTML Options = 1 << alg.BitEscapeHTML
// CompactMarshaler indicates that the output JSON from json.Marshaler
// is always compact and needs no validation
CompactMarshaler Options = 1 << bitCompactMarshaler
CompactMarshaler Options = 1 << alg.BitCompactMarshaler
// NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
// is always an escaped string and needs no quoting
NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler
NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler
// NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
// instead of 'null'
NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap
// instead of 'null'.
// NOTE: The priority of this option is lower than json tag `omitempty`.
NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
// ValidateString indicates that encoder should validate the input string
// before encoding it into JSON.
ValidateString Options = 1 << bitValidateString
ValidateString Options = 1 << alg.BitValidateString
// NoValidateJSONMarshaler indicates that the encoder should not validate the output string
// after encoding the JSONMarshaler to JSON.
NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler
// NoEncoderNewline indicates that the encoder should not add a newline after every message
NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline
// CompatibleWithStd is used to be compatible with std encoder.
CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
// EncodeNullForInfOrNan encodes Infinity or NaN floats as `null` instead of returning an error.
EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
)
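A minimal sketch of combining these options with the Encode function defined later in this file (illustrative only):

func exampleOptions() {
    v := map[string]interface{}{"msg": "<b>hi</b>", "n": 1}
    // deterministic key order plus HTML escaping, i.e. std-like behaviour
    out, err := Encode(v, SortMapKeys|EscapeHTML)
    _, _ = out, err
    // CompatibleWithStd bundles SortMapKeys | EscapeHTML | CompactMarshaler
}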
// Encoder represents a specific set of encoder configurations.
@@ -115,6 +115,25 @@ func (self *Encoder) SetValidateString(f bool) {
}
}
// SetNoValidateJSONMarshaler specifies whether the NoValidateJSONMarshaler option is enabled
func (self *Encoder) SetNoValidateJSONMarshaler(f bool) {
if f {
self.Opts |= NoValidateJSONMarshaler
} else {
self.Opts &= ^NoValidateJSONMarshaler
}
}
// SetNoEncoderNewline specifies whether the NoEncoderNewline option is enabled
func (self *Encoder) SetNoEncoderNewline(f bool) {
if f {
self.Opts |= NoEncoderNewline
} else {
self.Opts &= ^NoEncoderNewline
}
}
// SetCompactMarshaler specifies whether the CompactMarshaler option is enabled
func (self *Encoder) SetCompactMarshaler(f bool) {
if f {
@@ -143,53 +162,45 @@ func (enc *Encoder) SetIndent(prefix, indent string) {
// Quote returns the JSON-quoted version of s.
func Quote(s string) string {
var n int
var p []byte
/* check for empty string */
if s == "" {
return `""`
}
/* allocate space for result */
n = len(s) + 2
p = make([]byte, 0, n)
/* call the encoder */
_ = encodeString(&p, s)
return rt.Mem2Str(p)
buf := make([]byte, 0, len(s)+2)
buf = alg.Quote(buf, s, false)
return rt.Mem2Str(buf)
}
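An illustrative call of Quote; the result described in the comment follows from the doc comment above:

func exampleQuote() {
    s := Quote("say \"hi\"\n")
    // s is the JSON string literal `"say \"hi\"\n"`: the input wrapped in double
    // quotes with the inner quotes and the newline escaped
    _ = s
}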
// Encode returns the JSON encoding of val, encoded with opts.
func Encode(val interface{}, opts Options) ([]byte, error) {
var ret []byte
buf := newBytes()
err := encodeInto(&buf, val, opts)
buf := vars.NewBytes()
err := encodeIntoCheckRace(buf, val, opts)
/* check for errors */
if err != nil {
freeBytes(buf)
vars.FreeBytes(buf)
return nil, err
}
/* HTML-escape or correct UTF-8 if the options require it */
old := buf
buf = encodeFinish(old, opts)
pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr
*buf = encodeFinish(*old, opts)
pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr
pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr
/* return when allocated a new buffer */
if pbuf != pold {
freeBytes(old)
return buf, nil
vars.FreeBytes(old)
return *buf, nil
}
/* make a copy of the result */
ret = make([]byte, len(buf))
copy(ret, buf)
freeBytes(buf)
if rt.CanSizeResue(cap(*buf)) {
ret = dirtmake.Bytes(len(*buf), len(*buf))
copy(ret, *buf)
vars.FreeBytes(buf)
} else {
ret = *buf
}
/* return the buffer into pool */
return ret, nil
}
@@ -197,7 +208,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
// a new one.
func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
err := encodeInto(buf, val, opts)
err := encodeIntoCheckRace(buf, val, opts)
if err != nil {
return err
}
@@ -206,15 +217,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
}
func encodeInto(buf *[]byte, val interface{}, opts Options) error {
stk := newStack()
stk := vars.NewStack()
efv := rt.UnpackEface(val)
err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))
/* return the stack into pool */
if err != nil {
resetStack(stk)
vars.ResetStack(stk)
}
freeStack(stk)
vars.FreeStack(stk)
/* avoid GC ahead */
runtime.KeepAlive(buf)
@@ -226,13 +237,12 @@ func encodeFinish(buf []byte, opts Options) []byte {
if opts & EscapeHTML != 0 {
buf = HTMLEscape(nil, buf)
}
if opts & ValidateString != 0 && !utf8.Validate(buf) {
if (opts & ValidateString != 0) && !utf8.Validate(buf) {
buf = utf8.CorrectWith(nil, buf, `\ufffd`)
}
return buf
}
var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
@@ -241,7 +251,7 @@ var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst []byte, src []byte) []byte {
return htmlEscape(dst, src)
return alg.HtmlEscape(dst, src)
}
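A quick sketch of HTMLEscape on already-encoded JSON; the expected output in the comment follows the doc comment above:

func exampleHTMLEscape() {
    src := []byte(`{"html":"<script>alert(1)</script>"}`)
    dst := HTMLEscape(nil, src)
    // dst: {"html":"\u003cscript\u003ealert(1)\u003c/script\u003e"}
    _ = dst
}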
// EncodeIndented is like Encode but applies Indent to format the output.
@@ -249,37 +259,40 @@ func HTMLEscape(dst []byte, src []byte) []byte {
// followed by one or more copies of indent according to the indentation nesting.
func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
var err error
var out []byte
var buf *bytes.Buffer
/* encode into the buffer */
out = newBytes()
err = EncodeInto(&out, val, opts)
out := vars.NewBytes()
err = EncodeInto(out, val, opts)
/* check for errors */
if err != nil {
freeBytes(out)
vars.FreeBytes(out)
return nil, err
}
/* indent the JSON */
buf = newBuffer()
err = json.Indent(buf, out, prefix, indent)
buf = vars.NewBuffer()
err = json.Indent(buf, *out, prefix, indent)
vars.FreeBytes(out)
/* check for errors */
if err != nil {
freeBytes(out)
freeBuffer(buf)
vars.FreeBuffer(buf)
return nil, err
}
/* copy to the result buffer */
ret := make([]byte, buf.Len())
copy(ret, buf.Bytes())
/* return the buffers into pool */
freeBytes(out)
freeBuffer(buf)
var ret []byte
if rt.CanSizeResue(cap(buf.Bytes())) {
ret = make([]byte, buf.Len())
copy(ret, buf.Bytes())
/* return the buffers into pool */
vars.FreeBuffer(buf)
} else {
ret = buf.Bytes()
}
return ret, nil
}
@@ -302,26 +315,5 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
//
// Note: it does not check for invalid UTF-8 characters.
func Valid(data []byte) (ok bool, start int) {
n := len(data)
if n == 0 {
return false, -1
}
s := rt.Mem2Str(data)
p := 0
m := types.NewStateMachine()
ret := native.ValidateOne(&s, &p, m)
types.FreeStateMachine(m)
if ret < 0 {
return false, p-1
}
/* check for trailing spaces */
for ;p < n; p++ {
if (types.SPACE_MASK & (1 << data[p])) == 0 {
return false, p
}
}
return true, ret
return alg.Valid(data)
}
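A usage sketch of Valid; the comments reflect the return convention of the implementation shown above:

func exampleValid() {
    ok, start := Valid([]byte(` {"a": 1} `))
    // well-formed input: ok is true, start refers to the value's position
    _, _ = ok, start

    ok, start = Valid([]byte(`{"a": }`))
    // malformed input: ok is false, start points at (or just before) the offending byte
    _, _ = ok, start
}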

View File

@@ -1,65 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding/json`
`fmt`
`reflect`
`strconv`
`unsafe`
`github.com/bytedance/sonic/internal/rt`
)
var _ERR_too_deep = &json.UnsupportedValueError {
Str : "Value nesting too deep",
Value : reflect.ValueOf("..."),
}
var _ERR_nan_or_infinite = &json.UnsupportedValueError {
Str : "NaN or ±Infinite",
Value : reflect.ValueOf("NaN or ±Infinite"),
}
func error_type(vtype reflect.Type) error {
return &json.UnsupportedTypeError{Type: vtype}
}
func error_number(number json.Number) error {
return &json.UnsupportedValueError {
Str : "invalid number literal: " + strconv.Quote(string(number)),
Value : reflect.ValueOf(number),
}
}
func error_marshaler(ret []byte, pos int) error {
return fmt.Errorf("invalid Marshaler output json syntax at %d: %q", pos, ret)
}
const (
panicNilPointerOfNonEmptyString int = 1 + iota
)
func goPanic(code int, val unsafe.Pointer) {
switch(code){
case panicNilPointerOfNonEmptyString:
panic(fmt.Sprintf("val: %#v has nil pointer while its length is not zero!", (*rt.GoString)(val)))
default:
panic("encoder error!")
}
}

View File

@@ -1,199 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
"encoding"
"reflect"
"sync"
"unsafe"
"github.com/bytedance/sonic/internal/native"
"github.com/bytedance/sonic/internal/rt"
)
type _MapPair struct {
k string // when the map key is an integer, k points into m
v unsafe.Pointer
m [32]byte
}
type _MapIterator struct {
it rt.GoMapIterator // must be the first field
kv rt.GoSlice // slice of _MapPair
ki int
}
var (
iteratorPool = sync.Pool{}
iteratorPair = rt.UnpackType(reflect.TypeOf(_MapPair{}))
)
func init() {
if unsafe.Offsetof(_MapIterator{}.it) != 0 {
panic("_MapIterator.it is not the first field")
}
}
func newIterator() *_MapIterator {
if v := iteratorPool.Get(); v == nil {
return new(_MapIterator)
} else {
return resetIterator(v.(*_MapIterator))
}
}
func resetIterator(p *_MapIterator) *_MapIterator {
p.ki = 0
p.it = rt.GoMapIterator{}
p.kv.Len = 0
return p
}
func (self *_MapIterator) at(i int) *_MapPair {
return (*_MapPair)(unsafe.Pointer(uintptr(self.kv.Ptr) + uintptr(i) * unsafe.Sizeof(_MapPair{})))
}
func (self *_MapIterator) add() (p *_MapPair) {
p = self.at(self.kv.Len)
self.kv.Len++
return
}
func (self *_MapIterator) data() (p []_MapPair) {
*(*rt.GoSlice)(unsafe.Pointer(&p)) = self.kv
return
}
func (self *_MapIterator) append(t *rt.GoType, k unsafe.Pointer, v unsafe.Pointer) (err error) {
p := self.add()
p.v = v
/* check for strings */
if tk := t.Kind(); tk != reflect.String {
return self.appendGeneric(p, t, tk, k)
}
/* fast path for strings */
p.k = *(*string)(k)
return nil
}
func (self *_MapIterator) appendGeneric(p *_MapPair, t *rt.GoType, v reflect.Kind, k unsafe.Pointer) error {
switch v {
case reflect.Int : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int)(k)))]) ; return nil
case reflect.Int8 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int8)(k)))]) ; return nil
case reflect.Int16 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int16)(k)))]) ; return nil
case reflect.Int32 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], int64(*(*int32)(k)))]) ; return nil
case reflect.Int64 : p.k = rt.Mem2Str(p.m[:native.I64toa(&p.m[0], *(*int64)(k))]) ; return nil
case reflect.Uint : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint)(k)))]) ; return nil
case reflect.Uint8 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint8)(k)))]) ; return nil
case reflect.Uint16 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint16)(k)))]) ; return nil
case reflect.Uint32 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uint32)(k)))]) ; return nil
case reflect.Uint64 : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], *(*uint64)(k))]) ; return nil
case reflect.Uintptr : p.k = rt.Mem2Str(p.m[:native.U64toa(&p.m[0], uint64(*(*uintptr)(k)))]) ; return nil
case reflect.Interface : return self.appendInterface(p, t, k)
case reflect.Struct, reflect.Ptr : return self.appendConcrete(p, t, k)
default : panic("unexpected map key type")
}
}
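The integer-key cases above format the key into the pair's 32-byte inline buffer and alias it as a string, avoiding a per-key allocation when map keys are sorted. A rough standalone sketch of the same idea, using strconv and unsafe.String (Go 1.20+) purely for illustration rather than the library's rt helpers:

func exampleInlineKey() string {
    var m [32]byte
    n := len(strconv.AppendInt(m[:0], 42, 10)) // format the key into the inline buffer
    return unsafe.String(&m[0], n)             // the string header aliases m; no copy
}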
func (self *_MapIterator) appendConcrete(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
// the compiler has already checked that the type implements the encoding.TextMarshaler interface
if !t.Indirect() {
k = *(*unsafe.Pointer)(k)
}
eface := rt.GoEface{Value: k, Type: t}.Pack()
out, err := eface.(encoding.TextMarshaler).MarshalText()
if err != nil {
return err
}
p.k = rt.Mem2Str(out)
return
}
func (self *_MapIterator) appendInterface(p *_MapPair, t *rt.GoType, k unsafe.Pointer) (err error) {
if len(rt.IfaceType(t).Methods) == 0 {
panic("unexpected map key type")
} else if p.k, err = asText(k); err == nil {
return nil
} else {
return
}
}
func iteratorStop(p *_MapIterator) {
iteratorPool.Put(p)
}
func iteratorNext(p *_MapIterator) {
i := p.ki
t := &p.it
/* check for unordered iteration */
if i < 0 {
mapiternext(t)
return
}
/* check for end of iteration */
if p.ki >= p.kv.Len {
t.K = nil
t.V = nil
return
}
/* update the key-value pair, and increase the pointer */
t.K = unsafe.Pointer(&p.at(p.ki).k)
t.V = p.at(p.ki).v
p.ki++
}
func iteratorStart(t *rt.GoMapType, m *rt.GoMap, fv uint64) (*_MapIterator, error) {
it := newIterator()
mapiterinit(t, m, &it.it)
/* check for key-sorting; an empty map doesn't need sorting */
if m.Count == 0 || (fv & uint64(SortMapKeys)) == 0 {
it.ki = -1
return it, nil
}
/* pre-allocate space if needed */
if m.Count > it.kv.Cap {
it.kv = growslice(iteratorPair, it.kv, m.Count)
}
/* dump all the key-value pairs */
for ; it.it.K != nil; mapiternext(&it.it) {
if err := it.append(t.Key, it.it.K, it.it.V); err != nil {
iteratorStop(it)
return nil, err
}
}
/* sort the keys; a map with only 1 item doesn't need sorting */
if it.ki = 1; m.Count > 1 {
radixQsort(it.data(), 0, maxDepth(it.kv.Len))
}
/* load the first pair into iterator */
it.it.V = it.at(0).v
it.it.K = unsafe.Pointer(&it.at(0).k)
return it, nil
}

View File

@@ -1,193 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`bytes`
`sync`
`unsafe`
`errors`
`reflect`
`github.com/bytedance/sonic/internal/caching`
`github.com/bytedance/sonic/option`
`github.com/bytedance/sonic/internal/rt`
)
const (
_MaxStack = 4096 // 4k states
_StackSize = unsafe.Sizeof(_Stack{})
)
var (
bytesPool = sync.Pool{}
stackPool = sync.Pool{}
bufferPool = sync.Pool{}
programCache = caching.CreateProgramCache()
)
type _State struct {
x int
f uint64
p unsafe.Pointer
q unsafe.Pointer
}
type _Stack struct {
sp uint64
sb [_MaxStack]_State
}
type _Encoder func(
rb *[]byte,
vp unsafe.Pointer,
sb *_Stack,
fv uint64,
) error
var _KeepAlive struct {
rb *[]byte
vp unsafe.Pointer
sb *_Stack
fv uint64
err error
frame [_FP_offs]byte
}
var errCallShadow = errors.New("DON'T CALL THIS!")
// Fake counterpart of _Encoder, used only to export its stackmap as _Encoder's
func _Encoder_Shadow(rb *[]byte, vp unsafe.Pointer, sb *_Stack, fv uint64) (err error) {
// align to assembler_amd64.go: _FP_offs
var frame [_FP_offs]byte
// must keep all args and frames visible to the GC
_KeepAlive.rb = rb
_KeepAlive.vp = vp
_KeepAlive.sb = sb
_KeepAlive.fv = fv
_KeepAlive.err = err
_KeepAlive.frame = frame
return errCallShadow
}
func newBytes() []byte {
if ret := bytesPool.Get(); ret != nil {
return ret.([]byte)
} else {
return make([]byte, 0, option.DefaultEncoderBufferSize)
}
}
func newStack() *_Stack {
if ret := stackPool.Get(); ret == nil {
return new(_Stack)
} else {
return ret.(*_Stack)
}
}
func resetStack(p *_Stack) {
memclrNoHeapPointers(unsafe.Pointer(p), _StackSize)
}
func newBuffer() *bytes.Buffer {
if ret := bufferPool.Get(); ret != nil {
return ret.(*bytes.Buffer)
} else {
return bytes.NewBuffer(make([]byte, 0, option.DefaultEncoderBufferSize))
}
}
func freeBytes(p []byte) {
p = p[:0]
bytesPool.Put(p)
}
func freeStack(p *_Stack) {
p.sp = 0
stackPool.Put(p)
}
func freeBuffer(p *bytes.Buffer) {
p.Reset()
bufferPool.Put(p)
}
func makeEncoder(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
if pp, err := newCompiler().compile(vt.Pack(), ex[0].(bool)); err != nil {
return nil, err
} else {
as := newAssembler(pp)
as.name = vt.String()
return as.Load(), nil
}
}
func findOrCompile(vt *rt.GoType, pv bool) (_Encoder, error) {
if val := programCache.Get(vt); val != nil {
return val.(_Encoder), nil
} else if ret, err := programCache.Compute(vt, makeEncoder, pv); err == nil {
return ret.(_Encoder), nil
} else {
return nil, err
}
}
func pretouchType(_vt reflect.Type, opts option.CompileOptions, v uint8) (map[reflect.Type]uint8, error) {
/* compile function */
compiler := newCompiler().apply(opts)
encoder := func(vt *rt.GoType, ex ...interface{}) (interface{}, error) {
if pp, err := compiler.compile(_vt, ex[0].(bool)); err != nil {
return nil, err
} else {
as := newAssembler(pp)
as.name = vt.String()
return as.Load(), nil
}
}
/* find or compile */
vt := rt.UnpackType(_vt)
if val := programCache.Get(vt); val != nil {
return nil, nil
} else if _, err := programCache.Compute(vt, encoder, v == 1); err == nil {
return compiler.rec, nil
} else {
return nil, err
}
}
func pretouchRec(vtm map[reflect.Type]uint8, opts option.CompileOptions) error {
if opts.RecursiveDepth < 0 || len(vtm) == 0 {
return nil
}
next := make(map[reflect.Type]uint8)
for vt, v := range vtm {
sub, err := pretouchType(vt, opts, v)
if err != nil {
return err
}
for svt, v := range sub {
next[svt] = v
}
}
opts.RecursiveDepth -= 1
return pretouchRec(next, opts)
}

View File

@@ -1,168 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding`
`encoding/json`
`unsafe`
`github.com/bytedance/sonic/internal/jit`
`github.com/bytedance/sonic/internal/native`
`github.com/bytedance/sonic/internal/rt`
)
/** Encoder Primitives **/
func encodeNil(rb *[]byte) error {
*rb = append(*rb, 'n', 'u', 'l', 'l')
return nil
}
func encodeString(buf *[]byte, val string) error {
var sidx int
var pbuf *rt.GoSlice
var pstr *rt.GoString
/* opening quote */
*buf = append(*buf, '"')
pbuf = (*rt.GoSlice)(unsafe.Pointer(buf))
pstr = (*rt.GoString)(unsafe.Pointer(&val))
/* encode with native library */
for sidx < pstr.Len {
sn := pstr.Len - sidx
dn := pbuf.Cap - pbuf.Len
sp := padd(pstr.Ptr, sidx)
dp := padd(pbuf.Ptr, pbuf.Len)
nb := native.Quote(sp, sn, dp, &dn, 0)
/* check for errors */
if pbuf.Len += dn; nb >= 0 {
break
}
/* not enough space, grow the slice and try again */
sidx += ^nb
*pbuf = growslice(rt.UnpackType(byteType), *pbuf, pbuf.Cap * 2)
}
/* closing quote */
*buf = append(*buf, '"')
return nil
}
func encodeTypedPointer(buf *[]byte, vt *rt.GoType, vp *unsafe.Pointer, sb *_Stack, fv uint64) error {
if vt == nil {
return encodeNil(buf)
} else if fn, err := findOrCompile(vt, (fv&(1<<bitPointerValue)) != 0); err != nil {
return err
} else if vt.Indirect() {
rt.MoreStack(_FP_size + native.MaxFrameSize)
rt.StopProf()
err := fn(buf, *vp, sb, fv)
rt.StartProf()
return err
} else {
rt.MoreStack(_FP_size + native.MaxFrameSize)
rt.StopProf()
err := fn(buf, unsafe.Pointer(vp), sb, fv)
rt.StartProf()
return err
}
}
func encodeJsonMarshaler(buf *[]byte, val json.Marshaler, opt Options) error {
if ret, err := val.MarshalJSON(); err != nil {
return err
} else {
if opt & CompactMarshaler != 0 {
return compact(buf, ret)
}
if ok, s := Valid(ret); !ok {
return error_marshaler(ret, s)
}
*buf = append(*buf, ret...)
return nil
}
}
func encodeTextMarshaler(buf *[]byte, val encoding.TextMarshaler, opt Options) error {
if ret, err := val.MarshalText(); err != nil {
return err
} else {
if opt & NoQuoteTextMarshaler != 0 {
*buf = append(*buf, ret...)
return nil
}
return encodeString(buf, rt.Mem2Str(ret))
}
}
func htmlEscape(dst []byte, src []byte) []byte {
var sidx int
dst = append(dst, src[:0]...) // append nothing, just make sure dst is not nil
sbuf := (*rt.GoSlice)(unsafe.Pointer(&src))
dbuf := (*rt.GoSlice)(unsafe.Pointer(&dst))
/* grow dst if it is shorter */
if cap(dst) - len(dst) < len(src) + native.BufPaddingSize {
cap := len(src) * 3 / 2 + native.BufPaddingSize
*dbuf = growslice(typeByte, *dbuf, cap)
}
for sidx < sbuf.Len {
sp := padd(sbuf.Ptr, sidx)
dp := padd(dbuf.Ptr, dbuf.Len)
sn := sbuf.Len - sidx
dn := dbuf.Cap - dbuf.Len
nb := native.HTMLEscape(sp, sn, dp, &dn)
/* check for errors */
if dbuf.Len += dn; nb >= 0 {
break
}
/* not enough space, grow the slice and try again */
sidx += ^nb
*dbuf = growslice(typeByte, *dbuf, dbuf.Cap * 2)
}
return dst
}
var (
argPtrs = []bool { true, true, true, false }
localPtrs = []bool{}
)
var (
_F_assertI2I = jit.Func(assertI2I)
)
func asText(v unsafe.Pointer) (string, error) {
text := assertI2I(_T_encoding_TextMarshaler, *(*rt.GoIface)(v))
r, e := (*(*encoding.TextMarshaler)(unsafe.Pointer(&text))).MarshalText()
return rt.Mem2Str(r), e
}
func asJson(v unsafe.Pointer) (string, error) {
text := assertI2I(_T_json_Marshaler, *(*rt.GoIface)(v))
r, e := (*(*json.Marshaler)(unsafe.Pointer(&text))).MarshalJSON()
return rt.Mem2Str(r), e
}

View File

@@ -1,206 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
// Algorithm: 3-way radix quicksort; d is the index of the byte (radix digit) currently compared.
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
func radixQsort(kvs []_MapPair, d, maxDepth int) {
for len(kvs) > 11 {
// To avoid quicksort's O(n^2) worst case, fall back to heapsort once maxDepth is exhausted (introsort).
// Reference: https://en.wikipedia.org/wiki/Introsort and
// https://github.com/golang/go/issues/467
if maxDepth == 0 {
heapSort(kvs, 0, len(kvs))
return
}
maxDepth--
p := pivot(kvs, d)
lt, i, gt := 0, 0, len(kvs)
for i < gt {
c := byteAt(kvs[i].k, d)
if c < p {
swap(kvs, lt, i)
i++
lt++
} else if c > p {
gt--
swap(kvs, i, gt)
} else {
i++
}
}
// kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)]
// Naive implementation:
// radixQsort(kvs[:lt], d, maxDepth)
// if p > -1 {
// radixQsort(kvs[lt:gt], d+1, maxDepth)
// }
// radixQsort(kvs[gt:], d, maxDepth)
// Optimization: recurse only into the smaller partitions and iterate on the largest one, bounding stack depth (see the worked example after this function).
// Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/
if p == -1 {
if lt > len(kvs) - gt {
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else {
radixQsort(kvs[:lt], d, maxDepth)
kvs = kvs[gt:]
}
} else {
ml := maxThree(lt, gt-lt, len(kvs)-gt)
if ml == lt {
radixQsort(kvs[lt:gt], d+1, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else if ml == gt-lt {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[lt:gt]
d += 1
} else {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[lt:gt], d+1, maxDepth)
kvs = kvs[gt:]
}
}
}
insertRadixSort(kvs, d)
}
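// A short worked example of the partition pass above (illustrative only):
// with keys {"bb", "ba", "ab", "b"} and d = 0, pivot() picks p = 'b'; the pass
// swaps "ab" to the front, leaving kvs = {"ab", "ba", "bb", "b"} with lt = 1
// and gt = 4. The equal range kvs[1:4] is then re-sorted at d+1, where
// byteAt("b", 1) == -1 places "b" before "ba" and "bb". Note that a slice this
// small (<= 11 elements) would in practice fall straight through to
// insertRadixSort below.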
func insertRadixSort(kvs []_MapPair, d int) {
for i := 1; i < len(kvs); i++ {
for j := i; j > 0 && lessFrom(kvs[j].k, kvs[j-1].k, d); j-- {
swap(kvs, j, j-1)
}
}
}
func pivot(kvs []_MapPair, d int) int {
m := len(kvs) >> 1
if len(kvs) > 40 {
// Tukey's ``Ninther'': the median of three medians, each taken over three samples.
t := len(kvs) / 8
return medianThree(
medianThree(byteAt(kvs[0].k, d), byteAt(kvs[t].k, d), byteAt(kvs[2*t].k, d)),
medianThree(byteAt(kvs[m].k, d), byteAt(kvs[m-t].k, d), byteAt(kvs[m+t].k, d)),
medianThree(byteAt(kvs[len(kvs)-1].k, d),
byteAt(kvs[len(kvs)-1-t].k, d),
byteAt(kvs[len(kvs)-1-2*t].k, d)))
}
return medianThree(byteAt(kvs[0].k, d), byteAt(kvs[m].k, d), byteAt(kvs[len(kvs)-1].k, d))
}
func medianThree(i, j, k int) int {
if i > j {
i, j = j, i
} // i < j
if k < i {
return i
}
if k > j {
return j
}
return k
}
func maxThree(i, j, k int) int {
max := i
if max < j {
max = j
}
if max < k {
max = k
}
return max
}
// maxDepth returns a threshold at which quicksort should switch
// to heapsort. It returns 2*ceil(lg(n+1)).
func maxDepth(n int) int {
var depth int
for i := n; i > 0; i >>= 1 {
depth++
}
return depth * 2
}
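// Worked example (illustrative): maxDepth(1000) loops ten times
// (1000, 500, ..., 1), so it returns 20, i.e. 2*ceil(lg(1001)).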
// siftDown implements the heap property on kvs[lo:hi].
// first is an offset into the array where the root of the heap lies.
func siftDown(kvs []_MapPair, lo, hi, first int) {
root := lo
for {
child := 2*root + 1
if child >= hi {
break
}
if child+1 < hi && kvs[first+child].k < kvs[first+child+1].k {
child++
}
if kvs[first+root].k >= kvs[first+child].k {
return
}
swap(kvs, first+root, first+child)
root = child
}
}
func heapSort(kvs []_MapPair, a, b int) {
first := a
lo := 0
hi := b - a
// Build heap with the greatest element at top.
for i := (hi - 1) / 2; i >= 0; i-- {
siftDown(kvs, i, hi, first)
}
// Pop elements, the largest first, into end of kvs.
for i := hi - 1; i >= 0; i-- {
swap(kvs, first, first+i)
siftDown(kvs, lo, i, first)
}
}
// Note that after a swap _MapPair.k does NOT point into _MapPair.m when the map key is an integer.
func swap(kvs []_MapPair, a, b int) {
kvs[a].k, kvs[b].k = kvs[b].k, kvs[a].k
kvs[a].v, kvs[b].v = kvs[b].v, kvs[a].v
}
// lessFrom compares two strings starting from position d.
func lessFrom(a, b string, d int) bool {
l := len(a)
if l > len(b) {
l = len(b)
}
for i := d; i < l; i++ {
if a[i] == b[i] {
continue
}
return a[i] < b[i]
}
return len(a) < len(b)
}
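// For example (illustrative): lessFrom("abc", "abd", 2) compares 'c' < 'd' and
// returns true, while lessFrom("ab", "abc", 2) finds no differing byte and
// falls back to len("ab") < len("abc"), which is also true.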
func byteAt(b string, p int) int {
if p < len(b) {
return int(b[p])
}
return -1
}

View File

@@ -17,8 +17,10 @@
package encoder
import (
`encoding/json`
`io`
"encoding/json"
"io"
"github.com/bytedance/sonic/internal/encoder/vars"
)
// StreamEncoder uses io.Writer as input.
@@ -36,49 +38,54 @@ func NewStreamEncoder(w io.Writer) *StreamEncoder {
// Encode encodes interface{} as JSON to io.Writer
func (enc *StreamEncoder) Encode(val interface{}) (err error) {
out := newBytes()
out := vars.NewBytes()
/* encode into the buffer */
err = EncodeInto(&out, val, enc.Opts)
err = EncodeInto(out, val, enc.Opts)
if err != nil {
goto free_bytes
}
if enc.indent != "" || enc.prefix != "" {
/* indent the JSON */
buf := newBuffer()
err = json.Indent(buf, out, enc.prefix, enc.indent)
buf := vars.NewBuffer()
err = json.Indent(buf, *out, enc.prefix, enc.indent)
if err != nil {
freeBuffer(buf)
vars.FreeBuffer(buf)
goto free_bytes
}
// following the standard library, terminate each value with a newline...
buf.WriteByte('\n')
if enc.Opts & NoEncoderNewline == 0 {
buf.WriteByte('\n')
}
/* copy into io.Writer */
_, err = io.Copy(enc.w, buf)
if err != nil {
freeBuffer(buf)
vars.FreeBuffer(buf)
goto free_bytes
}
} else {
/* copy into io.Writer */
var n int
for len(out) > 0 {
n, err = enc.w.Write(out)
out = out[n:]
buf := *out
for len(buf) > 0 {
n, err = enc.w.Write(buf)
buf = buf[n:]
if err != nil {
goto free_bytes
}
}
// following the standard library, terminate each value with a newline...
enc.w.Write([]byte{'\n'})
if enc.Opts & NoEncoderNewline == 0 {
enc.w.Write([]byte{'\n'})
}
}
free_bytes:
freeBytes(out)
vars.FreeBytes(out)
return err
}
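A minimal usage sketch of the stream path changed above (illustrative only; SetIndent is assumed to be the exported setter behind the prefix/indent fields used here):

package main

import (
    "os"

    "github.com/bytedance/sonic/encoder"
)

func main() {
    enc := encoder.NewStreamEncoder(os.Stdout)
    enc.SetIndent("", "  ") // routes output through the json.Indent branch above
    // Unless the NoEncoderNewline option is set, each value is terminated
    // with '\n', mirroring encoding/json.Encoder.
    _ = enc.Encode(map[string]int{"a": 1})
}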

View File

@@ -1,65 +0,0 @@
// +build go1.16,!go1.17
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname assertI2I runtime.assertI2I
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
var _runtime_writeBarrier uintptr = rt.GcwbAddr()
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

View File

@@ -1,66 +0,0 @@
// +build go1.17,!go1.20
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname growslice runtime.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname assertI2I runtime.assertI2I2
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

View File

@@ -1,66 +0,0 @@
// +build go1.20,!go1.21
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`unsafe`
_ `github.com/chenzhuoyu/base64x`
`github.com/bytedance/sonic/internal/rt`
)
//go:linkname _subr__b64encode github.com/chenzhuoyu/base64x._subr__b64encode
var _subr__b64encode uintptr
//go:noescape
//go:linkname memmove runtime.memmove
//goland:noinspection GoUnusedParameter
func memmove(to unsafe.Pointer, from unsafe.Pointer, n uintptr)
//go:linkname growslice reflect.growslice
//goland:noinspection GoUnusedParameter
func growslice(et *rt.GoType, old rt.GoSlice, cap int) rt.GoSlice
//go:linkname assertI2I runtime.assertI2I2
//goland:noinspection GoUnusedParameter
func assertI2I(inter *rt.GoType, i rt.GoIface) rt.GoIface
//go:linkname mapiternext runtime.mapiternext
//goland:noinspection GoUnusedParameter
func mapiternext(it *rt.GoMapIterator)
//go:linkname mapiterinit runtime.mapiterinit
//goland:noinspection GoUnusedParameter
func mapiterinit(t *rt.GoMapType, m *rt.GoMap, it *rt.GoMapIterator)
//go:linkname isValidNumber encoding/json.isValidNumber
//goland:noinspection GoUnusedParameter
func isValidNumber(s string) bool
//go:noescape
//go:linkname memclrNoHeapPointers runtime.memclrNoHeapPointers
//goland:noinspection GoUnusedParameter
func memclrNoHeapPointers(ptr unsafe.Pointer, n uintptr)
//go:linkname _runtime_writeBarrier runtime.writeBarrier
var _runtime_writeBarrier uintptr
//go:linkname gcWriteBarrierAX runtime.gcWriteBarrier
func gcWriteBarrierAX()

View File

@@ -1,47 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding`
`encoding/json`
`reflect`
)
var (
byteType = reflect.TypeOf(byte(0))
jsonNumberType = reflect.TypeOf(json.Number(""))
jsonUnsupportedValueType = reflect.TypeOf(new(json.UnsupportedValueError))
)
var (
errorType = reflect.TypeOf((*error)(nil)).Elem()
jsonMarshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem()
encodingTextMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
)
func isSimpleByte(vt reflect.Type) bool {
if vt.Kind() != byteType.Kind() {
return false
} else {
return !isEitherMarshaler(vt) && !isEitherMarshaler(reflect.PtrTo(vt))
}
}
func isEitherMarshaler(vt reflect.Type) bool {
return vt.Implements(jsonMarshalerType) || vt.Implements(encodingTextMarshalerType)
}

View File

@@ -1,52 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package encoder
import (
`encoding/json`
`unsafe`
`github.com/bytedance/sonic/loader`
)
//go:nosplit
func padd(p unsafe.Pointer, v int) unsafe.Pointer {
return unsafe.Pointer(uintptr(p) + uintptr(v))
}
//go:nosplit
func ptoenc(p loader.Function) _Encoder {
return *(*_Encoder)(unsafe.Pointer(&p))
}
func compact(p *[]byte, v []byte) error {
buf := newBuffer()
err := json.Compact(buf, v)
/* check for errors */
if err != nil {
return err
}
/* add to result */
v = buf.Bytes()
*p = append(*p, v...)
/* return the buffer into pool */
freeBuffer(buf)
return nil
}

View File

@@ -17,8 +17,10 @@
package jit
import (
`github.com/twitchyliquid64/golang-asm/asm/arch`
`github.com/twitchyliquid64/golang-asm/obj`
"unsafe"
"github.com/twitchyliquid64/golang-asm/asm/arch"
"github.com/twitchyliquid64/golang-asm/obj"
)
var (
@@ -33,6 +35,13 @@ func As(op string) obj.As {
}
}
func ImmPtr(imm unsafe.Pointer) obj.Addr {
return obj.Addr {
Type : obj.TYPE_CONST,
Offset : int64(uintptr(imm)),
}
}
func Imm(imm int64) obj.Addr {
return obj.Addr {
Type : obj.TYPE_CONST,

View File

@@ -72,18 +72,6 @@ func (self *BaseAssembler) NOPn(n int) {
}
}
func (self *BaseAssembler) StorePtr(ptr int64, to obj.Addr, tmp obj.Addr) {
if (to.Type != obj.TYPE_MEM) || (tmp.Type != obj.TYPE_REG) {
panic("must store imm to memory, tmp must be register")
}
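// x86-64 cannot encode a full 64-bit immediate in a MOVQ-to-memory form, so
// pointers that do not fit in a sign-extended 32-bit immediate are staged
// through the tmp register first.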
if (ptr >> 32) != 0 {
self.Emit("MOVQ", Imm(ptr), tmp)
self.Emit("MOVQ", tmp, to)
} else {
self.Emit("MOVQ", Imm(ptr), to);
}
}
func (self *BaseAssembler) Byte(v ...byte) {
for ; len(v) >= 8; v = v[8:] { self.From("QUAD", Imm(rt.Get64(v))) }
for ; len(v) >= 4; v = v[4:] { self.From("LONG", Imm(int64(rt.Get32(v)))) }

View File

@@ -21,6 +21,7 @@ import (
`sync`
_ `unsafe`
`github.com/bytedance/sonic/internal/rt`
`github.com/twitchyliquid64/golang-asm/asm/arch`
`github.com/twitchyliquid64/golang-asm/obj`
`github.com/twitchyliquid64/golang-asm/objabi`
@@ -38,10 +39,6 @@ var (
_progPool sync.Pool
)
//go:nosplit
//go:linkname throw runtime.throw
func throw(_ string)
func newProg() *obj.Prog {
if val := _progPool.Get(); val == nil {
return new(obj.Prog)
@@ -71,7 +68,7 @@ func newLinkContext(arch *obj.LinkArch) (ret *obj.Link) {
}
func diagLinkContext(str string, args ...interface{}) {
throw(fmt.Sprintf(str, args...))
rt.Throw(fmt.Sprintf(str, args...))
}
func (self *Backend) New() (ret *obj.Prog) {

View File

@@ -24,11 +24,6 @@ import (
`github.com/twitchyliquid64/golang-asm/obj`
)
//go:noescape
//go:linkname getitab runtime.getitab
//goland:noinspection ALL
func getitab(inter *rt.GoType, typ *rt.GoType, canfail bool) *rt.GoItab
func Func(f interface{}) obj.Addr {
if p := rt.UnpackEface(f); p.Type.Kind() != reflect.Func {
panic("f is not a function")
@@ -42,7 +37,7 @@ func Type(t reflect.Type) obj.Addr {
}
func Itab(i *rt.GoType, t reflect.Type) obj.Addr {
return Imm(int64(uintptr(unsafe.Pointer(getitab(i, rt.UnpackType(t), false)))))
return Imm(int64(uintptr(unsafe.Pointer(rt.GetItab(rt.IfaceType(i), rt.UnpackType(t), false)))))
}
func Gitab(i *rt.GoItab) obj.Addr {

View File

@@ -1,191 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package avx
import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}

View File

@@ -1,603 +0,0 @@
// +build !noasm !appengine
// Code generated by asm2asm, DO NOT EDIT.
package avx
import (
`github.com/bytedance/sonic/loader`
)
const (
_entry__f32toa = 31136
_entry__f64toa = 192
_entry__format_significand = 35248
_entry__format_integer = 3040
_entry__fsm_exec = 17920
_entry__advance_string = 14320
_entry__advance_string_default = 36640
_entry__do_skip_number = 20528
_entry__get_by_path = 25680
_entry__skip_one_fast = 22160
_entry__html_escape = 8912
_entry__i64toa = 3472
_entry__u64toa = 3584
_entry__lspace = 16
_entry__quote = 4864
_entry__skip_array = 17872
_entry__skip_number = 21792
_entry__skip_object = 20160
_entry__skip_one = 21936
_entry__unquote = 6576
_entry__validate_one = 21984
_entry__validate_utf8 = 29888
_entry__validate_utf8_fast = 30560
_entry__value = 12320
_entry__vnumber = 15648
_entry__atof_eisel_lemire64 = 10160
_entry__atof_native = 11712
_entry__decimal_to_f64 = 10528
_entry__right_shift = 36208
_entry__left_shift = 35712
_entry__vsigned = 17200
_entry__vstring = 14144
_entry__vunsigned = 17520
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 168
_stack__advance_string = 64
_stack__advance_string_default = 64
_stack__do_skip_number = 48
_stack__get_by_path = 280
_stack__skip_one_fast = 176
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 56
_stack__skip_array = 176
_stack__skip_number = 88
_stack__skip_object = 176
_stack__skip_one = 176
_stack__unquote = 88
_stack__validate_one = 176
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 24
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 120
_stack__vunsigned = 8
)
const (
_size__f32toa = 3392
_size__f64toa = 2848
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1692
_size__advance_string = 1280
_size__advance_string_default = 944
_size__do_skip_number = 924
_size__get_by_path = 4208
_size__skip_one_fast = 3016
_size__html_escape = 1248
_size__i64toa = 48
_size__u64toa = 1232
_size__lspace = 144
_size__quote = 1696
_size__skip_array = 48
_size__skip_number = 144
_size__skip_object = 48
_size__skip_one = 48
_size__unquote = 2272
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 544
_size__value = 1316
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1184
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 320
_size__vstring = 128
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3350, 48},
{3351, 40},
{3353, 32},
{3355, 24},
{3357, 16},
{3359, 8},
{3363, 0},
{3385, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2788, 56},
{2792, 48},
{2793, 40},
{2795, 32},
{2797, 24},
{2799, 16},
{2801, 8},
{2805, 0},
{2843, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1342, 104},
{1346, 48},
{1347, 40},
{1349, 32},
{1351, 24},
{1353, 16},
{1355, 8},
{1356, 0},
{1692, 104},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{557, 56},
{561, 48},
{562, 40},
{564, 32},
{566, 24},
{568, 16},
{570, 8},
{571, 0},
{1268, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{552, 64},
{556, 48},
{557, 40},
{559, 32},
{561, 24},
{563, 16},
{565, 8},
{566, 0},
{931, 64},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{849, 48},
{850, 40},
{852, 32},
{854, 24},
{856, 16},
{858, 8},
{859, 0},
{924, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{4012, 104},
{4016, 48},
{4017, 40},
{4019, 32},
{4021, 24},
{4023, 16},
{4025, 8},
{4026, 0},
{4194, 104},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{658, 176},
{659, 168},
{661, 160},
{663, 152},
{665, 144},
{667, 136},
{671, 128},
{3016, 176},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1224, 72},
{1228, 48},
{1229, 40},
{1231, 32},
{1233, 24},
{1235, 16},
{1237, 8},
{1239, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{756, 8},
{757, 0},
{1221, 8},
{1223, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{89, 8},
{90, 0},
{103, 8},
{104, 0},
{111, 8},
{113, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1649, 56},
{1653, 48},
{1654, 40},
{1656, 32},
{1658, 24},
{1660, 16},
{1662, 8},
{1663, 0},
{1690, 56},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1684, 88},
{1688, 48},
{1689, 40},
{1691, 32},
{1693, 24},
{1695, 16},
{1697, 8},
{1698, 0},
{2270, 88},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{247, 24},
{251, 16},
{252, 8},
{253, 0},
{527, 24},
{531, 16},
{532, 8},
{534, 0},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{495, 88},
{499, 48},
{500, 40},
{502, 32},
{504, 24},
{506, 16},
{508, 8},
{509, 0},
{1316, 88},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1547, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1144, 56},
{1148, 48},
{1149, 40},
{1151, 32},
{1153, 24},
{1155, 16},
{1157, 8},
{1158, 0},
{1169, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{112, 16},
{113, 8},
{114, 0},
{125, 16},
{126, 8},
{127, 0},
{260, 16},
{261, 8},
{262, 0},
{266, 16},
{267, 8},
{268, 0},
{306, 16},
{307, 8},
{308, 0},
{316, 16},
{317, 8},
{319, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{71, 8},
{72, 0},
{83, 8},
{84, 0},
{107, 8},
{108, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}

View File

@@ -1,191 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package avx2
import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}

View File

@@ -1,610 +0,0 @@
// +build !noasm !appengine
// Code generated by asm2asm, DO NOT EDIT.
package avx2
import (
`github.com/bytedance/sonic/loader`
)
const (
_entry__f32toa = 34624
_entry__f64toa = 320
_entry__format_significand = 38736
_entry__format_integer = 3168
_entry__fsm_exec = 21072
_entry__advance_ns = 16928
_entry__advance_string = 17664
_entry__advance_string_default = 40160
_entry__do_skip_number = 23696
_entry__get_by_path = 28864
_entry__skip_one_fast = 25936
_entry__html_escape = 10560
_entry__i64toa = 3600
_entry__u64toa = 3712
_entry__lspace = 64
_entry__quote = 5104
_entry__skip_array = 21024
_entry__skip_number = 25392
_entry__skip_object = 23088
_entry__skip_one = 25536
_entry__unquote = 7888
_entry__validate_one = 25584
_entry__validate_utf8 = 31040
_entry__validate_utf8_fast = 31984
_entry__value = 15376
_entry__vnumber = 18800
_entry__atof_eisel_lemire64 = 12624
_entry__atof_native = 14768
_entry__decimal_to_f64 = 13056
_entry__right_shift = 39696
_entry__left_shift = 39200
_entry__vsigned = 20352
_entry__vstring = 17424
_entry__vunsigned = 20672
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 144
_stack__advance_ns = 8
_stack__advance_string = 56
_stack__advance_string_default = 48
_stack__do_skip_number = 48
_stack__get_by_path = 272
_stack__skip_one_fast = 184
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 56
_stack__skip_array = 152
_stack__skip_number = 88
_stack__skip_object = 152
_stack__skip_one = 152
_stack__unquote = 72
_stack__validate_one = 152
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 176
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 112
_stack__vunsigned = 8
)
const (
_size__f32toa = 3392
_size__f64toa = 2848
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1468
_size__advance_ns = 496
_size__advance_string = 1088
_size__advance_string_default = 768
_size__do_skip_number = 1360
_size__get_by_path = 2176
_size__skip_one_fast = 2428
_size__html_escape = 2064
_size__i64toa = 48
_size__u64toa = 1248
_size__lspace = 224
_size__quote = 2736
_size__skip_array = 48
_size__skip_number = 144
_size__skip_object = 48
_size__skip_one = 48
_size__unquote = 2480
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 2608
_size__value = 1004
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1712
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 320
_size__vstring = 144
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3350, 48},
{3351, 40},
{3353, 32},
{3355, 24},
{3357, 16},
{3359, 8},
{3363, 0},
{3385, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2788, 56},
{2792, 48},
{2793, 40},
{2795, 32},
{2797, 24},
{2799, 16},
{2801, 8},
{2805, 0},
{2843, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1157, 88},
{1161, 48},
{1162, 40},
{1164, 32},
{1166, 24},
{1168, 16},
{1170, 8},
{1171, 0},
{1468, 88},
}
_pcsp__advance_ns = [][2]uint32{
{1, 0},
{453, 8},
{457, 0},
{481, 8},
{486, 0},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{433, 56},
{437, 48},
{438, 40},
{440, 32},
{442, 24},
{444, 16},
{446, 8},
{450, 0},
{1078, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{332, 48},
{333, 40},
{335, 32},
{337, 24},
{339, 16},
{341, 8},
{345, 0},
{757, 48},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{1274, 48},
{1275, 40},
{1277, 32},
{1279, 24},
{1281, 16},
{1283, 8},
{1287, 0},
{1360, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2049, 88},
{2053, 48},
{2054, 40},
{2056, 32},
{2058, 24},
{2060, 16},
{2062, 8},
{2063, 0},
{2170, 88},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{418, 176},
{419, 168},
{421, 160},
{423, 152},
{425, 144},
{427, 136},
{431, 128},
{2428, 176},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2045, 72},
{2049, 48},
{2050, 40},
{2052, 32},
{2054, 24},
{2056, 16},
{2058, 8},
{2063, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{758, 8},
{759, 0},
{1225, 8},
{1227, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{184, 8},
{188, 0},
{204, 8},
{208, 0},
{215, 8},
{220, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2687, 56},
{2691, 48},
{2692, 40},
{2694, 32},
{2696, 24},
{2698, 16},
{2700, 8},
{2704, 0},
{2731, 56},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{28, 8},
{34, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{79, 72},
{83, 48},
{84, 40},
{86, 32},
{88, 24},
{90, 16},
{92, 8},
{96, 0},
{2464, 72},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{1738, 176},
{1739, 168},
{1743, 160},
{2018, 176},
{2019, 168},
{2023, 160},
{2600, 176},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{988, 88},
{992, 48},
{993, 40},
{995, 32},
{997, 24},
{999, 16},
{1001, 8},
{1004, 0},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1547, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1673, 56},
{1677, 48},
{1678, 40},
{1680, 32},
{1682, 24},
{1684, 16},
{1686, 8},
{1690, 0},
{1702, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{112, 16},
{113, 8},
{114, 0},
{125, 16},
{126, 8},
{127, 0},
{260, 16},
{261, 8},
{262, 0},
{266, 16},
{267, 8},
{268, 0},
{306, 16},
{307, 8},
{308, 0},
{316, 16},
{317, 8},
{319, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{71, 8},
{72, 0},
{83, 8},
{84, 0},
{107, 8},
{108, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_ns", _entry__advance_ns, _size__advance_ns, _stack__advance_ns, _pcsp__advance_ns},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}

View File

@@ -20,18 +20,13 @@ import (
`unsafe`
`github.com/bytedance/sonic/internal/cpu`
`github.com/bytedance/sonic/internal/native/avx`
`github.com/bytedance/sonic/internal/native/avx2`
`github.com/bytedance/sonic/internal/native/sse`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/bytedance/sonic/loader`
)
const (
MaxFrameSize uintptr = 400
BufPaddingSize int = 64
)
const MaxFrameSize uintptr = 400
var (
S_f64toa uintptr
@@ -42,9 +37,8 @@ var (
)
var (
S_quote uintptr
S_unquote uintptr
S_html_escape uintptr
S_quote uintptr
S_unquote uintptr
)
var (
@@ -56,18 +50,12 @@ var (
)
var (
S_skip_one uintptr
S_skip_one_fast uintptr
S_get_by_path uintptr
S_skip_array uintptr
S_skip_object uintptr
S_skip_number uintptr
)
var (
S_validate_one uintptr
S_validate_utf8 uintptr
S_validate_utf8_fast uintptr
S_skip_one uintptr
S_skip_one_fast uintptr
S_get_by_path uintptr
S_skip_array uintptr
S_skip_object uintptr
S_skip_number uintptr
)
var (
@@ -85,7 +73,7 @@ var (
__GetByPath func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) int
__ValidateOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) int
__ValidateOne func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) int
__I64toa func(out unsafe.Pointer, val int64) (ret int)
@@ -93,9 +81,15 @@ var (
__F64toa func(out unsafe.Pointer, val float64) (ret int)
__F32toa func(out unsafe.Pointer, val float32) (ret int)
__ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__ValidateUTF8Fast func(s unsafe.Pointer) (ret int)
__ParseWithPadding func(parser unsafe.Pointer) (ret int)
__LookupSmallKey func(key unsafe.Pointer, table unsafe.Pointer, lowerOff int) (index int)
)
//go:nosplit
@@ -134,8 +128,8 @@ func GetByPath(s *string, p *int, path *[]interface{}, m *types.StateMachine) in
}
//go:nosplit
func ValidateOne(s *string, p *int, m *types.StateMachine) int {
return __ValidateOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
func ValidateOne(s *string, p *int, m *types.StateMachine, flags uint64) int {
return __ValidateOne(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
@@ -153,6 +147,11 @@ func F64toa(out *byte, val float64) (ret int) {
return __F64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func F32toa(out *byte, val float32) (ret int) {
return __F32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func ValidateUTF8(s *string, p *int, m *types.StateMachine) (ret int) {
return __ValidateUTF8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
@@ -163,51 +162,97 @@ func ValidateUTF8Fast(s *string) (ret int) {
return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s)))
}
var stubs = []loader.GoC{
{"_f64toa", &S_f64toa, &__F64toa},
{"_f32toa", &S_f32toa, nil},
{"_i64toa", &S_i64toa, &__I64toa},
{"_u64toa", &S_u64toa, &__U64toa},
{"_lspace", &S_lspace, nil},
{"_quote", &S_quote, &__Quote},
{"_unquote", &S_unquote, &__Unquote},
{"_html_escape", &S_html_escape, &__HTMLEscape},
{"_value", &S_value, &__Value},
{"_vstring", &S_vstring, nil},
{"_vnumber", &S_vnumber, nil},
{"_vsigned", &S_vsigned, nil},
{"_vunsigned", &S_vunsigned, nil},
{"_skip_one", &S_skip_one, &__SkipOne},
{"_skip_one_fast", &S_skip_one_fast, &__SkipOneFast},
{"_get_by_path", &S_get_by_path, &__GetByPath},
{"_skip_array", &S_skip_array, nil},
{"_skip_object", &S_skip_object, nil},
{"_skip_number", &S_skip_number, nil},
{"_validate_one", &S_validate_one, &__ValidateOne},
{"_validate_utf8", &S_validate_utf8, &__ValidateUTF8},
{"_validate_utf8_fast", &S_validate_utf8_fast, &__ValidateUTF8Fast},
//go:nosplit
func ParseWithPadding(parser unsafe.Pointer) (ret int) {
return __ParseWithPadding(rt.NoEscape(unsafe.Pointer(parser)))
}
func useAVX() {
loader.WrapGoC(avx.Text__native_entry__, avx.Funcs, stubs, "avx", "avx/native.c")
}
func useAVX2() {
loader.WrapGoC(avx2.Text__native_entry__, avx2.Funcs, stubs, "avx2", "avx2/native.c")
//go:nosplit
func LookupSmallKey(key *string, table *[]byte, lowerOff int) (index int) {
return __LookupSmallKey(rt.NoEscape(unsafe.Pointer(key)), rt.NoEscape(unsafe.Pointer(table)), lowerOff)
}
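The two new exports above (ParseWithPadding and LookupSmallKey) follow the same convention as the older wrappers: a package-level stub variable bound by the per-ISA use*() function, and a //go:nosplit Go wrapper that routes every pointer through rt.NoEscape. A minimal sketch of that pattern, with hypothetical names (__Example / Example are illustrative only, not part of this package):

// Hypothetical stub, assigned by useSSE()/useAVX2() just like the real ones.
var __Example func(s unsafe.Pointer, p unsafe.Pointer) (ret int)

//go:nosplit
func Example(s *string, p *int) (ret int) {
    // rt.NoEscape hides the pointers from escape analysis so the arguments
    // can stay on the caller's stack across the assembly call.
    return __Example(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}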
func useSSE() {
loader.WrapGoC(sse.Text__native_entry__, sse.Funcs, stubs, "sse", "sse/native.c")
sse.Use()
S_f64toa = sse.S_f64toa
__F64toa = sse.F_f64toa
S_f32toa = sse.S_f32toa
__F32toa = sse.F_f32toa
S_i64toa = sse.S_i64toa
__I64toa = sse.F_i64toa
S_u64toa = sse.S_u64toa
__U64toa = sse.F_u64toa
S_lspace = sse.S_lspace
S_quote = sse.S_quote
__Quote = sse.F_quote
S_unquote = sse.S_unquote
__Unquote = sse.F_unquote
S_value = sse.S_value
__Value = sse.F_value
S_vstring = sse.S_vstring
S_vnumber = sse.S_vnumber
S_vsigned = sse.S_vsigned
S_vunsigned = sse.S_vunsigned
S_skip_one = sse.S_skip_one
__SkipOne = sse.F_skip_one
__SkipOneFast = sse.F_skip_one_fast
S_skip_array = sse.S_skip_array
S_skip_object = sse.S_skip_object
S_skip_number = sse.S_skip_number
S_get_by_path = sse.S_get_by_path
__GetByPath = sse.F_get_by_path
__HTMLEscape = sse.F_html_escape
__ValidateOne = sse.F_validate_one
__ValidateUTF8 = sse.F_validate_utf8
__ValidateUTF8Fast = sse.F_validate_utf8_fast
__ParseWithPadding = sse.F_parse_with_padding
__LookupSmallKey = sse.F_lookup_small_key
}
func init() {
if cpu.HasAVX2 {
useAVX2()
} else if cpu.HasAVX {
useAVX()
} else if cpu.HasSSE {
useSSE()
} else {
panic("Unsupported CPU, maybe it's too old to run Sonic.")
}
func useAVX2() {
avx2.Use()
S_f64toa = avx2.S_f64toa
__F64toa = avx2.F_f64toa
S_f32toa = avx2.S_f32toa
__F32toa = avx2.F_f32toa
S_i64toa = avx2.S_i64toa
__I64toa = avx2.F_i64toa
S_u64toa = avx2.S_u64toa
__U64toa = avx2.F_u64toa
S_lspace = avx2.S_lspace
S_quote = avx2.S_quote
__Quote = avx2.F_quote
S_unquote = avx2.S_unquote
__Unquote = avx2.F_unquote
S_value = avx2.S_value
__Value = avx2.F_value
S_vstring = avx2.S_vstring
S_vnumber = avx2.S_vnumber
S_vsigned = avx2.S_vsigned
S_vunsigned = avx2.S_vunsigned
S_skip_one = avx2.S_skip_one
__SkipOne = avx2.F_skip_one
__SkipOneFast = avx2.F_skip_one_fast
S_skip_array = avx2.S_skip_array
S_skip_object = avx2.S_skip_object
S_skip_number = avx2.S_skip_number
S_get_by_path = avx2.S_get_by_path
__GetByPath = avx2.F_get_by_path
__HTMLEscape = avx2.F_html_escape
__ValidateOne = avx2.F_validate_one
__ValidateUTF8 = avx2.F_validate_utf8
__ValidateUTF8Fast = avx2.F_validate_utf8_fast
__ParseWithPadding = avx2.F_parse_with_padding
__LookupSmallKey = avx2.F_lookup_small_key
}
func init() {
if cpu.HasAVX2 {
useAVX2()
} else if cpu.HasSSE {
useSSE()
} else {
panic("Unsupported CPU, lacks of AVX2 or SSE CPUID Flag. maybe it's too old to run Sonic.")
}
}
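The init above is a plain best-first CPUID dispatch: AVX2 if available, otherwise SSE, otherwise panic. For orientation only, the same decision expressed as a standalone helper (backendName is a sketch, not part of the package):

func backendName() string {
    switch {
    case cpu.HasAVX2:
        return "avx2" // widest backend wired up in this file
    case cpu.HasSSE:
        return "sse" // baseline x86-64 fallback
    default:
        return "unsupported" // init() panics in this case
    }
}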

View File

@@ -1,140 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {{PACKAGE}}
import (
`encoding/json`
`math`
`math/rand`
`strconv`
`testing`
`github.com/stretchr/testify/assert`
)
func TestFastFloat_Encode(t *testing.T) {
var buf [64]byte
assert.Equal(t, "0" , string(buf[:f64toa(&buf[0], 0)]))
assert.Equal(t, "-0" , string(buf[:f64toa(&buf[0], math.Float64frombits(0x8000000000000000))]))
assert.Equal(t, "12340000000" , string(buf[:f64toa(&buf[0], 1234e7)]))
assert.Equal(t, "12.34" , string(buf[:f64toa(&buf[0], 1234e-2)]))
assert.Equal(t, "0.001234" , string(buf[:f64toa(&buf[0], 1234e-6)]))
assert.Equal(t, "1e+30" , string(buf[:f64toa(&buf[0], 1e30)]))
assert.Equal(t, "1.234e+33" , string(buf[:f64toa(&buf[0], 1234e30)]))
assert.Equal(t, "1.234e+308" , string(buf[:f64toa(&buf[0], 1234e305)]))
assert.Equal(t, "1.234e-317" , string(buf[:f64toa(&buf[0], 1234e-320)]))
assert.Equal(t, "1.7976931348623157e+308" , string(buf[:f64toa(&buf[0], 1.7976931348623157e308)]))
assert.Equal(t, "-12340000000" , string(buf[:f64toa(&buf[0], -1234e7)]))
assert.Equal(t, "-12.34" , string(buf[:f64toa(&buf[0], -1234e-2)]))
assert.Equal(t, "-0.001234" , string(buf[:f64toa(&buf[0], -1234e-6)]))
assert.Equal(t, "-1e+30" , string(buf[:f64toa(&buf[0], -1e30)]))
assert.Equal(t, "-1.234e+33" , string(buf[:f64toa(&buf[0], -1234e30)]))
assert.Equal(t, "-1.234e+308" , string(buf[:f64toa(&buf[0], -1234e305)]))
assert.Equal(t, "-1.234e-317" , string(buf[:f64toa(&buf[0], -1234e-320)]))
assert.Equal(t, "-2.2250738585072014e-308" , string(buf[:f64toa(&buf[0], -2.2250738585072014e-308)]))
}
func TestFastFloat_Random(t *testing.T) {
var buf [64]byte
N := 10000
for i := 0; i < N; i++ {
b64 := uint64(rand.Uint32())<<32 | uint64(rand.Uint32())
f64 := math.Float64frombits(b64)
jout, jerr := json.Marshal(f64)
n := f64toa(&buf[0], f64)
if jerr == nil {
assert.Equal(t, jout, buf[:n])
} else {
assert.True(t, n == 0)
}
f32 := math.Float32frombits(rand.Uint32())
jout, jerr = json.Marshal(f32)
n = f32toa(&buf[0], f32)
if jerr == nil {
assert.Equal(t, jout, buf[:n])
} else {
assert.True(t, n == 0)
}
}
}
func BenchmarkParseFloat64(b *testing.B) {
var f64toaBenches = []struct {
name string
float float64
}{
{"Zero", 0},
{"Decimal", 33909},
{"Float", 339.7784},
{"Exp", -5.09e75},
{"NegExp", -5.11e-95},
{"LongExp", 1.234567890123456e-78},
{"Big", 123456789123456789123456789},
}
for _, c := range f64toaBenches {
f64bench := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], c.float, 'g', -1, 64) }},
}, {
name: "FastFloat",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f64toa(&buf[0], c.float) }},
}}
for _, bm := range f64bench {
name := bm.name + "_" + c.name
b.Run(name, bm.test)
}
}
}
func BenchmarkParseFloat32(b *testing.B) {
var f32toaBenches = []struct {
name string
float float32
}{
{"Zero", 0},
{"Integer", 33909},
{"ExactFraction", 3.375},
{"Point", 339.7784},
{"Exp", -5.09e25},
{"NegExp", -5.11e-25},
{"Shortest", 1.234567e-8},
}
for _, c := range f32toaBenches {
bench := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib32",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { strconv.AppendFloat(buf[:0], float64(c.float), 'g', -1, 32) }},
}, {
name: "FastFloat32",
test: func(b *testing.B) { var buf [64]byte; for i := 0; i < b.N; i++ { f32toa(&buf[0], c.float) }},
}}
for _, bm := range bench {
name := bm.name + "_" + c.name
b.Run(name, bm.test)
}
}
}

View File

@@ -1,153 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {{PACKAGE}}
import (
`strconv`
`testing`
`fmt`
`github.com/stretchr/testify/assert`
)
func TestFastInt_IntToString(t *testing.T) {
var buf [32]byte
assert.Equal(t, "0" , string(buf[:i64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:i64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:i64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:i64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:i64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:i64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:i64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:i64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:i64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:i64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:i64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:i64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:i64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:i64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:i64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:i64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:i64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:i64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:i64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:i64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "9223372036854775807" , string(buf[:i64toa(&buf[0], 9223372036854775807)]))
assert.Equal(t, "-1" , string(buf[:i64toa(&buf[0], -1)]))
assert.Equal(t, "-12" , string(buf[:i64toa(&buf[0], -12)]))
assert.Equal(t, "-123" , string(buf[:i64toa(&buf[0], -123)]))
assert.Equal(t, "-1234" , string(buf[:i64toa(&buf[0], -1234)]))
assert.Equal(t, "-12345" , string(buf[:i64toa(&buf[0], -12345)]))
assert.Equal(t, "-123456" , string(buf[:i64toa(&buf[0], -123456)]))
assert.Equal(t, "-1234567" , string(buf[:i64toa(&buf[0], -1234567)]))
assert.Equal(t, "-12345678" , string(buf[:i64toa(&buf[0], -12345678)]))
assert.Equal(t, "-123456789" , string(buf[:i64toa(&buf[0], -123456789)]))
assert.Equal(t, "-1234567890" , string(buf[:i64toa(&buf[0], -1234567890)]))
assert.Equal(t, "-12345678901" , string(buf[:i64toa(&buf[0], -12345678901)]))
assert.Equal(t, "-123456789012" , string(buf[:i64toa(&buf[0], -123456789012)]))
assert.Equal(t, "-1234567890123" , string(buf[:i64toa(&buf[0], -1234567890123)]))
assert.Equal(t, "-12345678901234" , string(buf[:i64toa(&buf[0], -12345678901234)]))
assert.Equal(t, "-123456789012345" , string(buf[:i64toa(&buf[0], -123456789012345)]))
assert.Equal(t, "-1234567890123456" , string(buf[:i64toa(&buf[0], -1234567890123456)]))
assert.Equal(t, "-12345678901234567" , string(buf[:i64toa(&buf[0], -12345678901234567)]))
assert.Equal(t, "-123456789012345678" , string(buf[:i64toa(&buf[0], -123456789012345678)]))
assert.Equal(t, "-1234567890123456789" , string(buf[:i64toa(&buf[0], -1234567890123456789)]))
assert.Equal(t, "-9223372036854775808" , string(buf[:i64toa(&buf[0], -9223372036854775808)]))
}
func TestFastInt_UintToString(t *testing.T) {
var buf [32]byte
assert.Equal(t, "0" , string(buf[:u64toa(&buf[0], 0)]))
assert.Equal(t, "1" , string(buf[:u64toa(&buf[0], 1)]))
assert.Equal(t, "12" , string(buf[:u64toa(&buf[0], 12)]))
assert.Equal(t, "123" , string(buf[:u64toa(&buf[0], 123)]))
assert.Equal(t, "1234" , string(buf[:u64toa(&buf[0], 1234)]))
assert.Equal(t, "12345" , string(buf[:u64toa(&buf[0], 12345)]))
assert.Equal(t, "123456" , string(buf[:u64toa(&buf[0], 123456)]))
assert.Equal(t, "1234567" , string(buf[:u64toa(&buf[0], 1234567)]))
assert.Equal(t, "12345678" , string(buf[:u64toa(&buf[0], 12345678)]))
assert.Equal(t, "123456789" , string(buf[:u64toa(&buf[0], 123456789)]))
assert.Equal(t, "1234567890" , string(buf[:u64toa(&buf[0], 1234567890)]))
assert.Equal(t, "12345678901" , string(buf[:u64toa(&buf[0], 12345678901)]))
assert.Equal(t, "123456789012" , string(buf[:u64toa(&buf[0], 123456789012)]))
assert.Equal(t, "1234567890123" , string(buf[:u64toa(&buf[0], 1234567890123)]))
assert.Equal(t, "12345678901234" , string(buf[:u64toa(&buf[0], 12345678901234)]))
assert.Equal(t, "123456789012345" , string(buf[:u64toa(&buf[0], 123456789012345)]))
assert.Equal(t, "1234567890123456" , string(buf[:u64toa(&buf[0], 1234567890123456)]))
assert.Equal(t, "12345678901234567" , string(buf[:u64toa(&buf[0], 12345678901234567)]))
assert.Equal(t, "123456789012345678" , string(buf[:u64toa(&buf[0], 123456789012345678)]))
assert.Equal(t, "1234567890123456789" , string(buf[:u64toa(&buf[0], 1234567890123456789)]))
assert.Equal(t, "12345678901234567890" , string(buf[:u64toa(&buf[0], 12345678901234567890)]))
assert.Equal(t, "18446744073709551615" , string(buf[:u64toa(&buf[0], 18446744073709551615)]))
}
func BenchmarkFastInt_IntToString(b *testing.B) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], int64(i), 10) }},
}, {
name: "StdLib-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendInt(buf[:0], -int64(i), 10) }},
}, {
name: "FastInt-Positive",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], int64(i)) }},
}, {
name: "FastInt-Negative",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { i64toa(&buf[0], -int64(i)) }},
}}
for _, bm := range benchmarks {
b.Run(bm.name, bm.test)
}
}
type utoaBench struct {
name string
num uint64
}
func BenchmarkFastInt_UintToString(b *testing.B) {
maxUint := "18446744073709551615"
benchs := make([]utoaBench, len(maxUint) + 1)
benchs[0].name = "Zero"
benchs[0].num = 0
for i := 1; i <= len(maxUint); i++ {
benchs[i].name = strconv.FormatInt(int64(i), 10) + "-Digs"
benchs[i].num, _ = strconv.ParseUint(string(maxUint[:i]), 10, 64)
}
for _, t := range(benchs) {
benchmarks := []struct {
name string
test func(*testing.B)
}{{
name: "StdLib",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { strconv.AppendUint(buf[:0], t.num, 10) }},
}, {
name: "FastInt",
test: func(b *testing.B) { var buf [32]byte; for i := 0; i < b.N; i++ { u64toa(&buf[0], t.num) }},
}}
for _, bm := range benchmarks {
name := fmt.Sprintf("%s_%s", bm.name, t.name)
b.Run(name, bm.test)
}
}
}

View File

@@ -1,189 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {{PACKAGE}}
import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}

View File

@@ -1,625 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package {{PACKAGE}}
import (
`encoding/hex`
`fmt`
`math`
`strings`
`testing`
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
`github.com/davecgh/go-spew/spew`
`github.com/stretchr/testify/assert`
`github.com/stretchr/testify/require`
)
func TestNative_Value(t *testing.T) {
var v types.JsonState
s := ` -12345`
p := (*rt.GoString)(unsafe.Pointer(&s))
x := value(p.Ptr, p.Len, 0, &v, 0)
assert.Equal(t, 9, x)
assert.Equal(t, types.V_INTEGER, v.Vt)
assert.Equal(t, int64(-12345), v.Iv)
assert.Equal(t, 3, v.Ep)
}
func TestNative_Value_OutOfBound(t *testing.T) {
var v types.JsonState
mem := []byte{'"', '"'}
s := rt.Mem2Str(mem[:1])
p := (*rt.GoString)(unsafe.Pointer(&s))
x := value(p.Ptr, p.Len, 0, &v, 0)
assert.Equal(t, 1, x)
assert.Equal(t, -int(types.ERR_EOF), int(v.Vt))
}
func TestNative_Quote(t *testing.T) {
s := "hello\b\f\n\r\t\\\"\u666fworld"
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
assert.Equal(t, len(s), rv)
assert.Equal(t, 35, len(d))
assert.Equal(t, `hello\u0008\u000c\n\r\t\\\"景world`, string(d))
}
func TestNative_QuoteNoMem(t *testing.T) {
s := "hello\b\f\n\r\t\\\"\u666fworld"
d := make([]byte, 10)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, 0)
assert.Equal(t, -6, rv)
assert.Equal(t, 5, len(d))
assert.Equal(t, `hello`, string(d))
}
func TestNative_DoubleQuote(t *testing.T) {
s := "hello\b\f\n\r\t\\\"\u666fworld"
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := quote(sp.Ptr, sp.Len, dp.Ptr, &dp.Len, types.F_DOUBLE_UNQUOTE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
assert.Equal(t, len(s), rv)
assert.Equal(t, 44, len(d))
assert.Equal(t, `hello\\u0008\\u000c\\n\\r\\t\\\\\\\"景world`, string(d))
}
func TestNative_Unquote(t *testing.T) {
s := `hello\b\f\n\r\t\\\"\u2333world`
d := make([]byte, 0, len(s))
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
dp.Len = rv
assert.Equal(t, -1, ep)
assert.Equal(t, "hello\b\f\n\r\t\\\"\u2333world", string(d))
}
func TestNative_UnquoteError(t *testing.T) {
s := `asdf\`
d := make([]byte, 0, len(s))
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_EOF), rv)
assert.Equal(t, 5, ep)
s = `asdf\gqwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_ESCAPE), rv)
assert.Equal(t, 5, ep)
s = `asdf\u1gggqwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_CHAR), rv)
assert.Equal(t, 7, ep)
s = `asdf\ud800qwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 6, ep)
s = `asdf\\ud800qwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 7, ep)
s = `asdf\ud800\ud800qwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, 0)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 12, ep)
s = `asdf\\ud800\\ud800qwer`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
assert.Equal(t, -int(types.ERR_INVALID_UNICODE), rv)
assert.Equal(t, 14, ep)
}
func TestNative_DoubleUnquote(t *testing.T) {
s := `hello\\b\\f\\n\\r\\t\\\\\\\"\\u2333world`
d := make([]byte, 0, len(s))
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_DOUBLE_UNQUOTE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
dp.Len = rv
assert.Equal(t, -1, ep)
assert.Equal(t, "hello\b\f\n\r\t\\\"\u2333world", string(d))
}
func TestNative_UnquoteUnicodeReplacement(t *testing.T) {
s := `hello\ud800world`
d := make([]byte, 0, len(s))
ep := -1
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
dp.Len = rv
assert.Equal(t, -1, ep)
assert.Equal(t, "hello\ufffdworld", string(d))
s = `hello\ud800\ud800world`
d = make([]byte, 0, len(s))
ep = -1
dp = (*rt.GoSlice)(unsafe.Pointer(&d))
sp = (*rt.GoString)(unsafe.Pointer(&s))
rv = unquote(sp.Ptr, sp.Len, dp.Ptr, &ep, types.F_UNICODE_REPLACE)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
dp.Len = rv
assert.Equal(t, -1, ep)
assert.Equal(t, "hello\ufffd\ufffdworld", string(d))
}
func TestNative_HTMLEscape(t *testing.T) {
s := "hello\u2029\u2028<&>world"
d := make([]byte, 256)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
if rv < 0 {
require.NoError(t, types.ParsingError(-rv))
}
assert.Equal(t, len(s), rv)
assert.Equal(t, 40, len(d))
assert.Equal(t, `hello\u2029\u2028\u003c\u0026\u003eworld`, string(d))
}
func TestNative_HTMLEscapeNoMem(t *testing.T) {
s := "hello\u2029\u2028<&>world"
d := make([]byte, 10)
dp := (*rt.GoSlice)(unsafe.Pointer(&d))
sp := (*rt.GoString)(unsafe.Pointer(&s))
rv := html_escape(sp.Ptr, sp.Len, dp.Ptr, &dp.Len)
assert.Equal(t, -6, rv)
assert.Equal(t, 5, len(d))
assert.Equal(t, `hello`, string(d))
}
func TestNative_Vstring(t *testing.T) {
var v types.JsonState
i := 0
s := `test"test\n2"`
vstring(&s, &i, &v, 0)
assert.Equal(t, 5, i)
assert.Equal(t, -1, v.Ep)
assert.Equal(t, int64(0), v.Iv)
vstring(&s, &i, &v, 0)
assert.Equal(t, 13, i)
assert.Equal(t, 9, v.Ep)
assert.Equal(t, int64(5), v.Iv)
}
func TestNative_Vstring_ValidUnescapedChars(t *testing.T) {
var v types.JsonState
valid := uint64(types.F_VALIDATE_STRING)
i := 0
s := "test\x1f\""
vstring(&s, &i, &v, valid)
assert.Equal(t, -int(types.ERR_INVALID_CHAR), int(v.Vt))
}
func TestNative_VstringEscapeEOF(t *testing.T) {
var v types.JsonState
i := 0
s := `xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\"xxxxxxxxxxxxxxxxxxxxxxxxxxxxx"x`
vstring(&s, &i, &v, 0)
assert.Equal(t, 95, i)
assert.Equal(t, 63, v.Ep)
assert.Equal(t, int64(0), v.Iv)
}
func TestNative_VstringHangUpOnRandomData(t *testing.T) {
v, e := hex.DecodeString(
"228dc61efd54ef80a908fb6026b7f2d5f92a257ba8b347c995f259eb8685376a" +
"8c4500262d9c308b3f3ec2577689cf345d9f86f9b5d18d3e463bec5c22df2d2e" +
"4506010eba1dae7278",
)
assert.Nil(t, e)
p := 1
s := rt.Mem2Str(v)
var js types.JsonState
vstring(&s, &p, &js, 0)
fmt.Printf("js: %s\n", spew.Sdump(js))
}
func TestNative_Vnumber(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
vnumber(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "1.234"
vnumber(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 1.234, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "1.234e5"
vnumber(&s, &i, &v)
assert.Equal(t, 7, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 1.234e5, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "0.0125"
vnumber(&s, &i, &v)
assert.Equal(t, 6, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 0.0125, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "100000000000000000000"
vnumber(&s, &i, &v)
assert.Equal(t, 21, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 100000000000000000000.0, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "999999999999999900000"
vnumber(&s, &i, &v)
assert.Equal(t, 21, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, 999999999999999900000.0, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
i = 0
s = "-1.234"
vnumber(&s, &i, &v)
assert.Equal(t, 6, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, -1.234, v.Dv)
assert.Equal(t, types.V_DOUBLE, v.Vt)
}
func TestNative_Vsigned(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
vsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "-1234"
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(-1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "9223372036854775807"
vsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(math.MaxInt64), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "-9223372036854775808"
vsigned(&s, &i, &v)
assert.Equal(t, 20, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(math.MinInt64), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "9223372036854775808"
vsigned(&s, &i, &v)
assert.Equal(t, 18, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "-9223372036854775809"
vsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "1.234"
vsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "0.0125"
vsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e5"
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e-5"
vsigned(&s, &i, &v)
assert.Equal(t, 5, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
}
func TestNative_Vunsigned(t *testing.T) {
var v types.JsonState
i := 0
s := "1234"
vunsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, int64(1234), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "18446744073709551615"
vunsigned(&s, &i, &v)
assert.Equal(t, 20, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, ^int64(0), v.Iv)
assert.Equal(t, types.V_INTEGER, v.Vt)
i = 0
s = "18446744073709551616"
vunsigned(&s, &i, &v)
assert.Equal(t, 19, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INTEGER_OVERFLOW)), v.Vt)
i = 0
s = "-1234"
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "1.234"
vunsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "0.0125"
vunsigned(&s, &i, &v)
assert.Equal(t, 1, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "1234e5"
vunsigned(&s, &i, &v)
assert.Equal(t, 4, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1234e5"
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1.234e5"
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
i = 0
s = "-1.234e-5"
vunsigned(&s, &i, &v)
assert.Equal(t, 0, i)
assert.Equal(t, 0, v.Ep)
assert.Equal(t, types.ValueType(-int(types.ERR_INVALID_NUMBER_FMT)), v.Vt)
}
func TestNative_SkipOne(t *testing.T) {
p := 0
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 42, p)
assert.Equal(t, 1, q)
p = 0
s = `1 2.5 -3 "asdf\nqwer" true false null {} []`
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 1, p)
assert.Equal(t, 0, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 5, p)
assert.Equal(t, 2, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 8, p)
assert.Equal(t, 6, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 21, p)
assert.Equal(t, 9, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 26, p)
assert.Equal(t, 22, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 32, p)
assert.Equal(t, 27, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 37, p)
assert.Equal(t, 33, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 40, p)
assert.Equal(t, 38, q)
q = skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, 43, p)
assert.Equal(t, 41, q)
}
func TestNative_SkipOne_Error(t *testing.T) {
for _, s := range([]string{
"-", "+", "0.", "0. ", "+1", "0.0e ", "9e+", "0e-",
"tru", "fals", "nul", "trux", "fals ",
`"asdf`, `"\\\"`,
}) {
p := 0
q := skip_one(&s, &p, &types.StateMachine{}, uint64(0))
assert.True(t, q < 0)
}
}
func TestNative_SkipArray(t *testing.T) {
p := 0
s := `null, true, false, 1, 2.0, -3, {"asdf": "wqer"}],`
skip_array(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, p, 48)
}
func TestNative_SkipObject(t *testing.T) {
p := 0
s := `"asdf": "wqer"},`
skip_object(&s, &p, &types.StateMachine{}, uint64(0))
assert.Equal(t, p, 15)
}
func TestNative_SkipNumber(t *testing.T) {
p := 0
s := `-1.23e+12`
q := skip_number(&s, &p)
assert.Equal(t, 9, p)
assert.Equal(t, 0, q)
}
func TestNative_SkipOneFast(t *testing.T) {
p := 0
s := ` {"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
q := skip_one_fast(&s, &p)
assert.Equal(t, 42, p)
assert.Equal(t, 1, q)
p = 0
s = `1, 2.5, -3, "asdf\nqwer", true, false, null, {}, [],`
q = skip_one_fast(&s, &p)
assert.Equal(t, 1, p)
assert.Equal(t, 0, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 6, p)
assert.Equal(t, 3, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 10, p)
assert.Equal(t, 8, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 24, p)
assert.Equal(t, 12, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 30, p)
assert.Equal(t, 26, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 37, p)
assert.Equal(t, 32, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 43, p)
assert.Equal(t, 39, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 47, p)
assert.Equal(t, 45, q)
p += 1
q = skip_one_fast(&s, &p)
assert.Equal(t, 51, p)
assert.Equal(t, 49, q)
}
func TestNative_SkipOneFast_Error(t *testing.T) {
for _, s := range([]string{
"{{", "[{", "{{}",
`"asdf`, `"\\\"`,
}) {
p := 0
q := skip_one_fast(&s, &p)
assert.True(t, q < 0)
}
}
func TestNative_GetByPath(t *testing.T) {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
path := []interface{}{"asdf", 4}
ret := get_by_path(&s, &p, &path, types.NewStateMachine())
assert.Equal(t, strings.Index(s, "2.0"), ret)
}
func BenchmarkNative_SkipOneFast(b *testing.B) {
b.ResetTimer()
for i:=0; i<b.N; i++ {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
_ = skip_one_fast(&s, &p)
}
}
func BenchmarkNative_GetByPath(b *testing.B) {
b.ResetTimer()
for i:=0; i<b.N; i++ {
s := `{"asdf": [null, true, false, 1, 2.0, -3]}, 1234.5`
p := 0
path := []interface{}{"asdf", 3}
sm := types.NewStateMachine()
_ = get_by_path(&s, &p, &path, sm)
types.FreeStateMachine(sm)
}
}

View File

@@ -1,191 +0,0 @@
// Code generated by Makefile, DO NOT EDIT.
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sse
import (
`unsafe`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
var (
__i64toa func(out unsafe.Pointer, val int64) (ret int)
__u64toa func(out unsafe.Pointer, val uint64) (ret int)
__f64toa func(out unsafe.Pointer, val float64) (ret int)
__f32toa func(out unsafe.Pointer, val float32) (ret int)
__lspace func(sp unsafe.Pointer, nb int, off int) (ret int)
__quote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer, flags uint64) (ret int)
__html_escape func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn unsafe.Pointer) (ret int)
__unquote func(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep unsafe.Pointer, flags uint64) (ret int)
__value func(s unsafe.Pointer, n int, p int, v unsafe.Pointer, flags uint64) (ret int)
__vstring func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer, flags uint64)
__vnumber func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__vunsigned func(s unsafe.Pointer, p unsafe.Pointer, v unsafe.Pointer)
__skip_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_one_fast func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__skip_array func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_object func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer, flags uint64) (ret int)
__skip_number func(s unsafe.Pointer, p unsafe.Pointer) (ret int)
__validate_one func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__get_by_path func(s unsafe.Pointer, p unsafe.Pointer, path unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)
__validate_utf8_fast func(s unsafe.Pointer) (ret int)
__fsm_exec func(m unsafe.Pointer, s unsafe.Pointer, p unsafe.Pointer, flags uint64) (ret int)
)
//go:nosplit
func i64toa(out *byte, val int64) (ret int) {
return __i64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func u64toa(out *byte, val uint64) (ret int) {
return __u64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f64toa(out *byte, val float64) (ret int) {
return __f64toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func f32toa(out *byte, val float32) (ret int) {
return __f32toa(rt.NoEscape(unsafe.Pointer(out)), val)
}
//go:nosplit
func lspace(sp unsafe.Pointer, nb int, off int) (ret int) {
return __lspace(rt.NoEscape(sp), nb, off)
}
//go:nosplit
func quote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int, flags uint64) (ret int) {
return __quote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)), flags)
}
//go:nosplit
func html_escape(sp unsafe.Pointer, nb int, dp unsafe.Pointer, dn *int) (ret int) {
return __html_escape(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(dn)))
}
//go:nosplit
func unquote(sp unsafe.Pointer, nb int, dp unsafe.Pointer, ep *int, flags uint64) (ret int) {
return __unquote(rt.NoEscape(unsafe.Pointer(sp)), nb, rt.NoEscape(unsafe.Pointer(dp)), rt.NoEscape(unsafe.Pointer(ep)), flags)
}
//go:nosplit
func value(s unsafe.Pointer, n int, p int, v *types.JsonState, flags uint64) (ret int) {
return __value(rt.NoEscape(unsafe.Pointer(s)), n, p, rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vstring(s *string, p *int, v *types.JsonState, flags uint64) {
__vstring(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)), flags)
}
//go:nosplit
func vnumber(s *string, p *int, v *types.JsonState) {
__vnumber(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vsigned(s *string, p *int, v *types.JsonState) {
__vsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func vunsigned(s *string, p *int, v *types.JsonState) {
__vunsigned(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(v)))
}
//go:nosplit
func skip_one(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_one_fast(s *string, p *int) (ret int) {
return __skip_one_fast(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func skip_array(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_array(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_object(s *string, p *int, m *types.StateMachine, flags uint64) (ret int) {
return __skip_object(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)), flags)
}
//go:nosplit
func skip_number(s *string, p *int) (ret int) {
return __skip_number(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)))
}
//go:nosplit
func validate_one(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_one(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func get_by_path(s *string, p *int, path *[]interface{}, m *types.StateMachine) (ret int) {
return __get_by_path(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(path)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8(s *string, p *int, m *types.StateMachine) (ret int) {
return __validate_utf8(rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), rt.NoEscape(unsafe.Pointer(m)))
}
//go:nosplit
func validate_utf8_fast(s *string) (ret int) {
return __validate_utf8_fast(rt.NoEscape(unsafe.Pointer(s)))
}
//go:nosplit
func fsm_exec(m *types.StateMachine, s *string, p *int, flags uint64) (ret int) {
return __fsm_exec(rt.NoEscape(unsafe.Pointer(m)), rt.NoEscape(unsafe.Pointer(s)), rt.NoEscape(unsafe.Pointer(p)), flags)
}

View File

@@ -1,604 +0,0 @@
// +build !noasm !appengine
// Code generated by asm2asm, DO NOT EDIT.
package sse
import (
`github.com/bytedance/sonic/loader`
)
const (
_entry__f32toa = 31616
_entry__f64toa = 160
_entry__format_significand = 35888
_entry__format_integer = 2960
_entry__fsm_exec = 18016
_entry__advance_string = 14352
_entry__advance_string_default = 37280
_entry__do_skip_number = 20608
_entry__get_by_path = 26176
_entry__skip_one_fast = 22272
_entry__html_escape = 8912
_entry__i64toa = 3392
_entry__u64toa = 3520
_entry__lspace = 16
_entry__quote = 4832
_entry__skip_array = 17984
_entry__skip_number = 21904
_entry__skip_object = 20256
_entry__skip_one = 22048
_entry__unquote = 6576
_entry__validate_one = 22096
_entry__validate_utf8 = 30384
_entry__validate_utf8_fast = 31056
_entry__value = 12352
_entry__vnumber = 15744
_entry__atof_eisel_lemire64 = 10192
_entry__atof_native = 11744
_entry__decimal_to_f64 = 10560
_entry__right_shift = 36848
_entry__left_shift = 36352
_entry__vsigned = 17296
_entry__vstring = 14176
_entry__vunsigned = 17632
)
const (
_stack__f32toa = 48
_stack__f64toa = 80
_stack__format_significand = 24
_stack__format_integer = 16
_stack__fsm_exec = 168
_stack__advance_string = 64
_stack__advance_string_default = 64
_stack__do_skip_number = 48
_stack__get_by_path = 272
_stack__skip_one_fast = 136
_stack__html_escape = 72
_stack__i64toa = 16
_stack__u64toa = 8
_stack__lspace = 8
_stack__quote = 64
_stack__skip_array = 176
_stack__skip_number = 88
_stack__skip_object = 176
_stack__skip_one = 176
_stack__unquote = 88
_stack__validate_one = 176
_stack__validate_utf8 = 48
_stack__validate_utf8_fast = 24
_stack__value = 328
_stack__vnumber = 240
_stack__atof_eisel_lemire64 = 32
_stack__atof_native = 136
_stack__decimal_to_f64 = 80
_stack__right_shift = 8
_stack__left_shift = 24
_stack__vsigned = 16
_stack__vstring = 120
_stack__vunsigned = 8
)
const (
_size__f32toa = 3328
_size__f64toa = 2800
_size__format_significand = 464
_size__format_integer = 432
_size__fsm_exec = 1692
_size__advance_string = 1344
_size__advance_string_default = 960
_size__do_skip_number = 956
_size__get_by_path = 4208
_size__skip_one_fast = 3404
_size__html_escape = 1280
_size__i64toa = 48
_size__u64toa = 1264
_size__lspace = 128
_size__quote = 1728
_size__skip_array = 32
_size__skip_number = 144
_size__skip_object = 32
_size__skip_one = 48
_size__unquote = 2272
_size__validate_one = 48
_size__validate_utf8 = 672
_size__validate_utf8_fast = 544
_size__value = 1316
_size__vnumber = 1552
_size__atof_eisel_lemire64 = 368
_size__atof_native = 608
_size__decimal_to_f64 = 1184
_size__right_shift = 400
_size__left_shift = 496
_size__vsigned = 336
_size__vstring = 128
_size__vunsigned = 336
)
var (
_pcsp__f32toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{3286, 48},
{3287, 40},
{3289, 32},
{3291, 24},
{3293, 16},
{3295, 8},
{3296, 0},
{3318, 48},
}
_pcsp__f64toa = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{2740, 56},
{2744, 48},
{2745, 40},
{2747, 32},
{2749, 24},
{2751, 16},
{2753, 8},
{2754, 0},
{2792, 56},
}
_pcsp__format_significand = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{452, 24},
{453, 16},
{455, 8},
{457, 0},
}
_pcsp__format_integer = [][2]uint32{
{1, 0},
{4, 8},
{412, 16},
{413, 8},
{414, 0},
{423, 16},
{424, 8},
{426, 0},
}
_pcsp__fsm_exec = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1342, 104},
{1346, 48},
{1347, 40},
{1349, 32},
{1351, 24},
{1353, 16},
{1355, 8},
{1356, 0},
{1692, 104},
}
_pcsp__advance_string = [][2]uint32{
{14, 0},
{18, 8},
{20, 16},
{22, 24},
{24, 32},
{26, 40},
{27, 48},
{614, 56},
{618, 48},
{619, 40},
{621, 32},
{623, 24},
{625, 16},
{627, 8},
{628, 0},
{1339, 56},
}
_pcsp__advance_string_default = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{576, 64},
{580, 48},
{581, 40},
{583, 32},
{585, 24},
{587, 16},
{589, 8},
{590, 0},
{955, 64},
}
_pcsp__do_skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{881, 48},
{882, 40},
{884, 32},
{886, 24},
{888, 16},
{890, 8},
{891, 0},
{956, 48},
}
_pcsp__get_by_path = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{4012, 104},
{4016, 48},
{4017, 40},
{4019, 32},
{4021, 24},
{4023, 16},
{4025, 8},
{4026, 0},
{4194, 104},
}
_pcsp__skip_one_fast = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{658, 136},
{662, 48},
{663, 40},
{665, 32},
{667, 24},
{669, 16},
{671, 8},
{672, 0},
{3404, 136},
}
_pcsp__html_escape = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1256, 72},
{1260, 48},
{1261, 40},
{1263, 32},
{1265, 24},
{1267, 16},
{1269, 8},
{1271, 0},
}
_pcsp__i64toa = [][2]uint32{
{14, 0},
{34, 8},
{36, 0},
}
_pcsp__u64toa = [][2]uint32{
{1, 0},
{161, 8},
{162, 0},
{457, 8},
{458, 0},
{772, 8},
{773, 0},
{1249, 8},
{1251, 0},
}
_pcsp__lspace = [][2]uint32{
{1, 0},
{89, 8},
{90, 0},
{103, 8},
{104, 0},
{111, 8},
{113, 0},
}
_pcsp__quote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1681, 64},
{1685, 48},
{1686, 40},
{1688, 32},
{1690, 24},
{1692, 16},
{1694, 8},
{1695, 0},
{1722, 64},
}
_pcsp__skip_array = [][2]uint32{
{1, 0},
{26, 8},
{32, 0},
}
_pcsp__skip_number = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{100, 40},
{101, 32},
{103, 24},
{105, 16},
{107, 8},
{108, 0},
{139, 40},
}
_pcsp__skip_object = [][2]uint32{
{1, 0},
{26, 8},
{32, 0},
}
_pcsp__skip_one = [][2]uint32{
{1, 0},
{30, 8},
{36, 0},
}
_pcsp__unquote = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1684, 88},
{1688, 48},
{1689, 40},
{1691, 32},
{1693, 24},
{1695, 16},
{1697, 8},
{1698, 0},
{2270, 88},
}
_pcsp__validate_one = [][2]uint32{
{1, 0},
{35, 8},
{41, 0},
}
_pcsp__validate_utf8 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{623, 48},
{627, 40},
{628, 32},
{630, 24},
{632, 16},
{634, 8},
{635, 0},
{666, 48},
}
_pcsp__validate_utf8_fast = [][2]uint32{
{1, 0},
{4, 8},
{5, 16},
{247, 24},
{251, 16},
{252, 8},
{253, 0},
{527, 24},
{531, 16},
{532, 8},
{534, 0},
}
_pcsp__value = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{495, 88},
{499, 48},
{500, 40},
{502, 32},
{504, 24},
{506, 16},
{508, 8},
{509, 0},
{1316, 88},
}
_pcsp__vnumber = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{803, 104},
{807, 48},
{808, 40},
{810, 32},
{812, 24},
{814, 16},
{816, 8},
{817, 0},
{1551, 104},
}
_pcsp__atof_eisel_lemire64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{292, 32},
{293, 24},
{295, 16},
{297, 8},
{298, 0},
{362, 32},
}
_pcsp__atof_native = [][2]uint32{
{1, 0},
{4, 8},
{587, 56},
{591, 8},
{593, 0},
}
_pcsp__decimal_to_f64 = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{12, 40},
{13, 48},
{1144, 56},
{1148, 48},
{1149, 40},
{1151, 32},
{1153, 24},
{1155, 16},
{1157, 8},
{1158, 0},
{1169, 56},
}
_pcsp__right_shift = [][2]uint32{
{1, 0},
{318, 8},
{319, 0},
{387, 8},
{388, 0},
{396, 8},
{398, 0},
}
_pcsp__left_shift = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{363, 24},
{364, 16},
{366, 8},
{367, 0},
{470, 24},
{471, 16},
{473, 8},
{474, 0},
{486, 24},
}
_pcsp__vsigned = [][2]uint32{
{1, 0},
{4, 8},
{119, 16},
{120, 8},
{121, 0},
{132, 16},
{133, 8},
{134, 0},
{276, 16},
{277, 8},
{278, 0},
{282, 16},
{283, 8},
{284, 0},
{322, 16},
{323, 8},
{324, 0},
{332, 16},
{333, 8},
{335, 0},
}
_pcsp__vstring = [][2]uint32{
{1, 0},
{4, 8},
{6, 16},
{8, 24},
{10, 32},
{11, 40},
{105, 56},
{109, 40},
{110, 32},
{112, 24},
{114, 16},
{116, 8},
{118, 0},
}
_pcsp__vunsigned = [][2]uint32{
{1, 0},
{78, 8},
{79, 0},
{90, 8},
{91, 0},
{114, 8},
{115, 0},
{273, 8},
{274, 0},
{312, 8},
{313, 0},
{320, 8},
{322, 0},
}
)
var Funcs = []loader.CFunc{
{"__native_entry__", 0, 67, 0, nil},
{"_f32toa", _entry__f32toa, _size__f32toa, _stack__f32toa, _pcsp__f32toa},
{"_f64toa", _entry__f64toa, _size__f64toa, _stack__f64toa, _pcsp__f64toa},
{"_format_significand", _entry__format_significand, _size__format_significand, _stack__format_significand, _pcsp__format_significand},
{"_format_integer", _entry__format_integer, _size__format_integer, _stack__format_integer, _pcsp__format_integer},
{"_fsm_exec", _entry__fsm_exec, _size__fsm_exec, _stack__fsm_exec, _pcsp__fsm_exec},
{"_advance_string", _entry__advance_string, _size__advance_string, _stack__advance_string, _pcsp__advance_string},
{"_advance_string_default", _entry__advance_string_default, _size__advance_string_default, _stack__advance_string_default, _pcsp__advance_string_default},
{"_do_skip_number", _entry__do_skip_number, _size__do_skip_number, _stack__do_skip_number, _pcsp__do_skip_number},
{"_get_by_path", _entry__get_by_path, _size__get_by_path, _stack__get_by_path, _pcsp__get_by_path},
{"_skip_one_fast", _entry__skip_one_fast, _size__skip_one_fast, _stack__skip_one_fast, _pcsp__skip_one_fast},
{"_html_escape", _entry__html_escape, _size__html_escape, _stack__html_escape, _pcsp__html_escape},
{"_i64toa", _entry__i64toa, _size__i64toa, _stack__i64toa, _pcsp__i64toa},
{"_u64toa", _entry__u64toa, _size__u64toa, _stack__u64toa, _pcsp__u64toa},
{"_lspace", _entry__lspace, _size__lspace, _stack__lspace, _pcsp__lspace},
{"_quote", _entry__quote, _size__quote, _stack__quote, _pcsp__quote},
{"_skip_array", _entry__skip_array, _size__skip_array, _stack__skip_array, _pcsp__skip_array},
{"_skip_number", _entry__skip_number, _size__skip_number, _stack__skip_number, _pcsp__skip_number},
{"_skip_object", _entry__skip_object, _size__skip_object, _stack__skip_object, _pcsp__skip_object},
{"_skip_one", _entry__skip_one, _size__skip_one, _stack__skip_one, _pcsp__skip_one},
{"_unquote", _entry__unquote, _size__unquote, _stack__unquote, _pcsp__unquote},
{"_validate_one", _entry__validate_one, _size__validate_one, _stack__validate_one, _pcsp__validate_one},
{"_validate_utf8", _entry__validate_utf8, _size__validate_utf8, _stack__validate_utf8, _pcsp__validate_utf8},
{"_validate_utf8_fast", _entry__validate_utf8_fast, _size__validate_utf8_fast, _stack__validate_utf8_fast, _pcsp__validate_utf8_fast},
{"_value", _entry__value, _size__value, _stack__value, _pcsp__value},
{"_vnumber", _entry__vnumber, _size__vnumber, _stack__vnumber, _pcsp__vnumber},
{"_atof_eisel_lemire64", _entry__atof_eisel_lemire64, _size__atof_eisel_lemire64, _stack__atof_eisel_lemire64, _pcsp__atof_eisel_lemire64},
{"_atof_native", _entry__atof_native, _size__atof_native, _stack__atof_native, _pcsp__atof_native},
{"_decimal_to_f64", _entry__decimal_to_f64, _size__decimal_to_f64, _stack__decimal_to_f64, _pcsp__decimal_to_f64},
{"_right_shift", _entry__right_shift, _size__right_shift, _stack__right_shift, _pcsp__right_shift},
{"_left_shift", _entry__left_shift, _size__left_shift, _stack__left_shift, _pcsp__left_shift},
{"_vsigned", _entry__vsigned, _size__vsigned, _stack__vsigned, _pcsp__vsigned},
{"_vstring", _entry__vstring, _size__vstring, _stack__vstring, _pcsp__vstring},
{"_vunsigned", _entry__vunsigned, _size__vunsigned, _stack__vunsigned, _pcsp__vunsigned},
}
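Each `_pcsp_*` table above pairs a program-counter offset with the stack depth at that offset, and `Funcs` hands them to the loader so the runtime can unwind the native frames. A minimal sketch (not part of the generated file) of how such a table could be queried, assuming entries are sorted by pc offset:

// pcspLookup returns the stack depth recorded for the last entry whose
// pc offset does not exceed pc; entries are assumed to be sorted by pc.
func pcspLookup(tab [][2]uint32, pc uint32) uint32 {
	sp := uint32(0)
	for _, ent := range tab {
		if ent[0] > pc {
			break
		}
		sp = ent[1]
	}
	return sp
}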

View File

@@ -22,13 +22,15 @@ import (
`unsafe`
)
type ValueType int
type ValueType = int64
type ParsingError uint
type SearchingError uint
// NOTE: DO NOT MODIFY.
// These definitions must stay consistent with native/types.h.
const BufPaddingSize int = 64
const (
V_EOF ValueType = 1
V_NULL ValueType = 2
@@ -55,6 +57,9 @@ const (
B_USE_NUMBER = 1
B_VALIDATE_STRING = 5
B_ALLOW_CONTROL = 31
// for native.SkipOne() flags
B_NO_VALIDATE_JSON = 6
)
const (
@@ -159,4 +164,4 @@ func NewDbuf() *byte {
func FreeDbuf(p *byte) {
digitPool.Put(p)
}
}
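Note that `type ValueType = int64` is a type alias, not a defined type, so `ValueType` values now flow to and from `int64` without conversions. A tiny illustration of the difference (the identifiers here are only for illustration):

type Defined int64 // defined type: requires explicit conversion
type Alias = int64 // alias: identical to int64

func takeInt64(int64) {}

func demo() {
	var d Defined = 1
	var a Alias = 1
	takeInt64(int64(d)) // conversion needed
	takeInt64(a)        // no conversion: Alias *is* int64
}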

View File

@@ -17,10 +17,11 @@
package resolver
import (
`fmt`
`reflect`
`strings`
`sync`
"fmt"
"reflect"
"strings"
"sync"
_ "unsafe"
)
type FieldOpts int
@@ -29,6 +30,7 @@ type OffsetType int
const (
F_omitempty FieldOpts = 1 << iota
F_stringize
F_omitzero
)
const (
@@ -47,6 +49,7 @@ type FieldMeta struct {
Path []Offset
Opts FieldOpts
Type reflect.Type
IsZero func(reflect.Value) bool
}
func (self *FieldMeta) String() string {
@@ -117,20 +120,26 @@ func resolveFields(vt reflect.Type) []FieldMeta {
/* convert each field */
for _, fv := range tfv.list {
/* add to result */
ret = append(ret, FieldMeta{})
fm := &ret[len(ret)-1]
item := vt
path := []Offset(nil)
opts := FieldOpts(0)
/* check for "string" */
if fv.quoted {
opts |= F_stringize
fm.Opts |= F_stringize
}
/* check for "omitempty" */
if fv.omitEmpty {
opts |= F_omitempty
fm.Opts |= F_omitempty
}
/* handle the "omitzero" */
handleOmitZero(fv, fm)
/* dump the field path */
for _, i := range fv.index {
kind := F_offset
@@ -161,13 +170,9 @@ func resolveFields(vt reflect.Type) []FieldMeta {
path[idx].Kind = F_offset
}
/* add to result */
ret = append(ret, FieldMeta {
Type: fvt,
Opts: opts,
Path: path,
Name: fv.name,
})
fm.Type = fvt
fm.Path = path
fm.Name = fv.name
}
/* optimize the offsets */
@@ -212,3 +217,10 @@ func ResolveStruct(vt reflect.Type) []FieldMeta {
fieldCache[vt] = fm
return fm
}
func handleOmitZero(fv StdField, fm *FieldMeta) {
if fv.omitZero {
fm.Opts |= F_omitzero
fm.IsZero = fv.isZero
}
}
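With `F_omitzero` and the per-field `IsZero` callback now carried in `FieldMeta`, an encoder can decide at runtime whether to drop a field. A minimal sketch of that check, assuming it lives alongside the resolver package with `reflect` imported (`shouldOmit` is a hypothetical helper, not part of this change):

// shouldOmit reports whether a resolved struct field should be skipped
// when encoding, based on the `omitzero` option.
func shouldOmit(fm FieldMeta, fv reflect.Value) bool {
	return fm.Opts&F_omitzero != 0 && fm.IsZero != nil && fm.IsZero(fv)
}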

View File

@@ -1,4 +1,4 @@
// +build !noasm !appengine
// +build !noasm,amd64 !appengine,amd64
// Code generated by asm2asm, DO NOT EDIT.
#include "go_asm.h"
@@ -18,43 +18,3 @@ _entry:
_stack_grow:
CALL runtime·morestack_noctxt<>(SB)
JMP _entry
TEXT ·StopProf(SB), NOSPLIT, $0-0
NO_LOCAL_POINTERS
CMPB github·combytedancesonicinternalrt·StopProfiling(SB), $0
JEQ _ret_1
MOVL $1, AX
LEAQ github·combytedancesonicinternalrt·yieldCount(SB), CX
LOCK
XADDL AX, (CX)
MOVL runtime·prof+4(SB), AX
TESTL AX, AX
JEQ _ret_1
MOVL AX, github·combytedancesonicinternalrt·oldHz(SB)
MOVL $0, runtime·prof+4(SB)
_ret_1:
RET
TEXT ·StartProf(SB), NOSPLIT, $0-0
NO_LOCAL_POINTERS
CMPB github·combytedancesonicinternalrt·StopProfiling(SB), $0
JEQ _ret_2
MOVL $-1, AX
LEAQ github·combytedancesonicinternalrt·yieldCount(SB), CX
LOCK
XADDL AX, (CX)
CMPL github·combytedancesonicinternalrt·yieldCount(SB), $0
JNE _ret_2
CMPL runtime·prof+4(SB), $0
JNE _ret_2
CMPL github·combytedancesonicinternalrt·oldHz(SB), $0
JNE _branch_1
MOVL $100, github·combytedancesonicinternalrt·oldHz(SB)
_branch_1:
MOVL github·combytedancesonicinternalrt·oldHz(SB), AX
MOVL AX, runtime·prof+4(SB)
_ret_2:
RET

View File

@@ -1,10 +0,0 @@
// +build !noasm !appengine
// Code generated by asm2asm, DO NOT EDIT.
#include "go_asm.h"
#include "funcdata.h"
#include "textflag.h"
TEXT ·MoreStack(SB), NOSPLIT, $0 - 8
NO_LOCAL_POINTERS
RET

View File

@@ -17,8 +17,10 @@
package rt
import (
`unsafe`
`reflect`
"reflect"
"unsafe"
"github.com/bytedance/sonic/option"
)
//go:nosplit
@@ -66,15 +68,16 @@ func FuncAddr(f interface{}) unsafe.Pointer {
}
}
//go:nocheckptr
func IndexChar(src string, index int) unsafe.Pointer {
return unsafe.Pointer(uintptr((*GoString)(unsafe.Pointer(&src)).Ptr) + uintptr(index))
}
//go:nocheckptr
func IndexByte(ptr []byte, index int) unsafe.Pointer {
return unsafe.Pointer(uintptr((*GoSlice)(unsafe.Pointer(&ptr)).Ptr) + uintptr(index))
}
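`IndexChar` and `IndexByte` are plain pointer offsets into the backing array of a string or slice; `//go:nocheckptr` exempts them from checkptr instrumentation. A small sketch of reading a byte through `IndexChar`, assuming it sits in the same package (`byteAt` is a hypothetical helper):

// byteAt reads the i-th byte of s through the raw pointer returned by
// IndexChar; i must be a valid index into s.
func byteAt(s string, i int) byte {
	return *(*byte)(IndexChar(s, i))
}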
//go:nosplit
func GuardSlice(buf *[]byte, n int) {
c := cap(*buf)
l := len(*buf)
@@ -89,6 +92,21 @@ func GuardSlice(buf *[]byte, n int) {
}
}
func GuardSlice2(buf []byte, n int) []byte {
c := cap(buf)
l := len(buf)
if c-l < n {
c = c>>1 + n + l
if c < 32 {
c = 32
}
tmp := make([]byte, l, c)
copy(tmp, buf)
buf = tmp
}
return buf
}
//go:nosplit
func Ptr2SlicePtr(s unsafe.Pointer, l int, c int) unsafe.Pointer {
slice := &GoSlice{
@@ -121,4 +139,17 @@ func StrFrom(p unsafe.Pointer, n int64) (s string) {
func NoEscape(p unsafe.Pointer) unsafe.Pointer {
x := uintptr(p)
return unsafe.Pointer(x ^ 0)
}
}
//go:nosplit
func MoreStack(size uintptr)
//go:nosplit
func Add(ptr unsafe.Pointer, off uintptr) unsafe.Pointer {
return unsafe.Pointer(uintptr(ptr) + off)
}
// CanSizeResue reports whether a buffer with the given capacity is within
// option.LimitBufferSize and therefore eligible for reuse.
func CanSizeResue(cap int) bool {
return cap <= int(option.LimitBufferSize)
}
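Unlike `GuardSlice`, which grows the buffer through a pointer, `GuardSlice2` returns the possibly reallocated slice, so the caller must reassign it. A minimal usage sketch under that assumption (`appendAll` is a hypothetical helper):

// appendAll reserves room for len(p) more bytes up front, so the
// following append cannot reallocate again.
func appendAll(buf []byte, p []byte) []byte {
	buf = GuardSlice2(buf, len(p)) // may return a new backing array
	return append(buf, p...)
}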

View File

@@ -17,197 +17,215 @@
package rt
import (
`reflect`
`unsafe`
"reflect"
"unsafe"
)
var (
reflectRtypeItab = findReflectRtypeItab()
reflectRtypeItab = findReflectRtypeItab()
)
// GoType.KindFlags const
const (
F_direct = 1 << 5
F_kind_mask = (1 << 5) - 1
F_direct = 1 << 5
F_kind_mask = (1 << 5) - 1
)
// GoType.Flags const
const (
tflagUncommon uint8 = 1 << 0
tflagExtraStar uint8 = 1 << 1
tflagNamed uint8 = 1 << 2
tflagRegularMemory uint8 = 1 << 3
tflagUncommon uint8 = 1 << 0
tflagExtraStar uint8 = 1 << 1
tflagNamed uint8 = 1 << 2
tflagRegularMemory uint8 = 1 << 3
)
type GoType struct {
Size uintptr
PtrData uintptr
Hash uint32
Flags uint8
Align uint8
FieldAlign uint8
KindFlags uint8
Traits unsafe.Pointer
GCData *byte
Str int32
PtrToSelf int32
Size uintptr
PtrData uintptr
Hash uint32
Flags uint8
Align uint8
FieldAlign uint8
KindFlags uint8
Traits unsafe.Pointer
GCData *byte
Str int32
PtrToSelf int32
}
func (self *GoType) IsNamed() bool {
return (self.Flags & tflagNamed) != 0
return (self.Flags & tflagNamed) != 0
}
func (self *GoType) Kind() reflect.Kind {
return reflect.Kind(self.KindFlags & F_kind_mask)
return reflect.Kind(self.KindFlags & F_kind_mask)
}
func (self *GoType) Pack() (t reflect.Type) {
(*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
(*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
return
(*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
(*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
return
}
func (self *GoType) String() string {
return self.Pack().String()
return self.Pack().String()
}
func (self *GoType) Indirect() bool {
return self.KindFlags & F_direct == 0
}
type GoMap struct {
Count int
Flags uint8
B uint8
Overflow uint16
Hash0 uint32
Buckets unsafe.Pointer
OldBuckets unsafe.Pointer
Evacuate uintptr
Extra unsafe.Pointer
}
type GoMapIterator struct {
K unsafe.Pointer
V unsafe.Pointer
T *GoMapType
H *GoMap
Buckets unsafe.Pointer
Bptr *unsafe.Pointer
Overflow *[]unsafe.Pointer
OldOverflow *[]unsafe.Pointer
StartBucket uintptr
Offset uint8
Wrapped bool
B uint8
I uint8
Bucket uintptr
CheckBucket uintptr
return self.KindFlags&F_direct == 0
}
type GoItab struct {
it unsafe.Pointer
Vt *GoType
hv uint32
_ [4]byte
fn [1]uintptr
it unsafe.Pointer
Vt *GoType
hv uint32
_ [4]byte
fn [1]uintptr
}
type GoIface struct {
Itab *GoItab
Value unsafe.Pointer
Itab *GoItab
Value unsafe.Pointer
}
type GoEface struct {
Type *GoType
Value unsafe.Pointer
Type *GoType
Value unsafe.Pointer
}
func (self GoEface) Pack() (v interface{}) {
*(*GoEface)(unsafe.Pointer(&v)) = self
return
*(*GoEface)(unsafe.Pointer(&v)) = self
return
}
type GoPtrType struct {
GoType
Elem *GoType
GoType
Elem *GoType
}
type GoMapType struct {
GoType
Key *GoType
Elem *GoType
Bucket *GoType
Hasher func(unsafe.Pointer, uintptr) uintptr
KeySize uint8
ElemSize uint8
BucketSize uint16
Flags uint32
GoType
Key *GoType
Elem *GoType
Bucket *GoType
Hasher func(unsafe.Pointer, uintptr) uintptr
KeySize uint8
ElemSize uint8
BucketSize uint16
Flags uint32
}
func (self *GoMapType) IndirectElem() bool {
return self.Flags & 2 != 0
return self.Flags&2 != 0
}
type GoStructType struct {
GoType
Pkg *byte
Fields []GoStructField
GoType
Pkg *byte
Fields []GoStructField
}
type GoStructField struct {
Name *byte
Type *GoType
OffEmbed uintptr
Name *byte
Type *GoType
OffEmbed uintptr
}
type GoInterfaceType struct {
GoType
PkgPath *byte
Methods []GoInterfaceMethod
GoType
PkgPath *byte
Methods []GoInterfaceMethod
}
type GoInterfaceMethod struct {
Name int32
Type int32
Name int32
Type int32
}
type GoSlice struct {
Ptr unsafe.Pointer
Len int
Cap int
Ptr unsafe.Pointer
Len int
Cap int
}
type GoString struct {
Ptr unsafe.Pointer
Len int
Ptr unsafe.Pointer
Len int
}
func PtrElem(t *GoType) *GoType {
return (*GoPtrType)(unsafe.Pointer(t)).Elem
return (*GoPtrType)(unsafe.Pointer(t)).Elem
}
func MapType(t *GoType) *GoMapType {
return (*GoMapType)(unsafe.Pointer(t))
return (*GoMapType)(unsafe.Pointer(t))
}
func IfaceType(t *GoType) *GoInterfaceType {
return (*GoInterfaceType)(unsafe.Pointer(t))
return (*GoInterfaceType)(unsafe.Pointer(t))
}
func UnpackType(t reflect.Type) *GoType {
return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
}
func UnpackEface(v interface{}) GoEface {
return *(*GoEface)(unsafe.Pointer(&v))
return *(*GoEface)(unsafe.Pointer(&v))
}
func UnpackIface(v interface{}) GoIface {
return *(*GoIface)(unsafe.Pointer(&v))
return *(*GoIface)(unsafe.Pointer(&v))
}
func findReflectRtypeItab() *GoItab {
v := reflect.TypeOf(struct{}{})
return (*GoIface)(unsafe.Pointer(&v)).Itab
v := reflect.TypeOf(struct{}{})
return (*GoIface)(unsafe.Pointer(&v)).Itab
}
func AssertI2I2(t *GoType, i GoIface) (r GoIface) {
inter := IfaceType(t)
tab := i.Itab
if tab == nil {
return
}
if (*GoInterfaceType)(tab.it) != inter {
tab = GetItab(inter, tab.Vt, true)
if tab == nil {
return
}
}
r.Itab = tab
r.Value = i.Value
return
}
func (t *GoType) IsInt64() bool {
return t.Kind() == reflect.Int64 || (t.Kind() == reflect.Int && t.Size == 8)
}
func (t *GoType) IsInt32() bool {
return t.Kind() == reflect.Int32 || (t.Kind() == reflect.Int && t.Size == 4)
}
//go:nosplit
func (t *GoType) IsUint64() bool {
isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
return t.Kind() == reflect.Uint64 || (isUint && t.Size == 8)
}
//go:nosplit
func (t *GoType) IsUint32() bool {
isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
return t.Kind() == reflect.Uint32 || (isUint && t.Size == 4)
}
//go:nosplit
func PtrAdd(ptr unsafe.Pointer, offset uintptr) unsafe.Pointer {
return unsafe.Pointer(uintptr(ptr) + offset)
}
//go:noescape
//go:linkname GetItab runtime.getitab
func GetItab(inter *GoInterfaceType, typ *GoType, canfail bool) *GoItab
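`UnpackEface` and `UnpackType` reinterpret an `interface{}` or `reflect.Type` as the runtime's raw representation, so the kind reported through `GoType` should agree with what `reflect` reports. A small sanity sketch, assuming the definitions above and an imported `reflect` (`kindsAgree` is a hypothetical helper):

// kindsAgree checks that the raw *GoType behind a non-nil eface reports
// the same kind as the reflect package.
func kindsAgree(v interface{}) bool {
	return UnpackEface(v).Type.Kind() == reflect.TypeOf(v).Kind()
}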

View File

@@ -1,3 +1,5 @@
// +build go1.21,!go1.26
/*
* Copyright 2021 ByteDance Inc.
*
@@ -17,13 +19,18 @@
package rt
import (
`os`
`sync/atomic`
`unsafe`
`golang.org/x/arch/x86/x86asm`
)
//go:linkname GcWriteBarrier2 runtime.gcWriteBarrier2
func GcWriteBarrier2()
//go:linkname RuntimeWriteBarrier runtime.writeBarrier
var RuntimeWriteBarrier uintptr
const (
_MaxInstr = 15
)
@@ -76,49 +83,3 @@ func GcwbAddr() uintptr {
}
}
// StopProfiling is used to stop tracebacks introduced by SIGPROF while native code is running.
// WARN: this option is only a workaround for the traceback issue (https://github.com/bytedance/sonic/issues/310),
// and will be dropped once the issue is fixed.
var StopProfiling = os.Getenv("SONIC_STOP_PROFILING") != ""
// WARN: must be aligned with runtime.Prof
// type Prof struct {
// signalLock uint32
// hz int32
// }
var (
// // go:linkname runtimeProf runtime.prof
// runtimeProf Prof
// count of native-C calls
yieldCount uint32
// previous value of runtimeProf.hz
oldHz int32
)
//go:nosplit
func MoreStack(size uintptr)
func StopProf()
// func StopProf() {
// atomic.AddUint32(&yieldCount, 1)
// if runtimeProf.hz != 0 {
// oldHz = runtimeProf.hz
// runtimeProf.hz = 0
// }
// }
func StartProf()
// func StartProf() {
// atomic.AddUint32(&yieldCount, ^uint32(0))
// if yieldCount == 0 && runtimeProf.hz == 0 {
// if oldHz == 0 {
// oldHz = 100
// }
// runtimeProf.hz = oldHz
// }
// }

View File

@@ -17,12 +17,12 @@
package rt
const (
MinInt48 = -(1 << 47)
MaxInt48 = +(1 << 47) - 1
MinInt48 int64 = -(1 << 47)
MaxInt48 int64 = +(1 << 47) - 1
)
func PackInt(v int) uint64 {
if u := uint64(v); v < MinInt48 || v > MaxInt48 {
if u := uint64(v); int64(v) < MinInt48 || int64(v) > MaxInt48 {
panic("int48 out of range")
} else {
return ((u >> 63) << 47) | (u & 0x00007fffffffffff)
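`PackInt` keeps the low 47 bits of the two's-complement value and stores the sign bit at bit 47, so any in-range value survives a round trip through 48 bits. A hypothetical inverse, shown only to make the layout concrete:

// unpackInt48 sign-extends bit 47 back to a full int64, e.g.
// PackInt(-1) == 0x0000ffffffffffff and unpackInt48(0x0000ffffffffffff) == -1.
func unpackInt48(u uint64) int64 {
	v := int64(u << 16) // move bit 47 up to bit 63 ...
	return v >> 16      // ... then arithmetic-shift back to sign-extend
}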

View File

@@ -1,181 +0,0 @@
/**
* Copyright 2023 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rt
import (
`fmt`
`strings`
`unsafe`
)
type Bitmap struct {
N int
B []byte
}
func (self *Bitmap) grow() {
if self.N >= len(self.B) * 8 {
self.B = append(self.B, 0)
}
}
func (self *Bitmap) mark(i int, bv int) {
if bv != 0 {
self.B[i / 8] |= 1 << (i % 8)
} else {
self.B[i / 8] &^= 1 << (i % 8)
}
}
func (self *Bitmap) Set(i int, bv int) {
if i >= self.N {
panic("bitmap: invalid bit position")
} else {
self.mark(i, bv)
}
}
func (self *Bitmap) Append(bv int) {
self.grow()
self.mark(self.N, bv)
self.N++
}
func (self *Bitmap) AppendMany(n int, bv int) {
for i := 0; i < n; i++ {
self.Append(bv)
}
}
// var (
// _stackMapLock = sync.Mutex{}
// _stackMapCache = make(map[*StackMap]struct{})
// )
type BitVec struct {
N uintptr
B unsafe.Pointer
}
func (self BitVec) Bit(i uintptr) byte {
return (*(*byte)(unsafe.Pointer(uintptr(self.B) + i / 8)) >> (i % 8)) & 1
}
func (self BitVec) String() string {
var i uintptr
var v []string
/* add each bit */
for i = 0; i < self.N; i++ {
v = append(v, fmt.Sprintf("%d", self.Bit(i)))
}
/* join them together */
return fmt.Sprintf(
"BitVec { %s }",
strings.Join(v, ", "),
)
}
type StackMap struct {
N int32
L int32
B [1]byte
}
// func (self *StackMap) add() {
// _stackMapLock.Lock()
// _stackMapCache[self] = struct{}{}
// _stackMapLock.Unlock()
// }
func (self *StackMap) Pin() uintptr {
// self.add()
return uintptr(unsafe.Pointer(self))
}
func (self *StackMap) Get(i int32) BitVec {
return BitVec {
N: uintptr(self.L),
B: unsafe.Pointer(uintptr(unsafe.Pointer(&self.B)) + uintptr(i * ((self.L + 7) >> 3))),
}
}
func (self *StackMap) String() string {
sb := strings.Builder{}
sb.WriteString("StackMap {")
/* dump every stack map */
for i := int32(0); i < self.N; i++ {
sb.WriteRune('\n')
sb.WriteString(" " + self.Get(i).String())
}
/* close the stackmap */
sb.WriteString("\n}")
return sb.String()
}
func (self *StackMap) MarshalBinary() ([]byte, error) {
size := int(self.N) * int(self.L) + int(unsafe.Sizeof(self.L)) + int(unsafe.Sizeof(self.N))
return BytesFrom(unsafe.Pointer(self), size, size), nil
}
var (
byteType = UnpackEface(byte(0)).Type
)
const (
_StackMapSize = unsafe.Sizeof(StackMap{})
)
//go:linkname mallocgc runtime.mallocgc
//goland:noinspection GoUnusedParameter
func mallocgc(nb uintptr, vt *GoType, zero bool) unsafe.Pointer
type StackMapBuilder struct {
b Bitmap
}
//go:nocheckptr
func (self *StackMapBuilder) Build() (p *StackMap) {
nb := len(self.b.B)
bm := mallocgc(_StackMapSize + uintptr(nb) - 1, byteType, false)
/* initialize as 1 bitmap of N bits */
p = (*StackMap)(bm)
p.N, p.L = 1, int32(self.b.N)
copy(BytesFrom(unsafe.Pointer(&p.B), nb, nb), self.b.B)
return
}
func (self *StackMapBuilder) AddField(ptr bool) {
if ptr {
self.b.Append(1)
} else {
self.b.Append(0)
}
}
func (self *StackMapBuilder) AddFields(n int, ptr bool) {
if ptr {
self.b.AppendMany(n, 1)
} else {
self.b.AppendMany(n, 0)
}
}
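The removed `StackMapBuilder` accumulated one bit per stack slot (1 for a pointer slot, 0 for a scalar slot) and then materialized a single runtime-style stack map. A minimal sketch of how such a builder could be driven, using only the definitions shown above (`buildFrameMap` is a hypothetical helper):

// buildFrameMap describes a hypothetical frame with a pointer slot,
// two scalar slots, and another pointer slot.
func buildFrameMap() *StackMap {
	var b StackMapBuilder
	b.AddField(true)      // slot 0: pointer
	b.AddFields(2, false) // slots 1-2: scalars
	b.AddField(true)      // slot 3: pointer
	return b.Build()      // one bitmap of 4 bits
}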