User login and API authentication
vendor/github.com/bytedance/sonic/internal/caching/hashing.go (generated, vendored, 8 changed lines)

@@ -23,12 +23,16 @@ import (
)

var (
    V_strhash = rt.UnpackEface(rt.Strhash)
    V_strhash = rt.UnpackEface(strhash)
    S_strhash = *(*uintptr)(V_strhash.Value)
)

//go:noescape
//go:linkname strhash runtime.strhash
func strhash(_ unsafe.Pointer, _ uintptr) uintptr

func StrHash(s string) uint64 {
    if v := rt.Strhash(unsafe.Pointer(&s), 0); v == 0 {
    if v := strhash(unsafe.Pointer(&s), 0); v == 0 {
        return 1
    } else {
        return uint64(v)
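For context on the StrHash helper in this hunk: it maps a hash value of 0 to 1 so callers can treat 0 as "not hashed yet". Below is a portable stand-in with the same contract, built on the standard library's hash/maphash instead of a //go:linkname to runtime.strhash. This is an illustrative sketch only, not the vendored implementation.

package main

import (
    "fmt"
    "hash/maphash"
)

// strHash mirrors the "never return 0" behaviour of StrHash above,
// but hashes with hash/maphash rather than runtime.strhash.
func strHash(seed maphash.Seed, s string) uint64 {
    if v := maphash.String(seed, s); v != 0 {
        return v
    }
    return 1
}

func main() {
    seed := maphash.MakeSeed()
    fmt.Println(strHash(seed, "hello")) // some non-zero 64-bit hash
}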
vendor/github.com/bytedance/sonic/internal/cpu/features.go (generated, vendored, 4 changed lines)

@@ -24,6 +24,7 @@ import (
)

var (
    HasAVX = cpuid.CPU.Has(cpuid.AVX)
    HasAVX2 = cpuid.CPU.Has(cpuid.AVX2)
    HasSSE = cpuid.CPU.Has(cpuid.SSE)
)

@@ -32,8 +33,7 @@ func init() {
    switch v := os.Getenv("SONIC_MODE"); v {
        case ""       : break
        case "auto"   : break
        case "noavx"  : HasAVX2 = false
        // will also disable avx, act as `noavx`, we remain it to make sure forward compatibility
        case "noavx"  : HasAVX = false; fallthrough
        case "noavx2" : HasAVX2 = false
        default       : panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v))
    }
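As a usage note, SONIC_MODE is read once in the package's init, so it must be set in the process environment before startup (for example SONIC_MODE=noavx2 ./app). The standalone sketch below mirrors the fallthrough variant that appears in the hunk; hasAVX and hasAVX2 are local stand-ins for illustration, whereas the vendored code mutates the package-level HasAVX/HasAVX2 flags.

package main

import (
    "fmt"
    "os"
)

func main() {
    hasAVX, hasAVX2 := true, true
    switch v := os.Getenv("SONIC_MODE"); v {
    case "", "auto":
        // keep whatever the CPU reports
    case "noavx":
        hasAVX = false
        fallthrough // disabling AVX also disables AVX2
    case "noavx2":
        hasAVX2 = false
    default:
        panic(fmt.Sprintf("invalid mode: '%s', should be one of 'auto', 'noavx', 'noavx2'", v))
    }
    fmt.Println("AVX:", hasAVX, "AVX2:", hasAVX2)
}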
vendor/github.com/bytedance/sonic/internal/encoder/compiler.go (generated, vendored, 1384 changed lines)

File diff suppressed because it is too large.
vendor/github.com/bytedance/sonic/internal/encoder/encoder.go (generated, vendored, 170 changed lines)

@@ -17,63 +17,72 @@
package encoder

import (
    "bytes"
    "encoding/json"
    "reflect"
    "runtime"
    "unsafe"
    `bytes`
    `encoding/json`
    `reflect`
    `runtime`
    `unsafe`

    "github.com/bytedance/sonic/utf8"
    "github.com/bytedance/sonic/internal/encoder/alg"
    "github.com/bytedance/sonic/internal/encoder/vars"
    "github.com/bytedance/sonic/internal/rt"
    "github.com/bytedance/sonic/option"
    "github.com/bytedance/gopkg/lang/dirtmake"
    `github.com/bytedance/sonic/internal/native`
    `github.com/bytedance/sonic/internal/native/types`
    `github.com/bytedance/sonic/internal/rt`
    `github.com/bytedance/sonic/utf8`
    `github.com/bytedance/sonic/option`
)

// Options is a set of encoding options.
type Options uint64

const (
    bitSortMapKeys = iota
    bitEscapeHTML
    bitCompactMarshaler
    bitNoQuoteTextMarshaler
    bitNoNullSliceOrMap
    bitValidateString
    bitNoValidateJSONMarshaler
    bitNoEncoderNewline

    // used for recursive compile
    bitPointerValue = 63
)

const (
    // SortMapKeys indicates that the keys of a map needs to be sorted
    // before serializing into JSON.
    // WARNING: This hurts performance A LOT, USE WITH CARE.
    SortMapKeys Options = 1 << alg.BitSortMapKeys
    SortMapKeys Options = 1 << bitSortMapKeys

    // EscapeHTML indicates encoder to escape all HTML characters
    // after serializing into JSON (see https://pkg.go.dev/encoding/json#HTMLEscape).
    // WARNING: This hurts performance A LOT, USE WITH CARE.
    EscapeHTML Options = 1 << alg.BitEscapeHTML
    EscapeHTML Options = 1 << bitEscapeHTML

    // CompactMarshaler indicates that the output JSON from json.Marshaler
    // is always compact and needs no validation
    CompactMarshaler Options = 1 << alg.BitCompactMarshaler
    CompactMarshaler Options = 1 << bitCompactMarshaler

    // NoQuoteTextMarshaler indicates that the output text from encoding.TextMarshaler
    // is always escaped string and needs no quoting
    NoQuoteTextMarshaler Options = 1 << alg.BitNoQuoteTextMarshaler
    NoQuoteTextMarshaler Options = 1 << bitNoQuoteTextMarshaler

    // NoNullSliceOrMap indicates all empty Array or Object are encoded as '[]' or '{}',
    // instead of 'null'.
    // NOTE: The priority of this option is lower than json tag `omitempty`.
    NoNullSliceOrMap Options = 1 << alg.BitNoNullSliceOrMap
    // instead of 'null'
    NoNullSliceOrMap Options = 1 << bitNoNullSliceOrMap

    // ValidateString indicates that encoder should validate the input string
    // before encoding it into JSON.
    ValidateString Options = 1 << alg.BitValidateString
    ValidateString Options = 1 << bitValidateString

    // NoValidateJSONMarshaler indicates that the encoder should not validate the output string
    // after encoding the JSONMarshaler to JSON.
    NoValidateJSONMarshaler Options = 1 << alg.BitNoValidateJSONMarshaler
    NoValidateJSONMarshaler Options = 1 << bitNoValidateJSONMarshaler

    // NoEncoderNewline indicates that the encoder should not add a newline after every message
    NoEncoderNewline Options = 1 << alg.BitNoEncoderNewline
    NoEncoderNewline Options = 1 << bitNoEncoderNewline

    // CompatibleWithStd is used to be compatible with std encoder.
    CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler

    // Encode Infinity or Nan float into `null`, instead of returning an error.
    EncodeNullForInfOrNan Options = 1 << alg.BitEncodeNullForInfOrNan
)

// Encoder represents a specific set of encoder configurations.

@@ -162,45 +171,53 @@ func (enc *Encoder) SetIndent(prefix, indent string) {

// Quote returns the JSON-quoted version of s.
func Quote(s string) string {
    buf := make([]byte, 0, len(s)+2)
    buf = alg.Quote(buf, s, false)
    return rt.Mem2Str(buf)
    var n int
    var p []byte

    /* check for empty string */
    if s == "" {
        return `""`
    }

    /* allocate space for result */
    n = len(s) + 2
    p = make([]byte, 0, n)

    /* call the encoder */
    _ = encodeString(&p, s)
    return rt.Mem2Str(p)
}

// Encode returns the JSON encoding of val, encoded with opts.
func Encode(val interface{}, opts Options) ([]byte, error) {
    var ret []byte

    buf := vars.NewBytes()
    err := encodeIntoCheckRace(buf, val, opts)
    buf := newBytes()
    err := encodeInto(&buf, val, opts)

    /* check for errors */
    if err != nil {
        vars.FreeBytes(buf)
        freeBytes(buf)
        return nil, err
    }

    /* htmlescape or correct UTF-8 if opts enable */
    old := buf
    *buf = encodeFinish(*old, opts)
    pbuf := ((*rt.GoSlice)(unsafe.Pointer(buf))).Ptr
    pold := ((*rt.GoSlice)(unsafe.Pointer(old))).Ptr
    buf = encodeFinish(old, opts)
    pbuf := ((*rt.GoSlice)(unsafe.Pointer(&buf))).Ptr
    pold := ((*rt.GoSlice)(unsafe.Pointer(&old))).Ptr

    /* return when allocated a new buffer */
    if pbuf != pold {
        vars.FreeBytes(old)
        return *buf, nil
        freeBytes(old)
        return buf, nil
    }

    /* make a copy of the result */
    if rt.CanSizeResue(cap(*buf)) {
        ret = dirtmake.Bytes(len(*buf), len(*buf))
        copy(ret, *buf)
        vars.FreeBytes(buf)
    } else {
        ret = *buf
    }

    ret = make([]byte, len(buf))
    copy(ret, buf)

    freeBytes(buf)
    /* return the buffer into pool */
    return ret, nil
}

@@ -208,7 +225,7 @@ func Encode(val interface{}, opts Options) ([]byte, error) {
// EncodeInto is like Encode but uses a user-supplied buffer instead of allocating
// a new one.
func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
    err := encodeIntoCheckRace(buf, val, opts)
    err := encodeInto(buf, val, opts)
    if err != nil {
        return err
    }

@@ -217,15 +234,15 @@ func EncodeInto(buf *[]byte, val interface{}, opts Options) error {
}

func encodeInto(buf *[]byte, val interface{}, opts Options) error {
    stk := vars.NewStack()
    stk := newStack()
    efv := rt.UnpackEface(val)
    err := encodeTypedPointer(buf, efv.Type, &efv.Value, stk, uint64(opts))

    /* return the stack into pool */
    if err != nil {
        vars.ResetStack(stk)
        resetStack(stk)
    }
    vars.FreeStack(stk)
    freeStack(stk)

    /* avoid GC ahead */
    runtime.KeepAlive(buf)

@@ -237,12 +254,13 @@ func encodeFinish(buf []byte, opts Options) []byte {
    if opts & EscapeHTML != 0 {
        buf = HTMLEscape(nil, buf)
    }
    if (opts & ValidateString != 0) && !utf8.Validate(buf) {
    if opts & ValidateString != 0 && !utf8.Validate(buf) {
        buf = utf8.CorrectWith(nil, buf, `\ufffd`)
    }
    return buf
}

var typeByte = rt.UnpackType(reflect.TypeOf(byte(0)))

// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029

@@ -251,7 +269,7 @@ func encodeFinish(buf []byte, opts Options) []byte {
// escaping within <script> tags, so an alternative JSON encoding must
// be used.
func HTMLEscape(dst []byte, src []byte) []byte {
    return alg.HtmlEscape(dst, src)
    return htmlEscape(dst, src)
}

// EncodeIndented is like Encode but applies Indent to format the output.

@@ -259,40 +277,37 @@ func HTMLEscape(dst []byte, src []byte) []byte {
// followed by one or more copies of indent according to the indentation nesting.
func EncodeIndented(val interface{}, prefix string, indent string, opts Options) ([]byte, error) {
    var err error
    var out []byte
    var buf *bytes.Buffer

    /* encode into the buffer */
    out := vars.NewBytes()
    err = EncodeInto(out, val, opts)
    out = newBytes()
    err = EncodeInto(&out, val, opts)

    /* check for errors */
    if err != nil {
        vars.FreeBytes(out)
        freeBytes(out)
        return nil, err
    }

    /* indent the JSON */
    buf = vars.NewBuffer()
    err = json.Indent(buf, *out, prefix, indent)
    vars.FreeBytes(out)
    buf = newBuffer()
    err = json.Indent(buf, out, prefix, indent)

    /* check for errors */
    if err != nil {
        vars.FreeBuffer(buf)
        freeBytes(out)
        freeBuffer(buf)
        return nil, err
    }

    /* copy to the result buffer */
    var ret []byte
    if rt.CanSizeResue(cap(buf.Bytes())) {
        ret = make([]byte, buf.Len())
        copy(ret, buf.Bytes())
        /* return the buffers into pool */
        vars.FreeBuffer(buf)
    } else {
        ret = buf.Bytes()
    }

    ret := make([]byte, buf.Len())
    copy(ret, buf.Bytes())

    /* return the buffers into pool */
    freeBytes(out)
    freeBuffer(buf)
    return ret, nil
}

@@ -315,5 +330,26 @@ func Pretouch(vt reflect.Type, opts ...option.CompileOption) error {
//
// Note: it does not check for the invalid UTF-8 characters.
func Valid(data []byte) (ok bool, start int) {
    return alg.Valid(data)
    n := len(data)
    if n == 0 {
        return false, -1
    }
    s := rt.Mem2Str(data)
    p := 0
    m := types.NewStateMachine()
    ret := native.ValidateOne(&s, &p, m, types.F_VALIDATE_STRING)
    types.FreeStateMachine(m)

    if ret < 0 {
        return false, p-1
    }

    /* check for trailing spaces */
    for ;p < n; p++ {
        if (types.SPACE_MASK & (1 << data[p])) == 0 {
            return false, p
        }
    }

    return true, ret
}
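The Options constants in this hunk are plain bit flags, so configurations compose with bitwise OR and are tested with AND. A minimal standalone sketch of that scheme follows; the constant names mirror the hunk, only three flags are reproduced, and the values are illustrative.

package main

import "fmt"

type Options uint64

const (
    bitSortMapKeys = iota
    bitEscapeHTML
    bitCompactMarshaler
)

const (
    SortMapKeys      Options = 1 << bitSortMapKeys
    EscapeHTML       Options = 1 << bitEscapeHTML
    CompactMarshaler Options = 1 << bitCompactMarshaler

    // CompatibleWithStd is simply the union of the three bits above.
    CompatibleWithStd Options = SortMapKeys | EscapeHTML | CompactMarshaler
)

func main() {
    opts := CompatibleWithStd
    fmt.Println("sort map keys:", opts&SortMapKeys != 0) // true
    fmt.Println("escape HTML:  ", opts&EscapeHTML != 0)  // true
}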
vendor/github.com/bytedance/sonic/internal/encoder/stream.go (generated, vendored, 28 changed lines)

@@ -17,10 +17,8 @@
package encoder

import (
    "encoding/json"
    "io"

    "github.com/bytedance/sonic/internal/encoder/vars"
    `encoding/json`
    `io`
)

// StreamEncoder uses io.Writer as input.

@@ -38,20 +36,21 @@ func NewStreamEncoder(w io.Writer) *StreamEncoder {

// Encode encodes interface{} as JSON to io.Writer
func (enc *StreamEncoder) Encode(val interface{}) (err error) {
    out := vars.NewBytes()
    buf := newBytes()
    out := buf

    /* encode into the buffer */
    err = EncodeInto(out, val, enc.Opts)
    err = EncodeInto(&out, val, enc.Opts)
    if err != nil {
        goto free_bytes
    }

    if enc.indent != "" || enc.prefix != "" {
        /* indent the JSON */
        buf := vars.NewBuffer()
        err = json.Indent(buf, *out, enc.prefix, enc.indent)
        buf := newBuffer()
        err = json.Indent(buf, out, enc.prefix, enc.indent)
        if err != nil {
            vars.FreeBuffer(buf)
            freeBuffer(buf)
            goto free_bytes
        }

@@ -63,17 +62,16 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
        /* copy into io.Writer */
        _, err = io.Copy(enc.w, buf)
        if err != nil {
            vars.FreeBuffer(buf)
            freeBuffer(buf)
            goto free_bytes
        }

    } else {
        /* copy into io.Writer */
        var n int
        buf := *out
        for len(buf) > 0 {
            n, err = enc.w.Write(buf)
            buf = buf[n:]
        for len(out) > 0 {
            n, err = enc.w.Write(out)
            out = out[n:]
            if err != nil {
                goto free_bytes
            }

@@ -86,6 +84,6 @@ func (enc *StreamEncoder) Encode(val interface{}) (err error) {
    }

free_bytes:
    vars.FreeBytes(out)
    freeBytes(buf)
    return err
}
vendor/github.com/bytedance/sonic/internal/jit/arch_amd64.go (generated, vendored, 13 changed lines)

@@ -17,10 +17,8 @@
package jit

import (
    "unsafe"

    "github.com/twitchyliquid64/golang-asm/asm/arch"
    "github.com/twitchyliquid64/golang-asm/obj"
    `github.com/twitchyliquid64/golang-asm/asm/arch`
    `github.com/twitchyliquid64/golang-asm/obj`
)

var (

@@ -35,13 +33,6 @@ func As(op string) obj.As {
    }
}

func ImmPtr(imm unsafe.Pointer) obj.Addr {
    return obj.Addr {
        Type   : obj.TYPE_CONST,
        Offset : int64(uintptr(imm)),
    }
}

func Imm(imm int64) obj.Addr {
    return obj.Addr {
        Type   : obj.TYPE_CONST,
vendor/github.com/bytedance/sonic/internal/jit/backend.go (generated, vendored, 7 changed lines)

@@ -21,7 +21,6 @@ import (
    `sync`
    _ `unsafe`

    `github.com/bytedance/sonic/internal/rt`
    `github.com/twitchyliquid64/golang-asm/asm/arch`
    `github.com/twitchyliquid64/golang-asm/obj`
    `github.com/twitchyliquid64/golang-asm/objabi`

@@ -39,6 +38,10 @@ var (
    _progPool sync.Pool
)

//go:nosplit
//go:linkname throw runtime.throw
func throw(_ string)

func newProg() *obj.Prog {
    if val := _progPool.Get(); val == nil {
        return new(obj.Prog)

@@ -68,7 +71,7 @@ func newLinkContext(arch *obj.LinkArch) (ret *obj.Link) {
}

func diagLinkContext(str string, args ...interface{}) {
    rt.Throw(fmt.Sprintf(str, args...))
    throw(fmt.Sprintf(str, args...))
}

func (self *Backend) New() (ret *obj.Prog) {
vendor/github.com/bytedance/sonic/internal/jit/runtime.go (generated, vendored, 2 changed lines)

@@ -37,7 +37,7 @@ func Type(t reflect.Type) obj.Addr {
}

func Itab(i *rt.GoType, t reflect.Type) obj.Addr {
    return Imm(int64(uintptr(unsafe.Pointer(rt.GetItab(rt.IfaceType(i), rt.UnpackType(t), false)))))
    return Imm(int64(uintptr(unsafe.Pointer(rt.Getitab(rt.IfaceType(i), rt.UnpackType(t), false)))))
}

func Gitab(i *rt.GoItab) obj.Addr {
vendor/github.com/bytedance/sonic/internal/native/dispatch_amd64.go (generated, vendored, 75 changed lines)

@@ -20,6 +20,7 @@ import (
    `unsafe`

    `github.com/bytedance/sonic/internal/cpu`
    `github.com/bytedance/sonic/internal/native/avx`
    `github.com/bytedance/sonic/internal/native/avx2`
    `github.com/bytedance/sonic/internal/native/sse`
    `github.com/bytedance/sonic/internal/native/types`

@@ -86,10 +87,6 @@ var (
    __ValidateUTF8 func(s unsafe.Pointer, p unsafe.Pointer, m unsafe.Pointer) (ret int)

    __ValidateUTF8Fast func(s unsafe.Pointer) (ret int)

    __ParseWithPadding func(parser unsafe.Pointer) (ret int)

    __LookupSmallKey func(key unsafe.Pointer, table unsafe.Pointer, lowerOff int) (index int)
)

//go:nosplit

@@ -162,22 +159,12 @@ func ValidateUTF8Fast(s *string) (ret int) {
    return __ValidateUTF8Fast(rt.NoEscape(unsafe.Pointer(s)))
}

//go:nosplit
func ParseWithPadding(parser unsafe.Pointer) (ret int) {
    return __ParseWithPadding(rt.NoEscape(unsafe.Pointer(parser)))
}

//go:nosplit
func LookupSmallKey(key *string, table *[]byte, lowerOff int) (index int) {
    return __LookupSmallKey(rt.NoEscape(unsafe.Pointer(key)), rt.NoEscape(unsafe.Pointer(table)), lowerOff)
}

func useSSE() {
    sse.Use()
    S_f64toa = sse.S_f64toa
    __F64toa = sse.F_f64toa
    S_f32toa = sse.S_f32toa
    __F32toa = sse.F_f32toa
    __F64toa = sse.F_f64toa
    S_i64toa = sse.S_i64toa
    __I64toa = sse.F_i64toa
    S_u64toa = sse.S_u64toa

@@ -205,8 +192,42 @@ func useSSE() {
    __ValidateOne = sse.F_validate_one
    __ValidateUTF8= sse.F_validate_utf8
    __ValidateUTF8Fast = sse.F_validate_utf8_fast
    __ParseWithPadding = sse.F_parse_with_padding
    __LookupSmallKey = sse.F_lookup_small_key
}

func useAVX() {
    avx.Use()
    S_f64toa = avx.S_f64toa
    __F64toa = avx.F_f64toa
    S_f32toa = avx.S_f32toa
    __F64toa = avx.F_f64toa
    S_i64toa = avx.S_i64toa
    __I64toa = avx.F_i64toa
    S_u64toa = avx.S_u64toa
    __U64toa = avx.F_u64toa
    S_lspace = avx.S_lspace
    S_quote = avx.S_quote
    __Quote = avx.F_quote
    S_unquote = avx.S_unquote
    __Unquote = avx.F_unquote
    S_value = avx.S_value
    __Value = avx.F_value
    S_vstring = avx.S_vstring
    S_vnumber = avx.S_vnumber
    S_vsigned = avx.S_vsigned
    S_vunsigned = avx.S_vunsigned
    S_skip_one = avx.S_skip_one
    __SkipOne = avx.F_skip_one
    __SkipOneFast = avx.F_skip_one_fast
    S_skip_array = avx.S_skip_array
    S_skip_object = avx.S_skip_object
    S_skip_number = avx.S_skip_number
    S_get_by_path = avx.S_get_by_path
    __GetByPath = avx.F_get_by_path
    __HTMLEscape = avx.F_html_escape
    __ValidateOne = avx.F_validate_one
    __ValidateUTF8= avx.F_validate_utf8
    __ValidateUTF8Fast = avx.F_validate_utf8_fast
}

func useAVX2() {

@@ -214,7 +235,7 @@ func useAVX2() {
    S_f64toa = avx2.S_f64toa
    __F64toa = avx2.F_f64toa
    S_f32toa = avx2.S_f32toa
    __F32toa = avx2.F_f32toa
    __F64toa = avx2.F_f64toa
    S_i64toa = avx2.S_i64toa
    __I64toa = avx2.F_i64toa
    S_u64toa = avx2.S_u64toa

@@ -242,17 +263,17 @@ func useAVX2() {
    __ValidateOne = avx2.F_validate_one
    __ValidateUTF8= avx2.F_validate_utf8
    __ValidateUTF8Fast = avx2.F_validate_utf8_fast
    __ParseWithPadding = avx2.F_parse_with_padding
    __LookupSmallKey = avx2.F_lookup_small_key
}

func init() {
    if cpu.HasAVX2 {
        useAVX2()
    } else if cpu.HasSSE {
        useSSE()
    } else {
        panic("Unsupported CPU, lacks of AVX2 or SSE CPUID Flag. maybe it's too old to run Sonic.")
    }
    if cpu.HasAVX2 {
        useAVX2()
    } else if cpu.HasAVX {
        useAVX()
    } else if cpu.HasSSE {
        useSSE()
    } else {
        panic("Unsupported CPU, maybe it's too old to run Sonic.")
    }
}
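The init() hunk above is a feature-dispatch pattern: a set of function pointers is filled in once at startup from the most capable implementation the CPU supports. Below is a dependency-free sketch of the same idea; the flag values are hard-coded for illustration, whereas the vendored code reads them via cpuid.

package main

import "fmt"

// itoa stands in for the __F64toa/__Quote/... function pointers above.
var itoa func(int64) string

func useAVX2() { itoa = func(v int64) string { return fmt.Sprintf("avx2:%d", v) } }
func useAVX()  { itoa = func(v int64) string { return fmt.Sprintf("avx:%d", v) } }
func useSSE()  { itoa = func(v int64) string { return fmt.Sprintf("sse:%d", v) } }

func main() {
    hasAVX2, hasAVX, hasSSE := false, true, true
    switch {
    case hasAVX2:
        useAVX2()
    case hasAVX:
        useAVX()
    case hasSSE:
        useSSE()
    default:
        panic("Unsupported CPU, maybe it's too old to run Sonic.")
    }
    fmt.Println(itoa(42)) // "avx:42" with the flags chosen above
}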
vendor/github.com/bytedance/sonic/internal/native/types/types.go (generated, vendored, 5 changed lines)

@@ -22,7 +22,7 @@ import (
    `unsafe`
)

type ValueType = int64
type ValueType int
type ParsingError uint
type SearchingError uint

@@ -57,9 +57,6 @@ const (
    B_USE_NUMBER = 1
    B_VALIDATE_STRING = 5
    B_ALLOW_CONTROL = 31

    // for native.SkipOne() flags
    B_NO_VALIDATE_JSON= 6
)

const (
vendor/github.com/bytedance/sonic/internal/resolver/resolver.go (generated, vendored, 40 changed lines)

@@ -17,11 +17,10 @@
package resolver

import (
    "fmt"
    "reflect"
    "strings"
    "sync"
    _ "unsafe"
    `fmt`
    `reflect`
    `strings`
    `sync`
)

type FieldOpts int

@@ -30,7 +29,6 @@ type OffsetType int
const (
    F_omitempty FieldOpts = 1 << iota
    F_stringize
    F_omitzero
)

const (

@@ -49,7 +47,6 @@ type FieldMeta struct {
    Path []Offset
    Opts FieldOpts
    Type reflect.Type
    IsZero func(reflect.Value) bool
}

func (self *FieldMeta) String() string {

@@ -120,26 +117,20 @@ func resolveFields(vt reflect.Type) []FieldMeta {

    /* convert each field */
    for _, fv := range tfv.list {
        /* add to result */
        ret = append(ret, FieldMeta{})
        fm := &ret[len(ret)-1]

        item := vt
        path := []Offset(nil)
        opts := FieldOpts(0)

        /* check for "string" */
        if fv.quoted {
            fm.Opts |= F_stringize
            opts |= F_stringize
        }

        /* check for "omitempty" */
        if fv.omitEmpty {
            fm.Opts |= F_omitempty
            opts |= F_omitempty
        }

        /* handle the "omitzero" */
        handleOmitZero(fv, fm)

        /* dump the field path */
        for _, i := range fv.index {
            kind := F_offset

@@ -170,9 +161,13 @@ func resolveFields(vt reflect.Type) []FieldMeta {
            path[idx].Kind = F_offset
        }

        fm.Type = fvt
        fm.Path = path
        fm.Name = fv.name
        /* add to result */
        ret = append(ret, FieldMeta {
            Type: fvt,
            Opts: opts,
            Path: path,
            Name: fv.name,
        })
    }

    /* optimize the offsets */

@@ -217,10 +212,3 @@ func ResolveStruct(vt reflect.Type) []FieldMeta {
    fieldCache[vt] = fm
    return fm
}

func handleOmitZero(fv StdField, fm *FieldMeta) {
    if fv.omitZero {
        fm.Opts |= F_omitzero
        fm.IsZero = fv.isZero
    }
}
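The resolver in this file maps struct tags onto the F_stringize and F_omitempty options it records per field. Below is a standalone illustration of those two tag options using the standard encoding/json package; the User type is hypothetical and not part of the diff.

package main

import (
    "encoding/json"
    "fmt"
)

type User struct {
    ID    int64  `json:"id,string"`       // ",string" is what F_stringize captures
    Email string `json:"email,omitempty"` // ",omitempty" is what F_omitempty captures
}

func main() {
    out, err := json.Marshal(User{ID: 42})
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // {"id":"42"} -- Email is dropped because it is empty
}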
vendor/github.com/bytedance/sonic/internal/rt/asm_amd64.s (generated, vendored, 41 changed lines)

@@ -1,5 +1,4 @@
// +build !noasm,amd64 !appengine,amd64
// Code generated by asm2asm, DO NOT EDIT·

#include "go_asm.h"
#include "funcdata.h"

@@ -18,3 +17,43 @@ _entry:
_stack_grow:
    CALL runtime·morestack_noctxt<>(SB)
    JMP _entry

TEXT ·StopProf(SB), NOSPLIT, $0-0
    NO_LOCAL_POINTERS
    CMPB github·com∕bytedance∕sonic∕internal∕rt·StopProfiling(SB), $0
    JEQ _ret_1
    MOVL $1, AX
    LEAQ github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), CX
    LOCK
    XADDL AX, (CX)
    MOVL runtime·prof+4(SB), AX
    TESTL AX, AX
    JEQ _ret_1
    MOVL AX, github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB)
    MOVL $0, runtime·prof+4(SB)
_ret_1:
    RET

TEXT ·StartProf(SB), NOSPLIT, $0-0
    NO_LOCAL_POINTERS
    CMPB github·com∕bytedance∕sonic∕internal∕rt·StopProfiling(SB), $0
    JEQ _ret_2
    MOVL $-1, AX
    LEAQ github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), CX
    LOCK
    XADDL AX, (CX)
    CMPL github·com∕bytedance∕sonic∕internal∕rt·yieldCount(SB), $0
    JNE _ret_2
    CMPL runtime·prof+4(SB), $0
    JNE _ret_2
    CMPL github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB), $0
    JNE _branch_1
    MOVL $100, github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB)
_branch_1:
    MOVL github·com∕bytedance∕sonic∕internal∕rt·oldHz(SB), AX
    MOVL AX, runtime·prof+4(SB)
_ret_2:
    RET
vendor/github.com/bytedance/sonic/internal/rt/fastmem.go (generated, vendored, 36 changed lines)

@@ -17,10 +17,8 @@
package rt

import (
    "reflect"
    "unsafe"

    "github.com/bytedance/sonic/option"
    `unsafe`
    `reflect`
)

//go:nosplit

@@ -92,21 +90,6 @@ func GuardSlice(buf *[]byte, n int) {
    }
}

func GuardSlice2(buf []byte, n int) []byte {
    c := cap(buf)
    l := len(buf)
    if c-l < n {
        c = c>>1 + n + l
        if c < 32 {
            c = 32
        }
        tmp := make([]byte, l, c)
        copy(tmp, buf)
        buf = tmp
    }
    return buf
}

//go:nosplit
func Ptr2SlicePtr(s unsafe.Pointer, l int, c int) unsafe.Pointer {
    slice := &GoSlice{

@@ -139,17 +122,4 @@ func StrFrom(p unsafe.Pointer, n int64) (s string) {
func NoEscape(p unsafe.Pointer) unsafe.Pointer {
    x := uintptr(p)
    return unsafe.Pointer(x ^ 0)
}

//go:nosplit
func MoreStack(size uintptr)

//go:nosplit
func Add(ptr unsafe.Pointer, off uintptr) unsafe.Pointer {
    return unsafe.Pointer(uintptr(ptr) + off)
}

// CanSizeResue
func CanSizeResue(cap int) bool {
    return cap <= int(option.LimitBufferSize)
}
}
vendor/github.com/bytedance/sonic/internal/rt/fastvalue.go (generated, vendored, 218 changed lines)

@@ -17,179 +17,209 @@
package rt

import (
    "reflect"
    "unsafe"
    `reflect`
    `unsafe`
)

var (
    reflectRtypeItab = findReflectRtypeItab()
    reflectRtypeItab = findReflectRtypeItab()
)

// GoType.KindFlags const
const (
    F_direct = 1 << 5
    F_kind_mask = (1 << 5) - 1
    F_direct = 1 << 5
    F_kind_mask = (1 << 5) - 1
)

// GoType.Flags const
const (
    tflagUncommon uint8 = 1 << 0
    tflagExtraStar uint8 = 1 << 1
    tflagNamed uint8 = 1 << 2
    tflagRegularMemory uint8 = 1 << 3
    tflagUncommon uint8 = 1 << 0
    tflagExtraStar uint8 = 1 << 1
    tflagNamed uint8 = 1 << 2
    tflagRegularMemory uint8 = 1 << 3
)

type GoType struct {
    Size uintptr
    PtrData uintptr
    Hash uint32
    Flags uint8
    Align uint8
    FieldAlign uint8
    KindFlags uint8
    Traits unsafe.Pointer
    GCData *byte
    Str int32
    PtrToSelf int32
    Size uintptr
    PtrData uintptr
    Hash uint32
    Flags uint8
    Align uint8
    FieldAlign uint8
    KindFlags uint8
    Traits unsafe.Pointer
    GCData *byte
    Str int32
    PtrToSelf int32
}

func (self *GoType) IsNamed() bool {
    return (self.Flags & tflagNamed) != 0
    return (self.Flags & tflagNamed) != 0
}

func (self *GoType) Kind() reflect.Kind {
    return reflect.Kind(self.KindFlags & F_kind_mask)
    return reflect.Kind(self.KindFlags & F_kind_mask)
}

func (self *GoType) Pack() (t reflect.Type) {
    (*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
    (*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
    return
    (*GoIface)(unsafe.Pointer(&t)).Itab = reflectRtypeItab
    (*GoIface)(unsafe.Pointer(&t)).Value = unsafe.Pointer(self)
    return
}

func (self *GoType) String() string {
    return self.Pack().String()
    return self.Pack().String()
}

func (self *GoType) Indirect() bool {
    return self.KindFlags&F_direct == 0
    return self.KindFlags & F_direct == 0
}

type GoMap struct {
    Count int
    Flags uint8
    B uint8
    Overflow uint16
    Hash0 uint32
    Buckets unsafe.Pointer
    OldBuckets unsafe.Pointer
    Evacuate uintptr
    Extra unsafe.Pointer
}

type GoMapIterator struct {
    K unsafe.Pointer
    V unsafe.Pointer
    T *GoMapType
    H *GoMap
    Buckets unsafe.Pointer
    Bptr *unsafe.Pointer
    Overflow *[]unsafe.Pointer
    OldOverflow *[]unsafe.Pointer
    StartBucket uintptr
    Offset uint8
    Wrapped bool
    B uint8
    I uint8
    Bucket uintptr
    CheckBucket uintptr
}

type GoItab struct {
    it unsafe.Pointer
    Vt *GoType
    hv uint32
    _ [4]byte
    fn [1]uintptr
    it unsafe.Pointer
    Vt *GoType
    hv uint32
    _ [4]byte
    fn [1]uintptr
}

type GoIface struct {
    Itab *GoItab
    Value unsafe.Pointer
    Itab *GoItab
    Value unsafe.Pointer
}

type GoEface struct {
    Type *GoType
    Value unsafe.Pointer
    Type *GoType
    Value unsafe.Pointer
}

func (self GoEface) Pack() (v interface{}) {
    *(*GoEface)(unsafe.Pointer(&v)) = self
    return
    *(*GoEface)(unsafe.Pointer(&v)) = self
    return
}

type GoPtrType struct {
    GoType
    Elem *GoType
    GoType
    Elem *GoType
}

type GoMapType struct {
    GoType
    Key *GoType
    Elem *GoType
    Bucket *GoType
    Hasher func(unsafe.Pointer, uintptr) uintptr
    KeySize uint8
    ElemSize uint8
    BucketSize uint16
    Flags uint32
    GoType
    Key *GoType
    Elem *GoType
    Bucket *GoType
    Hasher func(unsafe.Pointer, uintptr) uintptr
    KeySize uint8
    ElemSize uint8
    BucketSize uint16
    Flags uint32
}

func (self *GoMapType) IndirectElem() bool {
    return self.Flags&2 != 0
    return self.Flags & 2 != 0
}

type GoStructType struct {
    GoType
    Pkg *byte
    Fields []GoStructField
    GoType
    Pkg *byte
    Fields []GoStructField
}

type GoStructField struct {
    Name *byte
    Type *GoType
    OffEmbed uintptr
    Name *byte
    Type *GoType
    OffEmbed uintptr
}

type GoInterfaceType struct {
    GoType
    PkgPath *byte
    Methods []GoInterfaceMethod
    GoType
    PkgPath *byte
    Methods []GoInterfaceMethod
}

type GoInterfaceMethod struct {
    Name int32
    Type int32
    Name int32
    Type int32
}

type GoSlice struct {
    Ptr unsafe.Pointer
    Len int
    Cap int
    Ptr unsafe.Pointer
    Len int
    Cap int
}

type GoString struct {
    Ptr unsafe.Pointer
    Len int
    Ptr unsafe.Pointer
    Len int
}

func PtrElem(t *GoType) *GoType {
    return (*GoPtrType)(unsafe.Pointer(t)).Elem
    return (*GoPtrType)(unsafe.Pointer(t)).Elem
}

func MapType(t *GoType) *GoMapType {
    return (*GoMapType)(unsafe.Pointer(t))
    return (*GoMapType)(unsafe.Pointer(t))
}

func IfaceType(t *GoType) *GoInterfaceType {
    return (*GoInterfaceType)(unsafe.Pointer(t))
    return (*GoInterfaceType)(unsafe.Pointer(t))
}

func UnpackType(t reflect.Type) *GoType {
    return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
    return (*GoType)((*GoIface)(unsafe.Pointer(&t)).Value)
}

func UnpackEface(v interface{}) GoEface {
    return *(*GoEface)(unsafe.Pointer(&v))
    return *(*GoEface)(unsafe.Pointer(&v))
}

func UnpackIface(v interface{}) GoIface {
    return *(*GoIface)(unsafe.Pointer(&v))
    return *(*GoIface)(unsafe.Pointer(&v))
}

func findReflectRtypeItab() *GoItab {
    v := reflect.TypeOf(struct{}{})
    return (*GoIface)(unsafe.Pointer(&v)).Itab
    v := reflect.TypeOf(struct{}{})
    return (*GoIface)(unsafe.Pointer(&v)).Itab
}

func AssertI2I2(t *GoType, i GoIface) (r GoIface) {
    inter := IfaceType(t)
    inter := IfaceType(t)
    tab := i.Itab
    if tab == nil {
        return
    }
    if (*GoInterfaceType)(tab.it) != inter {
        tab = GetItab(inter, tab.Vt, true)
        tab = Getitab(inter, tab.Vt, true)
        if tab == nil {
            return
        }

@@ -199,33 +229,15 @@ func AssertI2I2(t *GoType, i GoIface) (r GoIface) {
    return
}

func (t *GoType) IsInt64() bool {
    return t.Kind() == reflect.Int64 || (t.Kind() == reflect.Int && t.Size == 8)
}

func (t *GoType) IsInt32() bool {
    return t.Kind() == reflect.Int32 || (t.Kind() == reflect.Int && t.Size == 4)
}

//go:nosplit
func (t *GoType) IsUint64() bool {
    isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
    return t.Kind() == reflect.Uint64 || (isUint && t.Size == 8)
}

//go:nosplit
func (t *GoType) IsUint32() bool {
    isUint := t.Kind() == reflect.Uint || t.Kind() == reflect.Uintptr
    return t.Kind() == reflect.Uint32 || (isUint && t.Size == 4)
}

//go:nosplit
func PtrAdd(ptr unsafe.Pointer, offset uintptr) unsafe.Pointer {
    return unsafe.Pointer(uintptr(ptr) + offset)
}

//go:noescape
//go:linkname GetItab runtime.getitab
func GetItab(inter *GoInterfaceType, typ *GoType, canfail bool) *GoItab
//go:linkname Getitab runtime.getitab
func Getitab(inter *GoInterfaceType, typ *GoType, canfail bool) *GoItab

func GetFuncPC(fn interface{}) uintptr {
    ft := UnpackEface(fn)
    if ft.Type.Kind() != reflect.Func {
        panic("not a function")
    }
    return *(*uintptr)(ft.Value)
}
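UnpackEface and GoEface in this file rely on the runtime representation of interface{} as a two-word (type pointer, data pointer) pair. Below is a self-contained sketch of that trick with a local goEface type; it is illustrative only, whereas the vendored GoEface carries a full *GoType for the first word.

package main

import (
    "fmt"
    "unsafe"
)

// goEface mirrors the two-word layout of an empty interface value.
type goEface struct {
    Type  unsafe.Pointer
    Value unsafe.Pointer
}

// unpackEface reinterprets an interface{} header, like rt.UnpackEface above.
func unpackEface(v interface{}) goEface {
    return *(*goEface)(unsafe.Pointer(&v))
}

func main() {
    x := 42
    e := unpackEface(x)
    fmt.Println("type word set:", e.Type != nil, "value word set:", e.Value != nil)
}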
vendor/github.com/bytedance/sonic/internal/rt/gcwb.go (generated, vendored, 55 changed lines)

@@ -1,5 +1,3 @@
// +build go1.21,!go1.26

/*
 * Copyright 2021 ByteDance Inc.
 *

@@ -19,18 +17,13 @@
package rt

import (
    `os`
    `sync/atomic`
    `unsafe`

    `golang.org/x/arch/x86/x86asm`
)

//go:linkname GcWriteBarrier2 runtime.gcWriteBarrier2
func GcWriteBarrier2()

//go:linkname RuntimeWriteBarrier runtime.writeBarrier
var RuntimeWriteBarrier uintptr

const (
    _MaxInstr = 15
)

@@ -83,3 +76,49 @@ func GcwbAddr() uintptr {
    }
}

// StopProfiling is used to stop traceback introduced by SIGPROF while native code is running.
// WARN: this option is only a workaround for traceback issue (https://github.com/bytedance/sonic/issues/310),
// and will be dropped when the issue is fixed.
var StopProfiling = os.Getenv("SONIC_STOP_PROFILING") != ""

// WARN: must be aligned with runtime.Prof
// type Prof struct {
//     signalLock uint32
//     hz int32
// }

var (
    // // go:linkname runtimeProf runtime.prof
    // runtimeProf Prof

    // count of native-C calls
    yieldCount uint32

    // previous value of runtimeProf.hz
    oldHz int32
)

//go:nosplit
func MoreStack(size uintptr)

func StopProf()

// func StopProf() {
//     atomic.AddUint32(&yieldCount, 1)
//     if runtimeProf.hz != 0 {
//         oldHz = runtimeProf.hz
//         runtimeProf.hz = 0
//     }
// }

func StartProf()

// func StartProf() {
//     atomic.AddUint32(&yieldCount, ^uint32(0))
//     if yieldCount == 0 && runtimeProf.hz == 0 {
//         if oldHz == 0 {
//             oldHz = 100
//         }
//         runtimeProf.hz = oldHz
//     }
// }