Clean up the mess left by AI's haphazard generation

2025-09-07 20:31:38 +08:00
parent c377a0784d
commit ba513e0827
202 changed files with 11751 additions and 67183 deletions

View File

@@ -1,4 +1,4 @@
// +build amd64,go1.15,!go1.21
// +build amd64,go1.16,!go1.22
/*
* Copyright 2022 ByteDance Inc.
@@ -87,7 +87,13 @@ func encodeBase64(src []byte) string {
func (self *Parser) decodeValue() (val types.JsonState) {
sv := (*rt.GoString)(unsafe.Pointer(&self.s))
self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, 0)
flag := types.F_USE_NUMBER
if self.dbuf != nil {
flag = 0
val.Dbuf = self.dbuf
val.Dcap = types.MaxDigitNums
}
self.p = native.Value(sv.Ptr, sv.Len, self.p, &val, uint64(flag))
return
}
@@ -148,4 +154,4 @@ func (self *Searcher) GetByPath(path ...interface{}) (Node, error) {
return Node{}, self.parser.ExportError(err)
}
return newRawNode(self.parser.s[start:self.parser.p], t), nil
}
}

View File

@@ -1,4 +1,4 @@
// +build !amd64 go1.21
// +build !amd64 !go1.16 go1.22
/*
* Copyright 2022 ByteDance Inc.
@@ -27,6 +27,10 @@ import (
`github.com/bytedance/sonic/internal/rt`
)
func init() {
println("WARNING: sonic only supports Go1.16~1.20 && CPU amd64, but your environment is not suitable")
}
func quote(buf *[]byte, val string) {
quoteString(buf, val)
}
@@ -49,7 +53,7 @@ func encodeBase64(src []byte) string {
}
func (self *Parser) decodeValue() (val types.JsonState) {
e, v := decodeValue(self.s, self.p)
e, v := decodeValue(self.s, self.p, self.dbuf == nil)
if e < 0 {
return v
}

View File

@@ -220,7 +220,7 @@ func decodeFloat64(src string, pos int) (ret int, v float64, err error) {
return ret, v, nil
}
func decodeValue(src string, pos int) (ret int, v types.JsonState) {
func decodeValue(src string, pos int, skipnum bool) (ret int, v types.JsonState) {
pos = skipBlank(src, pos)
if pos < 0 {
return pos, types.JsonState{Vt: types.ValueType(pos)}
@@ -256,20 +256,30 @@ func decodeValue(src string, pos int) (ret int, v types.JsonState) {
}
return ret, types.JsonState{Vt: types.V_FALSE}
case '-', '+', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
var iv int64
ret, iv, _ = decodeInt64(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_INTEGER, Iv: iv, Ep: pos}
} else if ret != -int(types.ERR_INVALID_NUMBER_FMT) {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
var fv float64
ret, fv, _ = decodeFloat64(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_DOUBLE, Dv: fv, Ep: pos}
if skipnum {
ret = skipNumber(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_DOUBLE, Iv: 0, Ep: pos}
} else {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
} else {
return ret, types.JsonState{Vt: types.ValueType(ret)}
var iv int64
ret, iv, _ = decodeInt64(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_INTEGER, Iv: iv, Ep: pos}
} else if ret != -int(types.ERR_INVALID_NUMBER_FMT) {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
var fv float64
ret, fv, _ = decodeFloat64(src, pos)
if ret >= 0 {
return ret, types.JsonState{Vt: types.V_DOUBLE, Dv: fv, Ep: pos}
} else {
return ret, types.JsonState{Vt: types.ValueType(ret)}
}
}
default:
return -int(types.ERR_INVALID_CHAR), types.JsonState{Vt:-types.ValueType(types.ERR_INVALID_CHAR)}
}
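With skipnum set, the fallback decoder no longer materializes numbers into Iv/Dv; it only records the start of the literal in Ep and returns the end position, leaving the digits untouched in the source. A package-internal sketch of the caller-side pattern this enables (illustrative only; it assumes the three-argument decodeValue above and mirrors how Parser.Parse later builds NewNumber(self.s[val.Ep:self.p])):

func rawNumber(src string) (string, bool) {
    ret, v := decodeValue(src, 0, true) // skipnum=true: only locate the literal
    if ret < 0 || v.Vt != types.V_DOUBLE {
        return "", false // negative returns encode a types.ParsingError
    }
    // recover the raw literal lazily from the source
    return src[v.Ep:ret], true
}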

View File

@@ -19,8 +19,6 @@ package ast
import (
`sync`
`unicode/utf8`
`github.com/bytedance/sonic/internal/rt`
)
const (
@@ -165,18 +163,18 @@ func (self *Node) encodeFalse(buf *[]byte) error {
}
func (self *Node) encodeNumber(buf *[]byte) error {
str := rt.StrFrom(self.p, self.v)
str := self.toString()
*buf = append(*buf, str...)
return nil
}
func (self *Node) encodeString(buf *[]byte) error {
if self.v == 0 {
if self.l == 0 {
*buf = append(*buf, '"', '"')
return nil
}
quote(buf, rt.StrFrom(self.p, self.v))
quote(buf, self.toString())
return nil
}
@@ -195,16 +193,28 @@ func (self *Node) encodeArray(buf *[]byte) error {
*buf = append(*buf, '[')
var p = (*Node)(self.p)
err := p.encode(buf)
if err != nil {
return err
var s = (*linkedNodes)(self.p)
var started bool
if nb > 0 {
n := s.At(0)
if n.Exists() {
if err := n.encode(buf); err != nil {
return err
}
started = true
}
}
for i := 1; i < nb; i++ {
*buf = append(*buf, ',')
p = p.unsafe_next()
err := p.encode(buf)
if err != nil {
n := s.At(i)
if !n.Exists() {
continue
}
if started {
*buf = append(*buf, ',')
}
started = true
if err := n.encode(buf); err != nil {
return err
}
}
@@ -240,20 +250,32 @@ func (self *Node) encodeObject(buf *[]byte) error {
*buf = append(*buf, '{')
var p = (*Pair)(self.p)
err := p.encode(buf)
if err != nil {
return err
var s = (*linkedPairs)(self.p)
var started bool
if nb > 0 {
n := s.At(0)
if n.Value.Exists() {
if err := n.encode(buf); err != nil {
return err
}
started = true
}
}
for i := 1; i < nb; i++ {
*buf = append(*buf, ',')
p = p.unsafe_next()
err := p.encode(buf)
if err != nil {
n := s.At(i)
if !n.Value.Exists() {
continue
}
if started {
*buf = append(*buf, ',')
}
started = true
if err := n.encode(buf); err != nil {
return err
}
}
*buf = append(*buf, '}')
return nil
}
}
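The started flag replaces the old "encode element 0, then comma-prefix the rest" shape: with linkedNodes/linkedPairs a slot may have been unset (!n.Exists()), so the first existing child is not necessarily at index 0 and skipped slots must not leave dangling commas. A self-contained sketch of the same comma-placement pattern over a slice with holes (hypothetical data, not the sonic types):

package main

import "fmt"

func main() {
    // nil entries stand in for deleted (non-existent) nodes
    items := []*string{nil, strptr("a"), nil, strptr("b"), strptr("c")}

    buf := []byte{'['}
    var started bool
    for _, it := range items {
        if it == nil {
            continue // a skipped slot must not emit a comma
        }
        if started {
            buf = append(buf, ',')
        }
        started = true
        buf = append(buf, '"')
        buf = append(buf, *it...)
        buf = append(buf, '"')
    }
    buf = append(buf, ']')
    fmt.Println(string(buf)) // ["a","b","c"]
}

func strptr(s string) *string { return &s }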

View File

@@ -8,6 +8,33 @@ import (
`github.com/bytedance/sonic/internal/native/types`
)
func newError(err types.ParsingError, msg string) *Node {
return &Node{
t: V_ERROR,
l: uint(err),
p: unsafe.Pointer(&msg),
}
}
// Error returns error message if the node is invalid
func (self Node) Error() string {
if self.t != V_ERROR {
return ""
} else {
return *(*string)(self.p)
}
}
func newSyntaxError(err SyntaxError) *Node {
msg := err.Description()
return &Node{
t: V_ERROR,
l: uint(err.Code),
p: unsafe.Pointer(&msg),
}
}
func (self *Parser) syntaxError(err types.ParsingError) SyntaxError {
return SyntaxError{
Pos : self.p,
@@ -16,12 +43,17 @@ func (self *Parser) syntaxError(err types.ParsingError) SyntaxError {
}
}
func newSyntaxError(err SyntaxError) *Node {
msg := err.Description()
return &Node{
t: V_ERROR,
v: int64(err.Code),
p: unsafe.Pointer(&msg),
func unwrapError(err error) *Node {
if se, ok := err.(*Node); ok {
return se
} else if sse, ok := err.(Node); ok {
return &sse
} else {
msg := err.Error()
return &Node{
t: V_ERROR,
p: unsafe.Pointer(&msg),
}
}
}
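Because Node gains a value-receiver Error() method above, a V_ERROR node itself satisfies the error interface, which is what lets unwrapError hand back the original node instead of re-wrapping its message. A package-internal sketch using only the names defined in this file:

n := newError(_ERR_UNSUPPORT_TYPE, "unsupported type")
var err error = n        // *Node implements error through Node.Error()
same := unwrapError(err) // type-asserts back to the original *Node
println(same.Error())    // "unsupported type"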

View File

@@ -82,26 +82,54 @@ type ObjectIterator struct {
Iterator
}
func (self *ListIterator) next() *Node {
next_start:
if !self.HasNext() {
return nil
} else {
n := self.p.nodeAt(self.i)
self.i++
if !n.Exists() {
goto next_start
}
return n
}
}
// Next scans through children of underlying V_ARRAY,
// copies each child to v, and returns .HasNext().
func (self *ListIterator) Next(v *Node) bool {
if !self.HasNext() {
n := self.next()
if n == nil {
return false
}
*v = *n
return true
}
func (self *ObjectIterator) next() *Pair {
next_start:
if !self.HasNext() {
return nil
} else {
*v, self.i = *self.p.nodeAt(self.i), self.i + 1
return true
n := self.p.pairAt(self.i)
self.i++
if !n.Value.Exists() {
goto next_start
}
return n
}
}
// Next scans through children of underlying V_OBJECT,
// copies each child to v, and returns .HasNext().
func (self *ObjectIterator) Next(p *Pair) bool {
if !self.HasNext() {
n := self.next()
if n == nil {
return false
} else {
*p, self.i = *self.p.pairAt(self.i), self.i + 1
return true
}
*p = *n
return true
}
// Sequence represents scanning path of single-layer nodes.
@@ -129,36 +157,39 @@ type Scanner func(path Sequence, node *Node) bool
//
// Especially, if the node is not V_ARRAY or V_OBJECT,
// the node itself will be returned and Sequence.Index == -1.
//
// NOTICE: An unset node WON'T trigger sc, but its index still counts into Path.Index
func (self *Node) ForEach(sc Scanner) error {
switch self.itype() {
case types.V_ARRAY:
ns, err := self.UnsafeArray()
iter, err := self.Values()
if err != nil {
return err
}
for i := range ns {
if !sc(Sequence{i, nil}, &ns[i]) {
return err
v := iter.next()
for v != nil {
if !sc(Sequence{iter.i-1, nil}, v) {
return nil
}
v = iter.next()
}
case types.V_OBJECT:
ns, err := self.UnsafeMap()
iter, err := self.Properties()
if err != nil {
return err
}
for i := range ns {
if !sc(Sequence{i, &ns[i].Key}, &ns[i].Value) {
return err
v := iter.next()
for v != nil {
if !sc(Sequence{iter.i-1, &v.Key}, &v.Value) {
return nil
}
v = iter.next()
}
default:
if self.Check() != nil {
return self
}
sc(Sequence{-1, nil}, self)
}
return self.Check()
return nil
}
type PairSlice []Pair
func (self PairSlice) Sort() {
radixQsort(self, 0, maxDepth(len(self)))
}
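Together with the NOTICE above, the iterator-backed ForEach silently skips children that were unset while still advancing their positions. A usage sketch against the public sonic/ast API (sonic.GetFromString, Node.Unset, and Node.Interface are assumed to be available in this version; adjust to your entry points):

package main

import (
    "fmt"

    "github.com/bytedance/sonic"
    "github.com/bytedance/sonic/ast"
)

func main() {
    root, _ := sonic.GetFromString(`{"a":1,"b":2,"c":3}`)
    root.Unset("b") // leaves an unset slot behind instead of shifting children

    _ = root.ForEach(func(path ast.Sequence, node *ast.Node) bool {
        // the unset "b" never reaches this callback, but its slot still counts,
        // so "c" is reported with path.Index == 2
        v, _ := node.Interface()
        fmt.Println(path.Index, *path.Key, v)
        return true // returning false stops the scan early
    })
}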

File diff suppressed because it is too large

View File

@@ -18,11 +18,15 @@ package ast
import (
`fmt`
`github.com/bytedance/sonic/internal/native/types`
`github.com/bytedance/sonic/internal/rt`
)
const _DEFAULT_NODE_CAP int = 16
const (
_DEFAULT_NODE_CAP int = 8
_APPEND_GROW_SHIFT = 1
)
const (
_ERR_NOT_FOUND types.ParsingError = 33
@@ -30,7 +34,10 @@ const (
)
var (
// ErrNotExist means both the key and the value do not exist
ErrNotExist error = newError(_ERR_NOT_FOUND, "value not exists")
// ErrUnsupportType means the API is not supported on this node's type
ErrUnsupportType error = newError(_ERR_UNSUPPORT_TYPE, "unsupported type")
)
@@ -39,6 +46,7 @@ type Parser struct {
s string
noLazy bool
skipValue bool
dbuf *byte
}
/** Parser Private Methods **/
@@ -107,7 +115,7 @@ func (self *Parser) lspace(sp int) int {
return sp
}
func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
func (self *Parser) decodeArray(ret *linkedNodes) (Node, types.ParsingError) {
sp := self.p
ns := len(self.s)
@@ -119,7 +127,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
/* check for empty array */
if self.s[self.p] == ']' {
self.p++
return emptyArrayNode, 0
return Node{t: types.V_ARRAY}, 0
}
/* allocate array space and parse every element */
@@ -149,7 +157,7 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
}
/* add the value to result */
ret = append(ret, val)
ret.Add(val)
self.p = self.lspace(self.p)
/* check for EOF */
@@ -160,17 +168,17 @@ func (self *Parser) decodeArray(ret []Node) (Node, types.ParsingError) {
/* check for the next character */
switch self.s[self.p] {
case ',' : self.p++
case ']' : self.p++; return NewArray(ret), 0
default:
if val.isLazy() {
return newLazyArray(self, ret), 0
}
return Node{}, types.ERR_INVALID_CHAR
case ']' : self.p++; return newArray(ret), 0
default:
// if val.isLazy() {
// return newLazyArray(self, ret), 0
// }
return Node{}, types.ERR_INVALID_CHAR
}
}
}
func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
func (self *Parser) decodeObject(ret *linkedPairs) (Node, types.ParsingError) {
sp := self.p
ns := len(self.s)
@@ -182,7 +190,7 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
/* check for empty object */
if self.s[self.p] == '}' {
self.p++
return emptyObjectNode, 0
return Node{t: types.V_OBJECT}, 0
}
/* decode each pair */
@@ -235,7 +243,8 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
}
/* add the value to result */
ret = append(ret, Pair{Key: key, Value: val})
// FIXME: ret's address may change here, so nodes previously referenced from ret may become invalid!!
ret.Add(Pair{Key: key, Value: val})
self.p = self.lspace(self.p)
/* check for EOF */
@@ -246,11 +255,11 @@ func (self *Parser) decodeObject(ret []Pair) (Node, types.ParsingError) {
/* check for the next character */
switch self.s[self.p] {
case ',' : self.p++
case '}' : self.p++; return NewObject(ret), 0
case '}' : self.p++; return newObject(ret), 0
default:
if val.isLazy() {
return newLazyObject(self, ret), 0
}
// if val.isLazy() {
// return newLazyObject(self, ret), 0
// }
return Node{}, types.ERR_INVALID_CHAR
}
}
@@ -290,15 +299,23 @@ func (self *Parser) Parse() (Node, types.ParsingError) {
case types.V_FALSE : return falseNode, 0
case types.V_STRING : return self.decodeString(val.Iv, val.Ep)
case types.V_ARRAY:
if self.noLazy {
return self.decodeArray(make([]Node, 0, _DEFAULT_NODE_CAP))
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == ']' {
self.p = p + 1
return Node{t: types.V_ARRAY}, 0
}
return newLazyArray(self, make([]Node, 0, _DEFAULT_NODE_CAP)), 0
if self.noLazy {
return self.decodeArray(new(linkedNodes))
}
return newLazyArray(self), 0
case types.V_OBJECT:
if self.noLazy {
return self.decodeObject(make([]Pair, 0, _DEFAULT_NODE_CAP))
if p := skipBlank(self.s, self.p); p >= self.p && self.s[p] == '}' {
self.p = p + 1
return Node{t: types.V_OBJECT}, 0
}
return newLazyObject(self, make([]Pair, 0, _DEFAULT_NODE_CAP)), 0
if self.noLazy {
return self.decodeObject(new(linkedPairs))
}
return newLazyObject(self), 0
case types.V_DOUBLE : return NewNumber(self.s[val.Ep:self.p]), 0
case types.V_INTEGER : return NewNumber(self.s[val.Ep:self.p]), 0
default : return Node{}, types.ParsingError(-val.Vt)
@@ -429,7 +446,7 @@ func (self *Node) skipNextNode() *Node {
}
parser, stack := self.getParserAndArrayStack()
ret := stack.v
ret := &stack.v
sp := parser.p
ns := len(parser.s)
@@ -458,7 +475,8 @@ func (self *Node) skipNextNode() *Node {
}
/* add the value to result */
ret = append(ret, val)
ret.Add(val)
self.l++
parser.p = parser.lspace(parser.p)
/* check for EOF */
@@ -470,12 +488,11 @@ func (self *Node) skipNextNode() *Node {
switch parser.s[parser.p] {
case ',':
parser.p++
self.setLazyArray(parser, ret)
return &ret[len(ret)-1]
return ret.At(ret.Len()-1)
case ']':
parser.p++
self.setArray(ret)
return &ret[len(ret)-1]
return ret.At(ret.Len()-1)
default:
return newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))
}
@@ -487,7 +504,7 @@ func (self *Node) skipNextPair() (*Pair) {
}
parser, stack := self.getParserAndObjectStack()
ret := stack.v
ret := &stack.v
sp := parser.p
ns := len(parser.s)
@@ -541,7 +558,8 @@ func (self *Node) skipNextPair() (*Pair) {
}
/* add the value to result */
ret = append(ret, Pair{Key: key, Value: val})
ret.Add(Pair{Key: key, Value: val})
self.l++
parser.p = parser.lspace(parser.p)
/* check for EOF */
@@ -553,12 +571,11 @@ func (self *Node) skipNextPair() (*Pair) {
switch parser.s[parser.p] {
case ',':
parser.p++
self.setLazyObject(parser, ret)
return &ret[len(ret)-1]
return ret.At(ret.Len()-1)
case '}':
parser.p++
self.setObject(ret)
return &ret[len(ret)-1]
return ret.At(ret.Len()-1)
default:
return &Pair{key, *newSyntaxError(parser.syntaxError(types.ERR_INVALID_CHAR))}
}
@@ -601,10 +618,30 @@ func LoadsUseNumber(src string) (int, interface{}, error) {
}
}
// NewParser returns a pointer to a newly allocated parser
func NewParser(src string) *Parser {
return &Parser{s: src}
}
// NewParserObj returns a new parser instance (by value)
func NewParserObj(src string) Parser {
return Parser{s: src}
}
// decodeNumber controls whether the parser decodes number values instead of skipping them.
// WARN: once you call decodeNumber(true), call decodeNumber(false) before you drop the parser,
// otherwise the digit buffer CANNOT be reused
func (self *Parser) decodeNumber(decode bool) {
if !decode && self.dbuf != nil {
types.FreeDbuf(self.dbuf)
self.dbuf = nil
return
}
if decode && self.dbuf == nil {
self.dbuf = types.NewDbuf()
}
}
// ExportError converts types.ParsingError to std Error
func (self *Parser) ExportError(err types.ParsingError) error {
if err == _ERR_NOT_FOUND {
@@ -615,4 +652,4 @@ func (self *Parser) ExportError(err types.ParsingError) error {
Src : self.s,
Code: err,
}.Description())
}
}
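decodeNumber borrows a digit buffer via types.NewDbuf, and the WARN above means every decodeNumber(true) must be balanced by decodeNumber(false) so the buffer is returned through types.FreeDbuf and can be reused. A package-internal sketch of the intended pairing (illustrative; it only uses Parser, Parse and decodeNumber as defined above):

func parseWithNumbers(src string) (Node, types.ParsingError) {
    p := NewParserObj(src)
    p.decodeNumber(true)        // borrow a digit buffer so numbers are decoded eagerly
    defer p.decodeNumber(false) // release it before the parser is dropped
    return p.Parse()
}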

View File

@@ -1,206 +0,0 @@
/*
* Copyright 2021 ByteDance Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ast
// Algorithm 3-way Radix Quicksort, d means the radix.
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
func radixQsort(kvs PairSlice, d, maxDepth int) {
for len(kvs) > 11 {
// To avoid the worst case of quickSort (time: O(n^2)), use introsort here.
// Reference: https://en.wikipedia.org/wiki/Introsort and
// https://github.com/golang/go/issues/467
if maxDepth == 0 {
heapSort(kvs, 0, len(kvs))
return
}
maxDepth--
p := pivot(kvs, d)
lt, i, gt := 0, 0, len(kvs)
for i < gt {
c := byteAt(kvs[i].Key, d)
if c < p {
swap(kvs, lt, i)
i++
lt++
} else if c > p {
gt--
swap(kvs, i, gt)
} else {
i++
}
}
// kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)]
// Naive implementation:
// radixQsort(kvs[:lt], d, maxDepth)
// if p > -1 {
// radixQsort(kvs[lt:gt], d+1, maxDepth)
// }
// radixQsort(kvs[gt:], d, maxDepth)
// Optimize as follows: make recursive calls only for the smaller parts.
// Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/
if p == -1 {
if lt > len(kvs) - gt {
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else {
radixQsort(kvs[:lt], d, maxDepth)
kvs = kvs[gt:]
}
} else {
ml := maxThree(lt, gt-lt, len(kvs)-gt)
if ml == lt {
radixQsort(kvs[lt:gt], d+1, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else if ml == gt-lt {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[lt:gt]
d += 1
} else {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[lt:gt], d+1, maxDepth)
kvs = kvs[gt:]
}
}
}
insertRadixSort(kvs, d)
}
func insertRadixSort(kvs PairSlice, d int) {
for i := 1; i < len(kvs); i++ {
for j := i; j > 0 && lessFrom(kvs[j].Key, kvs[j-1].Key, d); j-- {
swap(kvs, j, j-1)
}
}
}
func pivot(kvs PairSlice, d int) int {
m := len(kvs) >> 1
if len(kvs) > 40 {
// Tukey's ``Ninther,'' median of three medians of three.
t := len(kvs) / 8
return medianThree(
medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[t].Key, d), byteAt(kvs[2*t].Key, d)),
medianThree(byteAt(kvs[m].Key, d), byteAt(kvs[m-t].Key, d), byteAt(kvs[m+t].Key, d)),
medianThree(byteAt(kvs[len(kvs)-1].Key, d),
byteAt(kvs[len(kvs)-1-t].Key, d),
byteAt(kvs[len(kvs)-1-2*t].Key, d)))
}
return medianThree(byteAt(kvs[0].Key, d), byteAt(kvs[m].Key, d), byteAt(kvs[len(kvs)-1].Key, d))
}
func medianThree(i, j, k int) int {
if i > j {
i, j = j, i
} // i < j
if k < i {
return i
}
if k > j {
return j
}
return k
}
func maxThree(i, j, k int) int {
max := i
if max < j {
max = j
}
if max < k {
max = k
}
return max
}
// maxDepth returns a threshold at which quicksort should switch
// to heapsort. It returns 2*ceil(lg(n+1)).
func maxDepth(n int) int {
var depth int
for i := n; i > 0; i >>= 1 {
depth++
}
return depth * 2
}
// siftDown implements the heap property on kvs[lo:hi].
// first is an offset into the array where the root of the heap lies.
func siftDown(kvs PairSlice, lo, hi, first int) {
root := lo
for {
child := 2*root + 1
if child >= hi {
break
}
if child+1 < hi && kvs[first+child].Key < kvs[first+child+1].Key {
child++
}
if kvs[first+root].Key >= kvs[first+child].Key {
return
}
swap(kvs, first+root, first+child)
root = child
}
}
func heapSort(kvs PairSlice, a, b int) {
first := a
lo := 0
hi := b - a
// Build heap with the greatest element at top.
for i := (hi - 1) / 2; i >= 0; i-- {
siftDown(kvs, i, hi, first)
}
// Pop elements, the largest first, into end of kvs.
for i := hi - 1; i >= 0; i-- {
swap(kvs, first, first+i)
siftDown(kvs, lo, i, first)
}
}
// Note that Pair.Key is NOT pointed to Pair.m when map key is integer after swap
func swap(kvs PairSlice, a, b int) {
kvs[a].Key, kvs[b].Key = kvs[b].Key, kvs[a].Key
kvs[a].Value, kvs[b].Value = kvs[b].Value, kvs[a].Value
}
// Compare two strings starting from position d.
func lessFrom(a, b string, d int) bool {
l := len(a)
if l > len(b) {
l = len(b)
}
for i := d; i < l; i++ {
if a[i] == b[i] {
continue
}
return a[i] < b[i]
}
return len(a) < len(b)
}
func byteAt(b string, p int) int {
if p < len(b) {
return int(b[p])
}
return -1
}
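For reference, the removed file implemented the 3-way radix quicksort behind PairSlice.Sort (shown in the iterator file above). Assuming that entry point still resolves to an equivalent sort after this commit (the relocated code may sit in the suppressed large diff), a minimal sketch of the ordering it produces, using Pair{Key, Value} and NewNumber as they appear elsewhere in this diff; byteAt returns -1 past the end of a string, so shorter prefixes sort first:

kvs := PairSlice{
    {Key: "banana", Value: NewNumber("1")},
    {Key: "apple", Value: NewNumber("2")},
    {Key: "app", Value: NewNumber("3")},
}
kvs.Sort() // radixQsort(kvs, 0, maxDepth(len(kvs)))
// keys now come out byte-lexicographically: "app", "apple", "banana"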

View File

@@ -52,4 +52,4 @@ var (
)
//go:linkname unquoteBytes encoding/json.unquoteBytes
func unquoteBytes(s []byte) (t []byte, ok bool)
func unquoteBytes(s []byte) (t []byte, ok bool)