diff --git a/cli/upgrade_test.go b/cli/upgrade_test.go index d1499453e..249b7744b 100644 --- a/cli/upgrade_test.go +++ b/cli/upgrade_test.go @@ -3,7 +3,6 @@ package cli import ( "archive/tar" "compress/gzip" - "encoding/json" "fmt" "io" "io/ioutil" @@ -18,6 +17,7 @@ import ( "github.com/qiniu/log" "github.com/qiniu/logkit/utils" + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/stretchr/testify/assert" ) @@ -355,7 +355,7 @@ func (m *mockGithub) respFunction(c echo.Context, data map[string]interface{}) e c.Response().Header().Set(RateLimitReset, data[RateLimitReset].(string)) c.Response().Header().Set(RateLimitRemaining, data[RateLimitRemaining].(string)) c.Response().WriteHeader(data["statusCode"].(int)) - return json.NewEncoder(c.Response()).Encode(data["data"]) + return jsoniter.NewEncoder(c.Response()).Encode(data["data"]) } // 请求含有错误参数,该函数通过错误参数来构造不同的错误 diff --git a/mgr/api_metric_test.go b/mgr/api_metric_test.go index 686d6aac6..8fc670b4c 100644 --- a/mgr/api_metric_test.go +++ b/mgr/api_metric_test.go @@ -1,9 +1,9 @@ package mgr import ( - "encoding/json" "net/http" + "github.com/json-iterator/go" "github.com/qiniu/logkit/metric" "github.com/qiniu/logkit/utils" "github.com/stretchr/testify/assert" @@ -33,7 +33,7 @@ func metricAPITest(p *testParam) { respCode, respBody, err := makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got1); err != nil { + if err = jsoniter.Unmarshal(respBody, &got1); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, metric.GetMetricUsages(), got1.Data) @@ -43,7 +43,7 @@ func metricAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if 
err = json.Unmarshal(respBody, &got2); err != nil { + if err = jsoniter.Unmarshal(respBody, &got2); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, metric.GetMetricOptions(), got2.Data) @@ -53,7 +53,7 @@ func metricAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got3); err != nil { + if err = jsoniter.Unmarshal(respBody, &got3); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, metric.GetMetricTypeKey(), got3.Data) diff --git a/mgr/api_parser_test.go b/mgr/api_parser_test.go index bee4dcc68..0bc423a35 100644 --- a/mgr/api_parser_test.go +++ b/mgr/api_parser_test.go @@ -1,9 +1,9 @@ package mgr import ( - "encoding/json" "net/http" + "github.com/json-iterator/go" conf2 "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/parser" "github.com/qiniu/logkit/sender" @@ -24,7 +24,7 @@ func parserParseTest(p *testParam) { rawConf := conf2.MapConf{} rawConf[KeySampleLog] = parser.SampleLogs[parser.TypeRaw] rawConf[parser.KeyParserType] = parser.TypeRaw - rawpst, err := json.Marshal(rawConf) + rawpst, err := jsoniter.Marshal(rawConf) assert.NoError(t, err) url := "http://127.0.0.1" + rs.address + "/logkit/parser/parse" respCode, respBody, err := makeRequest(url, http.MethodPost, rawpst) @@ -32,7 +32,7 @@ func parserParseTest(p *testParam) { assert.Equal(t, http.StatusOK, respCode) var got1 respParserRet - err = json.Unmarshal(respBody, &got1) + err = jsoniter.Unmarshal(respBody, &got1) assert.NoError(t, err, string(respBody)) assert.Equal(t, 4, len(got1.Data.SamplePoints)) @@ -41,13 +41,13 @@ func parserParseTest(p *testParam) { jsonConf := conf2.MapConf{} jsonConf[KeySampleLog] = parser.SampleLogs[parser.TypeJson] jsonConf[parser.KeyParserType] = parser.TypeJson - 
rawpst, err = json.Marshal(jsonConf) + rawpst, err = jsoniter.Marshal(jsonConf) assert.NoError(t, err) url = "http://127.0.0.1" + rs.address + "/logkit/parser/parse" respCode, respBody, err = makeRequest(url, http.MethodPost, rawpst) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - err = json.Unmarshal(respBody, &got2) + err = jsoniter.Unmarshal(respBody, &got2) if err != nil { t.Error(err) } @@ -64,13 +64,13 @@ func parserParseTest(p *testParam) { grokConf[KeySampleLog] = parser.SampleLogs[parser.TypeGrok] grokConf[parser.KeyParserType] = parser.TypeGrok grokConf[parser.KeyGrokPatterns] = "%{COMMON_LOG_FORMAT}" - rawpst, err = json.Marshal(grokConf) + rawpst, err = jsoniter.Marshal(grokConf) assert.NoError(t, err) url = "http://127.0.0.1" + rs.address + "/logkit/parser/parse" respCode, respBody, err = makeRequest(url, http.MethodPost, rawpst) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - err = json.Unmarshal(respBody, &got3) + err = jsoniter.Unmarshal(respBody, &got3) if err != nil { t.Error(err) } @@ -97,7 +97,7 @@ func parserAPITest(p *testParam) { respCode, respBody, err := makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got1); err != nil { + if err = jsoniter.Unmarshal(respBody, &got1); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, parser.ModeUsages, got1.Data) @@ -107,7 +107,7 @@ func parserAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got2); err != nil { + if err = jsoniter.Unmarshal(respBody, &got2); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, parser.ModeKeyOptions, got2.Data) @@ -117,7 +117,7 @@ func 
parserAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got3); err != nil { + if err = jsoniter.Unmarshal(respBody, &got3); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, parser.SampleLogs, got3.Data) diff --git a/mgr/api_reader_test.go b/mgr/api_reader_test.go index bb3fb813e..dc539786f 100644 --- a/mgr/api_reader_test.go +++ b/mgr/api_reader_test.go @@ -1,9 +1,9 @@ package mgr import ( - "encoding/json" "net/http" + "github.com/json-iterator/go" "github.com/qiniu/logkit/reader" "github.com/stretchr/testify/assert" ) @@ -18,7 +18,7 @@ func readerAPITest(p *testParam) { respCode, respBody, err := makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got1); err != nil { + if err = jsoniter.Unmarshal(respBody, &got1); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, reader.ModeUsages, got1.Data) @@ -28,7 +28,7 @@ func readerAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got2); err != nil { + if err = jsoniter.Unmarshal(respBody, &got2); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, reader.ModeKeyOptions, got2.Data) diff --git a/mgr/api_sender_test.go b/mgr/api_sender_test.go index ecafadc82..32f08b663 100644 --- a/mgr/api_sender_test.go +++ b/mgr/api_sender_test.go @@ -1,9 +1,9 @@ package mgr import ( - "encoding/json" "net/http" + "github.com/json-iterator/go" "github.com/qiniu/logkit/sender" 
"github.com/stretchr/testify/assert" ) @@ -18,7 +18,7 @@ func senderAPITest(p *testParam) { respCode, respBody, err := makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got1); err != nil { + if err = jsoniter.Unmarshal(respBody, &got1); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, sender.ModeUsages, got1.Data) @@ -28,7 +28,7 @@ func senderAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got2); err != nil { + if err = jsoniter.Unmarshal(respBody, &got2); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, sender.ModeKeyOptions, got2.Data) diff --git a/mgr/api_transformer.go b/mgr/api_transformer.go index 4f9c6711d..996ac627b 100644 --- a/mgr/api_transformer.go +++ b/mgr/api_transformer.go @@ -1,10 +1,10 @@ package mgr import ( - "encoding/json" "fmt" "net/http" + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/transforms" @@ -93,9 +93,9 @@ func (rs *RestService) PostTransform() echo.HandlerFunc { if rawLogs, ok = (reqConf[KeySampleLog]).(string); !ok { return RespError(c, http.StatusBadRequest, utils.ErrTransformTransform, fmt.Sprintf("missing param %s", KeySampleLog)) } - if jsonErr = json.Unmarshal([]byte(rawLogs), &singleData); jsonErr != nil { + if jsonErr = jsoniter.Unmarshal([]byte(rawLogs), &singleData); jsonErr != nil { // may be multiple sample logs - if jsonErr = json.Unmarshal([]byte(rawLogs), &data); jsonErr != nil { + if jsonErr = jsoniter.Unmarshal([]byte(rawLogs), &data); jsonErr != nil { // invalid JSON, neither multiple sample logs nor single sample log 
return RespError(c, http.StatusBadRequest, utils.ErrTransformTransform, jsonErr.Error()) } @@ -107,10 +107,10 @@ func (rs *RestService) PostTransform() echo.HandlerFunc { trans = create() reqConf = convertWebTransformerConfig(reqConf) delete(reqConf, KeySampleLog) - if bts, jsonErr = json.Marshal(reqConf); jsonErr != nil { + if bts, jsonErr = jsoniter.Marshal(reqConf); jsonErr != nil { return RespError(c, http.StatusBadRequest, utils.ErrTransformTransform, jsonErr.Error()) } - if jsonErr = json.Unmarshal(bts, trans); jsonErr != nil { + if jsonErr = jsoniter.Unmarshal(bts, trans); jsonErr != nil { return RespError(c, http.StatusBadRequest, utils.ErrTransformTransform, jsonErr.Error()) } if trans, ok := trans.(transforms.Initialize); ok { diff --git a/mgr/api_transformer_test.go b/mgr/api_transformer_test.go index 7326cfe46..6c634a15d 100644 --- a/mgr/api_transformer_test.go +++ b/mgr/api_transformer_test.go @@ -1,9 +1,9 @@ package mgr import ( - "encoding/json" "net/http" + "github.com/json-iterator/go" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/transforms" _ "github.com/qiniu/logkit/transforms/date" @@ -25,7 +25,7 @@ func transformerAPITest(p *testParam) { respCode, respBody, err := makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got1); err != nil { + if err = jsoniter.Unmarshal(respBody, &got1); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } assert.Equal(t, len(transforms.Transformers), len(got1.Data)) @@ -35,7 +35,7 @@ func transformerAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got2); err != nil { + if err = jsoniter.Unmarshal(respBody, &got2); err != nil { t.Fatalf("respBody %v unmarshal 
failed, error is %v", respBody, err) } assert.Equal(t, len(transforms.Transformers), len(got2.Data)) @@ -54,7 +54,7 @@ func transformerAPITest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodPost, []byte(dateTransformerConfig)) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - if err = json.Unmarshal(respBody, &got3); err != nil { + if err = jsoniter.Unmarshal(respBody, &got3); err != nil { t.Fatalf("respBody %v unmarshal failed, error is %v", respBody, err) } exp := []sender.Data{{"ts": "2006-01-02T14:04:05Z"}} diff --git a/mgr/cluster.go b/mgr/cluster.go index 105bed2ce..923403963 100644 --- a/mgr/cluster.go +++ b/mgr/cluster.go @@ -1,23 +1,19 @@ package mgr import ( + "bytes" "errors" + "fmt" + "io/ioutil" "net/http" + "strings" "sync" - - "github.com/qiniu/logkit/utils" - - "bytes" - "encoding/json" - "io/ioutil" - - "fmt" "time" - "strings" - + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/log" + "github.com/qiniu/logkit/utils" ) type ClusterConfig struct { @@ -45,13 +41,13 @@ type Slave struct { type ClusterStatus struct { Status map[string]RunnerStatus `json:"status"` Tag string `json:"tag"` - Err error `json:"error"` + Err string `json:"error"` } type SlaveConfig struct { Configs map[string]RunnerConfig `json:"configs"` Tag string `json:"tag"` - Err error `json:"error"` + Err string `json:"error"` } type respRunnersNameList struct { @@ -210,7 +206,7 @@ func (rs *RestService) GetClusterRunners() echo.HandlerFunc { log.Errorf("get slave(tag='%v', url='%v') runner name list failed, resp is %v, error is %v", v.Tag, v.Url, string(respBody), err.Error()) return } else { - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { log.Errorf("unmarshal slave(tag='%v', url='%v') runner name list failed, error is %v", v.Tag, v.Url, err.Error()) } else { mutex.Lock() 
@@ -258,10 +254,10 @@ func (rs *RestService) ClusterStatus() echo.HandlerFunc { respCode, respBody, err := executeToOneCluster(url, http.MethodGet, []byte{}) if err != nil || respCode != http.StatusOK { errInfo := fmt.Errorf("%v %v", string(respBody), err) - cs.Err = errInfo + cs.Err = errInfo.Error() } else { - if err = json.Unmarshal(respBody, &respRss); err != nil { - cs.Err = fmt.Errorf("unmarshal query result error %v, body is %v", err, string(respBody)) + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { + cs.Err = fmt.Sprintf("unmarshal query result error %v, body is %v", err, string(respBody)) } else { cs.Status = respRss.Data } @@ -301,7 +297,7 @@ func (rs *RestService) GetClusterConfig() echo.HandlerFunc { lastErrMsg = fmt.Sprintf("get slave(tag = '%v'', url = '%v') config failed resp is %v, error is %v", tag, url, string(respBody), err) continue } else { - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { lastErrMsg = fmt.Sprintf("get slave(tag = '%v'', url = '%v') config unmarshal failed, resp is %v, error is %v", tag, url, string(respBody), err) continue } else { @@ -346,10 +342,10 @@ func (rs *RestService) GetClusterConfigs() echo.HandlerFunc { respCode, respBody, err := executeToOneCluster(url, http.MethodGet, []byte{}) if err != nil || respCode != http.StatusOK { errInfo := fmt.Errorf("%v %v", string(respBody), err) - sc.Err = errInfo + sc.Err = errInfo.Error() } else { - if err = json.Unmarshal(respBody, &respRss); err != nil { - sc.Err = fmt.Errorf("unmarshal query result error %v, body is %v", err, string(respBody)) + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { + sc.Err = fmt.Sprintf("unmarshal query result error %v, body is %v", err, string(respBody)) } else { sc.Configs = respRss.Data } @@ -714,7 +710,7 @@ func registerOne(master, myhost, tag string) error { return errors.New("master host is not configed") } req := RegisterReq{Url: myhost, Tag: 
tag} - data, err := json.Marshal(req) + data, err := jsoniter.Marshal(req) if err != nil { return err } diff --git a/mgr/cluster_test.go b/mgr/cluster_test.go index c461f8a39..c7e91f14c 100644 --- a/mgr/cluster_test.go +++ b/mgr/cluster_test.go @@ -2,7 +2,6 @@ package mgr import ( "bufio" - "encoding/json" "io" "net/http" "os" @@ -11,7 +10,11 @@ import ( "testing" "time" + "encoding/json" + + "github.com/json-iterator/go" "github.com/labstack/echo" + "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/reader" "github.com/stretchr/testify/assert" ) @@ -190,7 +193,7 @@ func clusterUpdateTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respGotStatus respClusterStatus - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus := respGotStatus.Data @@ -259,7 +262,7 @@ func clusterUpdateTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus = respGotStatus.Data @@ -323,7 +326,7 @@ func clusterStartStopTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respGotStatus respClusterStatus - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus := respGotStatus.Data @@ -379,7 +382,7 @@ func clusterStartStopTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, 
string(respBody)) gotStatus = respGotStatus.Data @@ -437,7 +440,7 @@ func clusterStartStopTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus = respGotStatus.Data @@ -477,7 +480,7 @@ func clusterStartStopTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus = respGotStatus.Data @@ -536,7 +539,7 @@ func clusterStartStopTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus = respGotStatus.Data @@ -647,7 +650,7 @@ func clusterResetDeleteTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus := respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus := respGotStatus.Data @@ -673,7 +676,7 @@ func clusterResetDeleteTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotStatus = respClusterStatus{} - err = json.Unmarshal(respBody, &respGotStatus) + err = jsoniter.Unmarshal(respBody, &respGotStatus) assert.NoError(t, err, string(respBody)) gotStatus = respGotStatus.Data @@ -723,12 +726,13 @@ func clusterSalveConfigsTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) 
respGotConfigs := respSlaveConfig{} - err = json.Unmarshal(respBody, &respGotConfigs) + err = jsoniter.Unmarshal(respBody, &respGotConfigs) + assert.NoError(t, err, string(respBody)) gotConfigs := respGotConfigs.Data rc := RunnerConfig{} - err = json.Unmarshal([]byte(runnerConf), &rc) + err = jsoniter.Unmarshal([]byte(runnerConf), &rc) assert.NoError(t, err) sc, ok := gotConfigs[rs[1].cluster.Address] @@ -755,7 +759,7 @@ func clusterSalveConfigsTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotConfigs = respSlaveConfig{} - err = json.Unmarshal(respBody, &respGotConfigs) + err = jsoniter.Unmarshal(respBody, &respGotConfigs) assert.NoError(t, err, string(respBody)) gotConfigs = respGotConfigs.Data assert.Equal(t, logkitCount-1, len(gotConfigs)) @@ -766,12 +770,12 @@ func clusterSalveConfigsTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotConfigs = respSlaveConfig{} - err = json.Unmarshal(respBody, &respGotConfigs) + err = jsoniter.Unmarshal(respBody, &respGotConfigs) assert.NoError(t, err, string(respBody)) gotConfigs = respGotConfigs.Data rc = RunnerConfig{} - err = json.Unmarshal([]byte(runnerConf), &rc) + err = jsoniter.Unmarshal([]byte(runnerConf), &rc) assert.NoError(t, err) sc, ok = gotConfigs[rs[3].cluster.Address] @@ -801,7 +805,7 @@ func changeTagsTest(p *testCluParam) { // 测试通过 master 改变 slave tag req := TagReq{Tag: "test-test"} - marshaled, err := json.Marshal(req) + marshaled, err := jsoniter.Marshal(req) assert.NoError(t, err) url := rs[0].cluster.Address + "/logkit/cluster/slaves/tag?tag=" + rs[1].cluster.Tag respCode, respBody, err := makeRequest(url, http.MethodPost, marshaled) @@ -818,7 +822,7 @@ func changeTagsTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respGetSlaves respSlave - err = json.Unmarshal(respBody, &respGetSlaves) + err = jsoniter.Unmarshal(respBody, 
&respGetSlaves) assert.NoError(t, err) getSlaves := respGetSlaves.Data for i, _ := range getSlaves { @@ -840,7 +844,7 @@ func changeTagsTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGetSlaves = respSlave{} - err = json.Unmarshal(respBody, &respGetSlaves) + err = jsoniter.Unmarshal(respBody, &respGetSlaves) assert.NoError(t, err) getSlaves = respGetSlaves.Data for i := range getSlaves { @@ -873,7 +877,7 @@ func clusterSlavesDeleteTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respGetSlaves respSlave - err = json.Unmarshal(respBody, &respGetSlaves) + err = jsoniter.Unmarshal(respBody, &respGetSlaves) assert.NoError(t, err) getSlaves := respGetSlaves.Data for i, _ := range getSlaves { @@ -903,7 +907,7 @@ func clusterSlavesDeleteTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGetSlaves = respSlave{} - err = json.Unmarshal(respBody, &respGetSlaves) + err = jsoniter.Unmarshal(respBody, &respGetSlaves) assert.NoError(t, err) getSlaves = respGetSlaves.Data for i := range getSlaves { @@ -952,7 +956,7 @@ func getSlavesRunnerTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respRss respRunnersNameList - err = json.Unmarshal(respBody, &respRss) + err = jsoniter.Unmarshal(respBody, &respRss) assert.NoError(t, err, string(respBody)) nameList := respRss.Data isExist := false @@ -969,7 +973,7 @@ func getSlavesRunnerTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss = respRunnersNameList{} - err = json.Unmarshal(respBody, &respRss) + err = jsoniter.Unmarshal(respBody, &respRss) assert.NoError(t, err, string(respBody)) nameList = respRss.Data isExist = false @@ -1021,13 +1025,13 @@ func getSlaveConfigTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) 
assert.Equal(t, http.StatusOK, respCode) var respRss respRunnerConfig - err = json.Unmarshal(respBody, &respRss) + err = jsoniter.Unmarshal(respBody, &respRss) assert.NoError(t, err, string(respBody)) respConfig := respRss.Data respConfig.CreateTime = "" expConfig := RunnerConfig{} - err = json.Unmarshal([]byte(runnerConf), &expConfig) + err = jsoniter.Unmarshal([]byte(runnerConf), &expConfig) assert.NoError(t, err, runnerConf) expConfig.ReaderConfig["name"] = runnerName @@ -1044,3 +1048,50 @@ func getSlaveConfigTest(p *testCluParam) { assert.NoError(t, err, string(respBody)) assert.NotEqual(t, http.StatusOK, respCode) } + +func TestJsoniterMarshalUnmarshal(t *testing.T) { + runnerConf := RunnerConfig{ + RunnerInfo: RunnerInfo{ + RunnerName: "xxx", + MaxBatchLen: 1, + MaxBatchSize: 200, + CollectInterval: 1, + MaxBatchInteval: 1, + MaxBatchTryTimes: 3, + }, + ReaderConfig: conf.MapConf{ + "log_path": "sx", + "meta_path": "/xs/xs", + "mode": "dir", + "read_from": "oldest", + "ignore_hidden": "true", + }, + ParserConf: conf.MapConf{ + "type": "json", + "name": "json_parser", + }, + SenderConfig: []conf.MapConf{{ + "name": "file_sender", + "sender_type": "file", + "file_send_path": "/xsxs", + }}, + } + bt, err := jsoniter.Marshal(runnerConf) + assert.NoError(t, err) + var rc RunnerConfig + err = jsoniter.Unmarshal(bt, &rc) + assert.NoError(t, err) + assert.Equal(t, runnerConf, rc) + assert.Equal(t, "/xsxs", rc.SenderConfig[0]["file_send_path"]) +} + +func TestJsoniter(t *testing.T) { + respGotConfigs1, respGotConfigs2 := respSlaveConfig{}, respSlaveConfig{} + var teststring = 
`{"code":"L200","data":{"http://192.168.0.106:6202":{"configs":{"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/slave1/confs/clusterSalveConfigsTest.conf":{"name":"clusterSalveConfigsTest","collect_interval":1,"batch_len":1,"batch_size":200,"batch_interval":1,"batch_try_times":3,"createtime":"2018-01-03T22:25:36.497442704+08:00","reader":{"ignore_hidden":"true","log_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/logdir","meta_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/meta","mode":"dir","name":"clusterSalveConfigsTest","read_from":"oldest","runner_name":"clusterSalveConfigsTest"},"parser":{"name":"json_parser","runner_name":"clusterSalveConfigsTest","type":"json"},"senders":[{"file_send_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/sender/sendData","name":"file_sender","runner_name":"clusterSalveConfigsTest","sender_type":"file"}],"router":{"router_key_name":"","router_match_type":"","router_default_sender":0,"router_routes":null},"web_folder":true}},"tag":"test","error":null},"http://192.168.0.106:6203":{"configs":{"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/slave2/confs/clusterSalveConfigsTest.conf":{"name":"clusterSalveConfigsTest","collect_interval":1,"batch_len":1,"batch_size":200,"batch_interval":1,"batch_try_times":3,"createtime":"2018-01-03T22:25:36.497453622+08:00","reader":{"ignore_hidden":"true","log_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/logdir","meta_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/meta","mode":"dir","name":"clusterSalveConfigsTest","read_from":"oldest","runner_name":"clusterSalveConfigsTest"},"parser":{"nam
e":"json_parser","runner_name":"clusterSalveConfigsTest","type":"json"},"senders":[{"file_send_path":"/Users/sunjianbo/gopath/src/github.com/qiniu/logkit/mgr/testClusterRest/clusterSalveConfigsTestDir/sender/sendData","name":"file_sender","runner_name":"clusterSalveConfigsTest","sender_type":"file"}],"router":{"router_key_name":"","router_match_type":"","router_default_sender":0,"router_routes":null},"web_folder":true}},"tag":"test","error":null}}}` + err := json.Unmarshal([]byte(teststring), &respGotConfigs1) + assert.NoError(t, err) + stjson := jsoniter.ConfigCompatibleWithStandardLibrary + err = stjson.Unmarshal([]byte(teststring), &respGotConfigs2) + assert.NoError(t, err) + assert.Equal(t, respGotConfigs1, respGotConfigs2) +} diff --git a/mgr/metric_runner.go b/mgr/metric_runner.go index 9ade23095..3ed3ec8f1 100644 --- a/mgr/metric_runner.go +++ b/mgr/metric_runner.go @@ -1,7 +1,6 @@ package mgr import ( - "encoding/json" "errors" "fmt" "strings" @@ -9,6 +8,7 @@ import ( "sync/atomic" "time" + "github.com/json-iterator/go" "github.com/qiniu/log" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/metric" @@ -85,11 +85,11 @@ func NewMetricRunner(rc RunnerConfig, sr *sender.SenderRegistry) (runner *Metric err = nil continue } - configBytes, err := json.Marshal(m.Config) + configBytes, err := jsoniter.Marshal(m.Config) if err != nil { return nil, fmt.Errorf("metric %v marshal config error %v", tp, err) } - err = json.Unmarshal(configBytes, c) + err = jsoniter.Unmarshal(configBytes, c) if err != nil { return nil, fmt.Errorf("metric %v unmarshal config error %v", tp, err) } @@ -463,11 +463,11 @@ func createDiscardTransformer(key string) (transforms.Transformer, error) { "stage": "after_parser", } trans := creater() - bts, err := json.Marshal(tConf) + bts, err := jsoniter.Marshal(tConf) if err != nil { return nil, fmt.Errorf("type %v of transformer marshal config error %v", strTP, err) } - err = 
json.Unmarshal(bts, trans) + err = jsoniter.Unmarshal(bts, trans) if err != nil { return nil, fmt.Errorf("type %v of transformer unmarshal config error %v", strTP, err) } diff --git a/mgr/metric_runner_test.go b/mgr/metric_runner_test.go index b5923be31..8efcf3e1f 100644 --- a/mgr/metric_runner_test.go +++ b/mgr/metric_runner_test.go @@ -2,7 +2,6 @@ package mgr import ( "bufio" - "encoding/json" "io" "log" "net/http" @@ -11,6 +10,7 @@ import ( "testing" "time" + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/metric/system" @@ -35,7 +35,7 @@ func getMetricRunnerConfig(name string, mc []MetricConfig, senderPath string) ([ "file_send_path": senderPath, }}, } - return json.Marshal(runnerConf) + return jsoniter.Marshal(runnerConf) } func TestMetricRunner(t *testing.T) { @@ -149,7 +149,7 @@ func metricRunTest(p *testParam) { break } result := make([]map[string]interface{}, 0) - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("metricRunTest error unmarshal %v curLine = %v %v", string(str), curLine, err) } @@ -206,7 +206,7 @@ func metricRunTest(p *testParam) { break } result := make([]map[string]interface{}, 0) - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("metricRunTest error unmarshal %v curLine = %v %v", string(str), curLine, err) } @@ -270,7 +270,7 @@ func metricNetTest(p *testParam) { break } curLine++ - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("metricNetTest error unmarshal %v curLine = %v %v", string(str), curLine, err) } @@ -335,7 +335,7 @@ func metricDiskioTest(p *testParam) { break } curLine++ - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("metricDiskioTest error 
unmarshal %v curLine = %v %v", string(str), curLine, err) } @@ -383,7 +383,7 @@ func metricDiskioTest(p *testParam) { break } curLine++ - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("metricDiskioTest error unmarshal %v curLine = %v %v", string(str), curLine, err) } diff --git a/mgr/mgr.go b/mgr/mgr.go index 377ef9b09..3483cf318 100644 --- a/mgr/mgr.go +++ b/mgr/mgr.go @@ -506,13 +506,5 @@ func (m *Manager) Configs() (rss map[string]RunnerConfig) { tmpRss[k] = v } deepCopy(&rss, &tmpRss) - //if tmpRssByte, err = json.Marshal(tmpRss); err != nil { - // log.Debugf("runner configs marshal error %v", err) - // return tmpRss - //} - //if err = json.Unmarshal(tmpRssByte, &rss); err != nil { - // log.Debugf("runner configs unmarshal error %v", err) - // return tmpRss - //} return } diff --git a/mgr/rest.go b/mgr/rest.go index 9dc771274..73efaaaea 100644 --- a/mgr/rest.go +++ b/mgr/rest.go @@ -2,7 +2,7 @@ package mgr import ( "encoding/base64" - "encoding/json" + "errors" "fmt" "io/ioutil" "net" @@ -11,12 +11,10 @@ import ( "path/filepath" "strconv" "strings" - "time" - "sync" + "time" - "errors" - + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/log" "github.com/qiniu/logkit/conf" @@ -294,7 +292,7 @@ func convertWebTransformerConfig(conf map[string]interface{}) map[string]interfa } func (rs *RestService) backupRunnerConfig(rconf interface{}, filename string) error { - confBytes, err := json.MarshalIndent(rconf, "", " ") + confBytes, err := jsoniter.MarshalIndent(rconf, "", " ") if err != nil { return fmt.Errorf("runner config %v marshal failed, err is %v", rconf, err) } diff --git a/mgr/rest_test.go b/mgr/rest_test.go index 159bb492d..c67d4d999 100644 --- a/mgr/rest_test.go +++ b/mgr/rest_test.go @@ -3,7 +3,6 @@ package mgr import ( "bufio" "bytes" - "encoding/json" "fmt" "io" "io/ioutil" @@ -18,6 +17,7 @@ import ( "testing" 
"time" + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/parser" @@ -80,7 +80,7 @@ func getRunnerConfig(name, logPath, metaPath, mode, senderPath string) ([]byte, "file_send_path": senderPath, }}, } - return json.Marshal(runnerConf) + return jsoniter.Marshal(runnerConf) } func getRunnerStatus(rn, lp, rs string, rdc, rds, pe, ps, se, ss int64) map[string]RunnerStatus { @@ -342,7 +342,7 @@ func restGetStatusTest(p *testParam) { } rss := make(map[string]RunnerStatus) var respRss respRunnerStatus - err = json.Unmarshal([]byte(out.String()), &respRss) + err = jsoniter.Unmarshal([]byte(out.String()), &respRss) assert.NoError(t, err, out.String()) rss = respRss.Data exp := getRunnerStatus(runnerName, logDir, RunnerRunning, 1, 29, 0, 1, 0, 1) @@ -407,7 +407,7 @@ func restCRUDTest(p *testParam) { var expconf1, got1 RunnerConfig var respGot1 respRunnerConfig - err = json.Unmarshal([]byte(conf1), &expconf1) + err = jsoniter.Unmarshal([]byte(conf1), &expconf1) assert.NoError(t, err) expconf1.ReaderConfig[utils.GlobalKeyName] = expconf1.RunnerName expconf1.ReaderConfig[reader.KeyRunnerName] = expconf1.RunnerName @@ -421,7 +421,7 @@ func restCRUDTest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - err = json.Unmarshal(respBody, &respGot1) + err = jsoniter.Unmarshal(respBody, &respGot1) if err != nil { fmt.Println(string(respBody)) t.Error(err) @@ -434,7 +434,7 @@ func restCRUDTest(p *testParam) { var expconf2, got2 RunnerConfig var respGot2 respRunnerConfig - err = json.Unmarshal([]byte(conf2), &expconf2) + err = jsoniter.Unmarshal([]byte(conf2), &expconf2) assert.NoError(t, err) expconf2.ReaderConfig[utils.GlobalKeyName] = expconf2.RunnerName @@ -461,7 +461,7 @@ func restCRUDTest(p *testParam) { respCode, respBody, err = makeRequest(url, 
http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) - err = json.Unmarshal(respBody, &respGot2) + err = jsoniter.Unmarshal(respBody, &respGot2) assert.NoError(t, err) got2 = respGot2.Data got2.CreateTime = "" @@ -473,7 +473,7 @@ func restCRUDTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respGotLists respRunnerConfigs - err = json.Unmarshal(respBody, &respGotLists) + err = jsoniter.Unmarshal(respBody, &respGotLists) assert.NoError(t, err) gotLists := make(map[string]RunnerConfig) gotLists = respGotLists.Data @@ -503,7 +503,7 @@ func restCRUDTest(p *testParam) { assert.Equal(t, http.StatusOK, respCode) var gotUpdate RunnerConfig var respGotUpdate respRunnerConfig - err = json.Unmarshal(respBody, &respGotUpdate) + err = jsoniter.Unmarshal(respBody, &respGotUpdate) assert.NoError(t, err) gotUpdate = respGotUpdate.Data assert.Equal(t, mode, gotUpdate.ReaderConfig["mode"]) @@ -521,7 +521,7 @@ func restCRUDTest(p *testParam) { respCode, respBody, err = makeRequest(url, http.MethodGet, []byte{}) assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusBadRequest, respCode) - err = json.Unmarshal(respBody, &respGot2) + err = jsoniter.Unmarshal(respBody, &respGot2) assert.NoError(t, err) got2 = respGot2.Data got2.CreateTime = "" @@ -532,7 +532,7 @@ func restCRUDTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respGotLists = respRunnerConfigs{} - err = json.Unmarshal(respBody, &respGotLists) + err = jsoniter.Unmarshal(respBody, &respGotLists) assert.NoError(t, err) gotLists = respGotLists.Data _, ex := gotLists[rs.mgr.RestDir+"/"+runnerName1+".conf"] @@ -577,7 +577,7 @@ func runnerResetTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss := respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = 
jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss := respRss.Data @@ -597,7 +597,7 @@ func runnerResetTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss = respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss = respRss.Data @@ -644,7 +644,7 @@ func runnerStopStartTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss := respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss := respRss.Data @@ -665,7 +665,7 @@ func runnerStopStartTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss = respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss = respRss.Data @@ -688,7 +688,7 @@ func runnerStopStartTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss = respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss = respRss.Data @@ -778,7 +778,7 @@ func runnerDataIntegrityTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) respRss := respRunnerStatus{} - if err = json.Unmarshal(respBody, &respRss); 
err != nil { + if err = jsoniter.Unmarshal(respBody, &respRss); err != nil { t.Fatalf("status unmarshal failed error is %v, respBody is %v", err, string(respBody)) } rss := respRss.Data @@ -796,7 +796,7 @@ func runnerDataIntegrityTest(p *testParam) { if c == io.EOF { break } - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("Test_Run error unmarshal result curLine = %v %v", curLine, err) } @@ -830,7 +830,7 @@ func getErrorCodeTest(p *testParam) { assert.NoError(t, err) assert.Equal(t, http.StatusOK, respCode) respCodeMap := respErrorCode{} - err = json.Unmarshal(respBody, &respCodeMap) + err = jsoniter.Unmarshal(respBody, &respCodeMap) assert.NoError(t, err) codeMap := respCodeMap.Data assert.Equal(t, len(utils.ErrorCodeHumanize), len(codeMap)) @@ -886,7 +886,7 @@ func getRunnersTest(p *testParam) { assert.NoError(t, err, string(respBody)) assert.Equal(t, http.StatusOK, respCode) var respRunner respRunnersNameList - err = json.Unmarshal(respBody, &respRunner) + err = jsoniter.Unmarshal(respBody, &respRunner) assert.NoError(t, err) runnerNameList := respRunner.Data runnerExist := 0 @@ -909,7 +909,7 @@ func getRunnersTest(p *testParam) { assert.Equal(t, http.StatusOK, respCode) respRunner = respRunnersNameList{} - err = json.Unmarshal(respBody, &respRunner) + err = jsoniter.Unmarshal(respBody, &respRunner) assert.NoError(t, err) runnerNameList = respRunner.Data runnerExist = 0 @@ -964,7 +964,7 @@ func senderRouterTest(p *testParam) { t.Fatalf("get runner config failed, error is %v", err) } runnerConf := RunnerConfig{} - err = json.Unmarshal(runnerConfBytes, &runnerConf) + err = jsoniter.Unmarshal(runnerConfBytes, &runnerConf) assert.NoError(t, err) runnerConf.SenderConfig = []conf.MapConf{ conf.MapConf{ @@ -994,7 +994,7 @@ func senderRouterTest(p *testParam) { }, } - runnerConfBytes, err = json.Marshal(runnerConf) + runnerConfBytes, err = jsoniter.Marshal(runnerConf) assert.NoError(t, err) url := 
"http://127.0.0.1" + rs.address + "/logkit/configs/" + runnerName respCode, respBody, err := makeRequest(url, http.MethodPost, runnerConfBytes) @@ -1013,7 +1013,7 @@ func senderRouterTest(p *testParam) { if c == io.EOF { break } - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("TestSenderRouter error unmarshal result curLine = %v %v", dataCnt, err) } @@ -1032,7 +1032,7 @@ func senderRouterTest(p *testParam) { if c == io.EOF { break } - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("TestSenderRouter error unmarshal result curLine = %v %v", dataCnt, err) } @@ -1051,7 +1051,7 @@ func senderRouterTest(p *testParam) { if c == io.EOF { break } - err = json.Unmarshal([]byte(str), &result) + err = jsoniter.Unmarshal([]byte(str), &result) if err != nil { log.Fatalf("TestSenderRouter error unmarshal result curLine = %v %v", dataCnt, err) } diff --git a/mgr/runner.go b/mgr/runner.go index cc2328c78..e5d6dc549 100644 --- a/mgr/runner.go +++ b/mgr/runner.go @@ -1,7 +1,6 @@ package mgr import ( - "encoding/json" "errors" "fmt" "io" @@ -14,6 +13,7 @@ import ( "sync/atomic" "time" + jsoniter "github.com/json-iterator/go" "github.com/qiniu/log" "github.com/qiniu/logkit/cleaner" "github.com/qiniu/logkit/conf" @@ -308,12 +308,12 @@ func createTransformers(rc RunnerConfig) []transforms.Transformer { continue } trans := creater() - bts, err := json.Marshal(tConf) + bts, err := jsoniter.Marshal(tConf) if err != nil { log.Errorf("type %v of transformer marshal config error %v", strTP, err) continue } - err = json.Unmarshal(bts, trans) + err = jsoniter.Unmarshal(bts, trans) if err != nil { log.Errorf("type %v of transformer unmarshal config error %v", strTP, err) continue @@ -813,11 +813,11 @@ func calcSpeedTrend(old, new utils.StatsInfo, elaspedtime float64) (speed float6 func deepCopy(dst, src 
interface{}) { var err error var confByte []byte - if confByte, err = json.Marshal(src); err != nil { + if confByte, err = jsoniter.Marshal(src); err != nil { log.Debugf("runner config marshal error %v", err) dst = src } - if err = json.Unmarshal(confByte, dst); err != nil { + if err = jsoniter.Unmarshal(confByte, dst); err != nil { log.Debugf("runner config unmarshal error %v", err) dst = src } diff --git a/mgr/runner_test.go b/mgr/runner_test.go index 6f8b5f1da..66bf9be68 100644 --- a/mgr/runner_test.go +++ b/mgr/runner_test.go @@ -1,10 +1,11 @@ package mgr import ( - "encoding/json" "io/ioutil" + "log/syslog" "os" "path/filepath" + "strings" "testing" "time" @@ -14,13 +15,10 @@ import ( "github.com/qiniu/logkit/reader" "github.com/qiniu/logkit/sender" _ "github.com/qiniu/logkit/transforms/all" + "github.com/qiniu/logkit/utils" - "log/syslog" - - "strings" - + "github.com/json-iterator/go" "github.com/qiniu/log" - "github.com/qiniu/logkit/utils" "github.com/stretchr/testify/assert" ) @@ -158,7 +156,7 @@ func Test_Run(t *testing.T) { } var dts []sender.Data rawData := r.senders[0].Name()[len("mock_sender "):] - err = json.Unmarshal([]byte(rawData), &dts) + err = jsoniter.Unmarshal([]byte(rawData), &dts) if err != nil { t.Error(err) } @@ -357,7 +355,7 @@ func Test_QiniulogRun(t *testing.T) { } var dts []sender.Data rawData := r.senders[0].Name()[len("mock_sender "):] - err = json.Unmarshal([]byte(rawData), &dts) + err = jsoniter.Unmarshal([]byte(rawData), &dts) if err != nil { t.Error(err) } @@ -396,7 +394,7 @@ func TestCreateTransforms(t *testing.T) { }` rc := RunnerConfig{} - err := json.Unmarshal([]byte(config1), &rc) + err := jsoniter.Unmarshal([]byte(config1), &rc) assert.NoError(t, err) transformers := createTransformers(rc) datas := []sender.Data{{"ip": "111.2.3.4"}} @@ -439,7 +437,7 @@ func TestReplaceTransforms(t *testing.T) { }` newData := make([]sender.Data, 0) rc := 
RunnerConfig{} - err := json.Unmarshal([]byte(config1), &rc) + err := jsoniter.Unmarshal([]byte(config1), &rc) assert.NoError(t, err) transformers := createTransformers(rc) datas := []string{`{"status":"200","request_method":"POST","request_body":"\x0A","content_type":"text/xml"}`, `{"status":"200","request_method":"POST","request_body":"x0A","content_type":"text/xml"}`} @@ -448,7 +446,7 @@ func TestReplaceTransforms(t *testing.T) { assert.NoError(t, err) for i := range datas { var da sender.Data - err = json.Unmarshal([]byte(datas[i]), &da) + err = jsoniter.Unmarshal([]byte(datas[i]), &da) assert.NoError(t, err) newData = append(newData, da) } @@ -496,7 +494,7 @@ func TestDateTransforms(t *testing.T) { }] }` rc := RunnerConfig{} - err := json.Unmarshal([]byte(config1), &rc) + err := jsoniter.Unmarshal([]byte(config1), &rc) assert.NoError(t, err) transformers := createTransformers(rc) datas := []sender.Data{{"status": "02/01/2016--15:04:05"}, {"status": "2006-01-02 15:04:15"}} @@ -542,7 +540,7 @@ func TestSplitAndConvertTransforms(t *testing.T) { }] }` rc := RunnerConfig{} - err := json.Unmarshal([]byte(config1), &rc) + err := jsoniter.Unmarshal([]byte(config1), &rc) assert.NoError(t, err) transformers := createTransformers(rc) datas := []sender.Data{{"status": "1,2,3"}, {"status": "4,5,6"}} @@ -758,7 +756,7 @@ func TestSyslogRunnerX(t *testing.T) { }` rc := RunnerConfig{} - err := json.Unmarshal([]byte(config1), &rc) + err := jsoniter.Unmarshal([]byte(config1), &rc) assert.NoError(t, err) rr, err := NewCustomRunner(rc, make(chan cleaner.CleanSignal), parser.NewParserRegistry(), sender.NewSenderRegistry()) assert.NoError(t, err) @@ -874,3 +872,179 @@ func TestClassifySenderData(t *testing.T) { assert.Equal(t, 4, len(senderDataList[1])) assert.Equal(t, 4, len(senderDataList[2])) } + +// Response from Clearbit API. 
Size: 2.4kb +var mediumFixture []byte = []byte(`{ + "person": { + "id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03", + "name": { + "fullName": "Leonid Bugaev", + "givenName": "Leonid", + "familyName": "Bugaev" + }, + "email": "leonsbox@gmail.com", + "gender": "male", + "location": "Saint Petersburg, Saint Petersburg, RU", + "geo": { + "city": "Saint Petersburg", + "state": "Saint Petersburg", + "country": "Russia", + "lat": 59.9342802, + "lng": 30.3350986 + }, + "bio": "Senior engineer at Granify.com", + "site": "http://flickfaver.com", + "avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03", + "employment": { + "name": "www.latera.ru", + "title": "Software Engineer", + "domain": "gmail.com" + }, + "facebook": { + "handle": "leonid.bugaev" + }, + "github": { + "handle": "buger", + "id": 14009, + "avatar": "https://avatars.githubusercontent.com/u/14009?v=3", + "company": "Granify", + "blog": "http://leonsbox.com", + "followers": 95, + "following": 10 + }, + "twitter": { + "handle": "flickfaver", + "id": 77004410, + "bio": null, + "followers": 2, + "following": 1, + "statuses": 5, + "favorites": 0, + "location": "", + "site": "http://flickfaver.com", + "avatar": null + }, + "linkedin": { + "handle": "in/leonidbugaev" + }, + "googleplus": { + "handle": null + }, + "angellist": { + "handle": "leonid-bugaev", + "id": 61541, + "bio": "Senior engineer at Granify.com", + "blog": "http://buger.github.com", + "site": "http://buger.github.com", + "followers": 41, + "avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390" + }, + "klout": { + "handle": null, + "score": null + }, + "foursquare": { + "handle": null + }, + "aboutme": { + "handle": "leonid.bugaev", + "bio": null, + "avatar": null + }, + "gravatar": { + "handle": "buger", + "urls": [ + ], + "avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510", + "avatars": [ + { + "url": 
"http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510", + "type": "thumbnail" + } + ] + }, + "fuzzy": false + }, + "company": null +}`) + +type CBAvatar struct { + Url string `json:"url"` +} + +type CBGravatar struct { + Avatars []*CBAvatar `json:"avatars"` +} + +type CBGithub struct { + Followers int `json:"followers"` +} + +type CBName struct { + FullName string `json:"fullName"` +} + +type CBPerson struct { + Name *CBName `json:"name"` + Github *CBGithub `json:"github"` + Gravatar *CBGravatar `json:"gravatar"` +} + +type MediumPayload struct { + Person *CBPerson `json:"person"` + Company string `json:"compnay"` +} + +func BenchmarkDecodeStdStructMedium(b *testing.B) { + b.ReportAllocs() + var data MediumPayload + for i := 0; i < b.N; i++ { + jsoniter.Unmarshal(mediumFixture, &data) + } +} + +func BenchmarkEncodeStdStructMedium(b *testing.B) { + var data MediumPayload + jsoniter.Unmarshal(mediumFixture, &data) + b.ReportAllocs() + for i := 0; i < b.N; i++ { + jsoniter.Marshal(data) + } +} + +func BenchmarkDecodeJsoniterStructMedium(b *testing.B) { + b.ReportAllocs() + var data MediumPayload + for i := 0; i < b.N; i++ { + jsoniter.Unmarshal(mediumFixture, &data) + } +} + +func BenchmarkEncodeJsoniterStructMedium(b *testing.B) { + var data MediumPayload + jsoniter.Unmarshal(mediumFixture, &data) + b.ReportAllocs() + for i := 0; i < b.N; i++ { + jsoniter.Marshal(data) + } +} + +func BenchmarkEncodeJsoniterCompatibleStructMedium(b *testing.B) { + var data MediumPayload + jsoniter.Unmarshal(mediumFixture, &data) + b.ReportAllocs() + jsonc := jsoniter.ConfigCompatibleWithStandardLibrary + for i := 0; i < b.N; i++ { + jsonc.Marshal(data) + } +} + +/* +BenchmarkDecodeStdStructMedium-4 50000 39162 ns/op 1960 B/op 99 allocs/op +BenchmarkEncodeStdStructMedium-4 1000000 2106 ns/op 712 B/op 5 allocs/op +BenchmarkDecodeJsoniterStructMedium-4 200000 7676 ns/op 320 B/op 36 allocs/op +BenchmarkEncodeJsoniterStructMedium-4 1000000 1046 ns/op 240 B/op 3 allocs/op 
+BenchmarkEncodeJsoniterCompatibleStructMedium-4 1000000 1023 ns/op 240 B/op 3 allocs/op +PASS +性能明显提升 +*/ diff --git a/parser/csv_parser.go b/parser/csv_parser.go index 9fcd73d48..f35ab7195 100644 --- a/parser/csv_parser.go +++ b/parser/csv_parser.go @@ -1,8 +1,6 @@ package parser import ( - "bytes" - "encoding/json" "errors" "fmt" "reflect" @@ -10,11 +8,13 @@ import ( "strings" "time" - "github.com/qiniu/log" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/times" "github.com/qiniu/logkit/utils" + + "github.com/json-iterator/go" + "github.com/qiniu/log" ) // Type 类型常量 @@ -37,6 +37,12 @@ const ( const MaxParserSchemaErrOutput = 5 +var jsontool = jsoniter.Config{ + EscapeHTML: true, + UseNumber: true, + ValidateJsonRawMessage: true, +}.Froze() + type CsvParser struct { name string schema []field @@ -324,9 +330,7 @@ func (f field) ValueParse(value string, timeZoneOffset int) (datas sender.Data, return } m := make(map[string]interface{}) - d := json.NewDecoder(bytes.NewReader([]byte(value))) - d.UseNumber() - if err = d.Decode(&m); err != nil { + if err = jsontool.Unmarshal([]byte(value), &m); err != nil { err = fmt.Errorf("unmarshal json map type error: %v", err) return } diff --git a/parser/csv_parser_test.go b/parser/csv_parser_test.go index c35897863..9e03567db 100644 --- a/parser/csv_parser_test.go +++ b/parser/csv_parser_test.go @@ -7,11 +7,14 @@ import ( "testing" "time" + "github.com/json-iterator/go" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/times" "github.com/qiniu/logkit/utils" + "fmt" + "github.com/stretchr/testify/assert" ) @@ -44,12 +47,12 @@ func Test_CsvParser(t *testing.T) { } tmstr := time.Now().Format(time.RFC3339Nano) lines := []string{ - `123 fufu 3.14 {"x":1,"y":"2"} ` + tmstr, //correct - `cc jj uu {"x":1,"y":"2"} ` + 
tmstr, // error => uu 不是float - `123 fufu 3.15 999 ` + tmstr, //error,999不是jsonmap - `123 fufu 3.16 {"x":1,"y":["xx:12"]} ` + tmstr, //correct + `1 fufu 3.14 {"x":1,"y":"2"} ` + tmstr, //correct + `cc jj uu {"x":1,"y":"2"} ` + tmstr, // error => uu 不是float + `2 fufu 3.15 999 ` + tmstr, //error,999不是jsonmap + `3 fufu 3.16 {"x":1,"y":["xx:12"]} ` + tmstr, //correct ` `, - `123 fufu 3.17 ` + tmstr, //correct,jsonmap允许为空 + `4 fufu 3.17 ` + tmstr, //correct,jsonmap允许为空 } datas, err := parser.Parse(lines) if c, ok := err.(*utils.StatsError); ok { @@ -58,7 +61,7 @@ func Test_CsvParser(t *testing.T) { assert.Error(t, err) exp := make(map[string]interface{}) - exp["a"] = int64(123) + exp["a"] = int64(1) exp["b"] = "fufu" exp["c"] = 3.14 exp["d-x"] = json.Number("1") @@ -71,11 +74,10 @@ func Test_CsvParser(t *testing.T) { } expNum := 3 - if len(datas) != expNum { - t.Errorf("correct line should be %v, but got %v", expNum, len(datas)) - } - if datas[0]["a"] != int64(123) { - t.Errorf("a should be 123 but got %v", datas[0]["a"]) + assert.Equal(t, expNum, len(datas), fmt.Sprintln(datas)) + + if datas[0]["a"] != int64(1) { + t.Errorf("a should be 1 but got %v", datas[0]["a"]) } if "fufu" != datas[0]["b"] { t.Error("b should be fufu") @@ -201,7 +203,7 @@ func Test_ParseField(t *testing.T) { func Test_convertValue(t *testing.T) { jsonraw := "{\"a\":null}" m := make(map[string]interface{}) - if err := json.Unmarshal([]byte(jsonraw), &m); err != nil { + if err := jsoniter.Unmarshal([]byte(jsonraw), &m); err != nil { t.Error(err) } for _, v := range m { @@ -317,3 +319,15 @@ func TestRename(t *testing.T) { } } } + +func TestJsonMap(t *testing.T) { + fd := field{ + name: "c", + dataType: TypeJsonMap, + } + testx := "999" + data, err := fd.ValueParse(testx, 0) + assert.Error(t, err) + assert.Equal(t, data, sender.Data{}) + +} diff --git a/parser/json_parser.go b/parser/json_parser.go index 99c8e6150..abc9b9b74 100644 --- a/parser/json_parser.go +++ b/parser/json_parser.go @@ -1,19 +1,20 
@@ package parser import ( - "bytes" - "encoding/json" "fmt" - "github.com/qiniu/log" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/utils" + + "github.com/json-iterator/go" + "github.com/qiniu/log" ) type JsonParser struct { - name string - labels []Label + name string + labels []Label + jsontool jsoniter.API } func NewJsonParser(c conf.MapConf) (LogParser, error) { @@ -21,10 +22,15 @@ func NewJsonParser(c conf.MapConf) (LogParser, error) { labelList, _ := c.GetStringListOr(KeyLabels, []string{}) nameMap := map[string]struct{}{} labels := GetLabels(labelList, nameMap) + jsontool := jsoniter.Config{ + EscapeHTML: true, + UseNumber: true, + }.Froze() return &JsonParser{ - name: name, - labels: labels, + name: name, + labels: labels, + jsontool: jsontool, }, nil } @@ -55,9 +61,7 @@ func (im *JsonParser) Parse(lines []string) ([]sender.Data, error) { func (im *JsonParser) parseLine(line string) (data sender.Data, err error) { data = sender.Data{} - decoder := json.NewDecoder(bytes.NewReader([]byte(line))) - decoder.UseNumber() - if err = decoder.Decode(&data); err != nil { + if err = im.jsontool.Unmarshal([]byte(line), &data); err != nil { err = fmt.Errorf("parse json line error %v, raw data is: %v", err, line) log.Debug(err) return diff --git a/parser/json_parser_test.go b/parser/json_parser_test.go index a41f4021c..08e315215 100644 --- a/parser/json_parser_test.go +++ b/parser/json_parser_test.go @@ -8,6 +8,9 @@ import ( "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/utils" + "bytes" + + "github.com/json-iterator/go" "github.com/stretchr/testify/assert" ) @@ -62,3 +65,162 @@ func TestJsonParser(t *testing.T) { } assert.EqualValues(t, "testjsonparser", p.Name()) } + +var testjsonline = `{"a":1,"b":[1.0,2.0,3.0],"c":{"d":"123","g":1.2},"e":"x","mm":1.23,"jjj":1493797500346428926}` +var testmiddleline = `{ + 
"person": { + "id": "d50887ca-a6ce-4e59-b89f-14f0b5d03b03", + "name": { + "fullName": "Leonid Bugaev", + "givenName": "Leonid", + "familyName": "Bugaev" + }, + "email": "leonsbox@gmail.com", + "gender": "male", + "location": "Saint Petersburg, Saint Petersburg, RU", + "geo": { + "city": "Saint Petersburg", + "state": "Saint Petersburg", + "country": "Russia", + "lat": 59.9342802, + "lng": 30.3350986 + }, + "bio": "Senior engineer at Granify.com", + "site": "http://flickfaver.com", + "avatar": "https://d1ts43dypk8bqh.cloudfront.net/v1/avatars/d50887ca-a6ce-4e59-b89f-14f0b5d03b03", + "employment": { + "name": "www.latera.ru", + "title": "Software Engineer", + "domain": "gmail.com" + }, + "facebook": { + "handle": "leonid.bugaev" + }, + "github": { + "handle": "buger", + "id": 14009, + "avatar": "https://avatars.githubusercontent.com/u/14009?v=3", + "company": "Granify", + "blog": "http://leonsbox.com", + "followers": 95, + "following": 10 + }, + "twitter": { + "handle": "flickfaver", + "id": 77004410, + "bio": null, + "followers": 2, + "following": 1, + "statuses": 5, + "favorites": 0, + "location": "", + "site": "http://flickfaver.com", + "avatar": null + }, + "linkedin": { + "handle": "in/leonidbugaev" + }, + "googleplus": { + "handle": null + }, + "angellist": { + "handle": "leonid-bugaev", + "id": 61541, + "bio": "Senior engineer at Granify.com", + "blog": "http://buger.github.com", + "site": "http://buger.github.com", + "followers": 41, + "avatar": "https://d1qb2nb5cznatu.cloudfront.net/users/61541-medium_jpg?1405474390" + }, + "klout": { + "handle": null, + "score": null + }, + "foursquare": { + "handle": null + }, + "aboutme": { + "handle": "leonid.bugaev", + "bio": null, + "avatar": null + }, + "gravatar": { + "handle": "buger", + "urls": [ + ], + "avatar": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510", + "avatars": [ + { + "url": "http://1.gravatar.com/avatar/f7c8edd577d13b8930d5522f28123510", + "type": "thumbnail" + } + ] + }, + "fuzzy": 
false + }, + "company": null +}` + +//BenchmarkJsoninterParser-4 300000 5144 ns/op +func BenchmarkJsoninterParser(b *testing.B) { + jsonnumber := jsoniter.Config{ + EscapeHTML: true, + UseNumber: true, + }.Froze() + for i := 0; i < b.N; i++ { + data := sender.Data{} + if err := jsonnumber.Unmarshal([]byte(testjsonline), &data); err != nil { + b.Error(err) + } + } +} + +//BenchmarkJsonParser-4 200000 7767 ns/op +func BenchmarkJsonParser(b *testing.B) { + for i := 0; i < b.N; i++ { + data := sender.Data{} + decoder := json.NewDecoder(bytes.NewReader([]byte(testjsonline))) + decoder.UseNumber() + if err := decoder.Decode(&data); err != nil { + b.Error(err) + } + } +} + +//BenchmarkJsonMiddlelineParser-4 30000 58441 ns/op +func BenchmarkJsonMiddlelineParser(b *testing.B) { + for i := 0; i < b.N; i++ { + data := sender.Data{} + decoder := json.NewDecoder(bytes.NewReader([]byte(testmiddleline))) + decoder.UseNumber() + if err := decoder.Decode(&data); err != nil { + b.Error(err) + } + } +} + +//BenchmarkJsoniterMiddlelineWithDecoderParser-4 30000 41496 ns/op +func BenchmarkJsoniterMiddlelineWithDecoderParser(b *testing.B) { + for i := 0; i < b.N; i++ { + data := sender.Data{} + decoder := jsoniter.NewDecoder(bytes.NewReader([]byte(testmiddleline))) + decoder.UseNumber() + if err := decoder.Decode(&data); err != nil { + b.Error(err) + } + } +} + +//BenchmarkMiddlelineWithConfigParser-4 50000 35298 ns/op +func BenchmarkMiddlelineWithConfigParser(b *testing.B) { + jsonnumber := jsoniter.Config{ + EscapeHTML: true, + UseNumber: true, + }.Froze() + for i := 0; i < b.N; i++ { + data := sender.Data{} + if err := jsonnumber.Unmarshal([]byte(testmiddleline), &data); err != nil { + b.Error(err) + } + } +} diff --git a/parser/nginx_parser_test.go b/parser/nginx_parser_test.go index b98a93ec8..683ffb735 100644 --- a/parser/nginx_parser_test.go +++ b/parser/nginx_parser_test.go @@ -3,11 +3,8 @@ package parser import ( "fmt" "testing" - "time" - "log" - 
"github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/utils" @@ -101,5 +98,4 @@ func Benchmark_BenchNginxParser(b *testing.B) { } } grokBench = m - log.Println(m) } diff --git a/reader/autofile.go b/reader/autofile.go index 1a72e3479..c496007f3 100644 --- a/reader/autofile.go +++ b/reader/autofile.go @@ -1,12 +1,11 @@ package reader import ( + "fmt" "os" "path/filepath" "strings" - "fmt" - "github.com/qiniu/logkit/conf" ) diff --git a/reader/bufreader.go b/reader/bufreader.go index 8f2857828..f4fbff432 100644 --- a/reader/bufreader.go +++ b/reader/bufreader.go @@ -15,9 +15,8 @@ import ( "os" "regexp" "sync" - "unsafe" - "sync/atomic" + "unsafe" "github.com/axgle/mahonia" "github.com/qiniu/log" diff --git a/reader/bufreader_test.go b/reader/bufreader_test.go index 9c4772fbb..08d2c03ec 100644 --- a/reader/bufreader_test.go +++ b/reader/bufreader_test.go @@ -6,10 +6,10 @@ import ( "testing" "time" - "github.com/qiniu/logkit/conf" - "github.com/qiniu/log" + "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/utils" + "github.com/stretchr/testify/assert" ) diff --git a/reader/elastic_test.go b/reader/elastic_test.go index 5c0112099..fdbfe57e8 100644 --- a/reader/elastic_test.go +++ b/reader/elastic_test.go @@ -6,8 +6,8 @@ import ( "testing" "github.com/qiniu/logkit/conf" - "github.com/qiniu/logkit/utils" + "github.com/stretchr/testify/assert" ) diff --git a/reader/meta.go b/reader/meta.go index aaf0a6b3d..89f233dc8 100644 --- a/reader/meta.go +++ b/reader/meta.go @@ -1,7 +1,6 @@ package reader import ( - "encoding/json" "errors" "fmt" "hash/fnv" @@ -16,6 +15,7 @@ import ( "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/utils" + "github.com/json-iterator/go" "github.com/qiniu/log" ) @@ -489,12 +489,12 @@ func (m *Meta) 
ReadStatistic() (stat Statistic, err error) { if statData == nil || err != nil { return } - err = json.Unmarshal(statData, &stat) + err = jsoniter.Unmarshal(statData, &stat) return } func (m *Meta) WriteStatistic(stat *Statistic) error { - statStr, err := json.Marshal(stat) + statStr, err := jsoniter.Marshal(stat) if err != nil { return err } diff --git a/reader/mongo.go b/reader/mongo.go index e9d877b53..98032d30b 100644 --- a/reader/mongo.go +++ b/reader/mongo.go @@ -1,7 +1,6 @@ package reader import ( - "encoding/json" "errors" "sync" "sync/atomic" @@ -11,6 +10,7 @@ import ( "strings" + "github.com/json-iterator/go" "github.com/qiniu/log" "github.com/robfig/cron" "gopkg.in/mgo.v2" @@ -89,7 +89,7 @@ func NewMongoReader(meta *Meta, readBatch int, host, database, collection, offse } if filters != "" { - if jerr := json.Unmarshal([]byte(filters), &mr.collectionFilters); jerr != nil { + if jerr := jsoniter.Unmarshal([]byte(filters), &mr.collectionFilters); jerr != nil { err = errors.New("malformed collection_filters") return } @@ -273,7 +273,7 @@ func (mr *MongoReader) exec() (err error) { if id, ok := result[mr.offsetkey]; ok { mr.offset = id } - bytes, ierr := json.Marshal(result) + bytes, ierr := jsoniter.Marshal(result) if ierr != nil { log.Errorf("Runner[%v] %v json marshal inner error %v", mr.meta.RunnerName, result, ierr) } diff --git a/reader/mongo_test.go b/reader/mongo_test.go index f8f072976..eae50470e 100644 --- a/reader/mongo_test.go +++ b/reader/mongo_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/qiniu/logkit/conf" - "github.com/qiniu/logkit/utils" + "github.com/stretchr/testify/assert" "gopkg.in/mgo.v2/bson" ) diff --git a/reader/multireader_test.go b/reader/multireader_test.go index 49522a1dd..99617bf99 100644 --- a/reader/multireader_test.go +++ b/reader/multireader_test.go @@ -8,9 +8,9 @@ import ( "time" "github.com/qiniu/logkit/conf" + 
"github.com/qiniu/logkit/utils" "github.com/qiniu/log" - "github.com/qiniu/logkit/utils" "github.com/stretchr/testify/assert" ) diff --git a/reader/mutireader.go b/reader/mutireader.go index 1b57c6ac5..e25c1f325 100644 --- a/reader/mutireader.go +++ b/reader/mutireader.go @@ -1,7 +1,6 @@ package reader import ( - "encoding/json" "fmt" "io" "os" @@ -12,6 +11,7 @@ import ( "sync/atomic" "time" + "github.com/json-iterator/go" "github.com/qiniu/log" "github.com/qiniu/logkit/utils" ) @@ -255,7 +255,7 @@ func NewMultiReader(meta *Meta, logPathPattern, whence, expireDur, statIntervalD log.Warnf("Runner[%v] %v read buf error %v, ignore...", mr.meta.RunnerName, mr.Name(), err) } } else { - err = json.Unmarshal(buf, &mr.cacheMap) + err = jsoniter.Unmarshal(buf, &mr.cacheMap) if err != nil { log.Warnf("Runner[%v] %v Unmarshal read buf error %v, ignore...", mr.meta.RunnerName, mr.Name(), err) } @@ -482,7 +482,7 @@ func (mr *MultiReader) SyncMeta() { mr.armapmux.Unlock() } mr.armapmux.Lock() - buf, err := json.Marshal(mr.cacheMap) + buf, err := jsoniter.Marshal(mr.cacheMap) mr.armapmux.Unlock() if err != nil { log.Errorf("%v sync meta error %v, cacheMap %v", mr.Name(), err, mr.cacheMap) diff --git a/reader/redis.go b/reader/redis.go index b0d6bc7c0..a4a968359 100644 --- a/reader/redis.go +++ b/reader/redis.go @@ -3,12 +3,11 @@ package reader import ( "errors" "fmt" + "strings" "sync" "sync/atomic" "time" - "strings" - "github.com/go-redis/redis" "github.com/qiniu/log" "github.com/qiniu/logkit/conf" diff --git a/reader/seqfile.go b/reader/seqfile.go index 91b533066..18cbb6d42 100644 --- a/reader/seqfile.go +++ b/reader/seqfile.go @@ -7,12 +7,11 @@ import ( "os" "path/filepath" "strings" + "sync" "sync/atomic" "syscall" "time" - "sync" - "github.com/qiniu/log" "github.com/qiniu/logkit/rateio" "github.com/qiniu/logkit/utils" diff --git 
a/reader/singlefile.go b/reader/singlefile.go index 36f39e623..df4c24604 100644 --- a/reader/singlefile.go +++ b/reader/singlefile.go @@ -2,21 +2,19 @@ package reader import ( "errors" + "fmt" "io" "io/ioutil" "os" "path/filepath" "strings" + "sync" "sync/atomic" "time" "github.com/qiniu/logkit/rateio" "github.com/qiniu/logkit/utils" - "sync" - - "fmt" - "github.com/qiniu/log" ) diff --git a/reader/sql.go b/reader/sql.go index 7150a069f..25e15c96f 100644 --- a/reader/sql.go +++ b/reader/sql.go @@ -2,27 +2,25 @@ package reader import ( "database/sql" - "encoding/json" + "encoding/binary" "errors" "fmt" + "reflect" "strconv" "strings" "sync" "sync/atomic" "time" - "github.com/qiniu/log" - "github.com/robfig/cron" - - "reflect" - - "encoding/binary" + "github.com/qiniu/logkit/conf" + "github.com/qiniu/logkit/utils" _ "github.com/denisenkom/go-mssqldb" //mssql 驱动 _ "github.com/go-sql-driver/mysql" //mysql 驱动 - _ "github.com/lib/pq" //postgres 驱动 - "github.com/qiniu/logkit/conf" - "github.com/qiniu/logkit/utils" + "github.com/json-iterator/go" + _ "github.com/lib/pq" //postgres 驱动 + "github.com/qiniu/log" + "github.com/robfig/cron" ) const ( @@ -688,7 +686,7 @@ func (mr *SqlReader) exec(connectStr string) (err error) { } } } - ret, err := json.Marshal(data) + ret, err := jsoniter.Marshal(data) if err != nil { log.Errorf("Runner[%v] %v unmarshal sql data error %v", mr.meta.RunnerName, mr.Name(), err) continue diff --git a/reader/sql_test.go b/reader/sql_test.go index ee83b1549..4c7320e3d 100644 --- a/reader/sql_test.go +++ b/reader/sql_test.go @@ -1,16 +1,15 @@ package reader import ( + "fmt" "os" + "reflect" "testing" "time" "github.com/qiniu/logkit/conf" - - "fmt" - "reflect" - "github.com/qiniu/logkit/utils" + "github.com/stretchr/testify/assert" ) diff --git 
a/sender/fault_tolerant_sender.go b/sender/fault_tolerant_sender.go index 359829f17..4f98d60c2 100644 --- a/sender/fault_tolerant_sender.go +++ b/sender/fault_tolerant_sender.go @@ -1,7 +1,6 @@ package sender import ( - "bytes" "encoding/json" "errors" "os" @@ -9,10 +8,12 @@ import ( "sync/atomic" "time" - "github.com/qiniu/log" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/queue" "github.com/qiniu/logkit/utils" + + "github.com/json-iterator/go" + "github.com/qiniu/log" "github.com/qiniu/pandora-go-sdk/base/reqerr" ) @@ -60,6 +61,7 @@ type FtSender struct { opt *FtOption stats utils.StatsInfo statsMutex *sync.RWMutex + jsontool jsoniter.API } type FtOption struct { @@ -130,6 +132,7 @@ func newFtSender(innerSender Sender, runnerName string, opt *FtOption) (*FtSende runnerName: runnerName, opt: opt, statsMutex: new(sync.RWMutex), + jsontool: jsoniter.Config{EscapeHTML: true, UseNumber: true}.Froze(), } go ftSender.asyncSendLogFromDiskQueue() return &ftSender, nil @@ -222,7 +225,7 @@ func (ft *FtSender) Close() error { func (ft *FtSender) marshalData(datas []Data) (bs []byte, err error) { ctx := new(datasContext) ctx.Datas = datas - bs, err = json.Marshal(ctx) + bs, err = jsoniter.Marshal(ctx) if err != nil { err = reqerr.NewSendError("Cannot marshal data :"+err.Error(), ConvertDatasBack(datas), reqerr.TypeDefault) return @@ -233,9 +236,7 @@ func (ft *FtSender) marshalData(datas []Data) (bs []byte, err error) { // unmarshalData 如何将数据从磁盘中反序列化出来 func (ft *FtSender) unmarshalData(dat []byte) (datas []Data, err error) { ctx := new(datasContext) - d := json.NewDecoder(bytes.NewReader(dat)) - d.UseNumber() - err = d.Decode(&ctx) + err = ft.jsontool.Unmarshal(dat, &ctx) if err != nil { return } @@ -316,7 +317,7 @@ func (ft *FtSender) trySendDatas(datas []Data, failSleep int, isRetry bool) (bac if err != nil { retDatasContext := ft.handleSendError(err, datas) for _, v := range 
retDatasContext { - nnBytes, _ := json.Marshal(v) + nnBytes, _ := jsoniter.Marshal(v) qErr := ft.backupQueue.Put(nnBytes) if qErr != nil { log.Errorf("Runner[%v] Sender[%v] cannot write points back to queue %v: %v", ft.runnerName, ft.innerSender.Name(), ft.backupQueue.Name(), qErr) diff --git a/sender/file_sender.go b/sender/file_sender.go index 0b2655863..b74e9dee3 100644 --- a/sender/file_sender.go +++ b/sender/file_sender.go @@ -1,9 +1,9 @@ package sender import ( - "encoding/json" "os" + "github.com/json-iterator/go" "github.com/qiniu/logkit/conf" "github.com/qiniu/pandora-go-sdk/base/reqerr" ) @@ -71,7 +71,7 @@ func (fs *FileSender) Close() error { // JSONLineMarshalFunc 将数据json并且按换行符分隔 func JSONLineMarshalFunc(datas []Data) ([]byte, error) { - bytes, err := json.Marshal(datas) + bytes, err := jsoniter.Marshal(datas) if err != nil { return nil, err } diff --git a/sender/kafka_sender.go b/sender/kafka_sender.go index f818c5d1a..830f595db 100644 --- a/sender/kafka_sender.go +++ b/sender/kafka_sender.go @@ -1,16 +1,17 @@ package sender import ( - "encoding/json" "fmt" "os" "strings" "time" - "github.com/Shopify/sarama" "github.com/qiniu/log" "github.com/qiniu/logkit/conf" "github.com/qiniu/logkit/utils" + + "github.com/Shopify/sarama" + "github.com/json-iterator/go" ) type KafkaSender struct { @@ -171,7 +172,7 @@ func (kf *KafkaSender) getEventMessage(event map[string]interface{}) (pm *sarama } else { topic = kf.topic[0] } - value, err := json.Marshal(event) + value, err := jsoniter.Marshal(event) if err != nil { return } diff --git a/sender/mock_sender.go b/sender/mock_sender.go index 3f1b6553b..0f11f38fd 100644 --- a/sender/mock_sender.go +++ b/sender/mock_sender.go @@ -1,10 +1,9 @@ package sender import ( - "encoding/json" - "sync" + "github.com/json-iterator/go" "github.com/qiniu/logkit/conf" ) @@ -32,7 +31,7 @@ func NewMockSender(c 
conf.MapConf) (Sender, error) { func (mock *MockSender) Name() string { mock.mux.Lock() defer mock.mux.Unlock() - raw, err := json.Marshal(mock.datas) + raw, err := jsoniter.Marshal(mock.datas) if err != nil { raw = []byte(err.Error()) } diff --git a/sender/pandora_sender_test.go b/sender/pandora_sender_test.go index ae31baccb..6dfd7625e 100644 --- a/sender/pandora_sender_test.go +++ b/sender/pandora_sender_test.go @@ -17,6 +17,7 @@ import ( "testing" "time" + "github.com/json-iterator/go" "github.com/labstack/echo" "github.com/qiniu/log" "github.com/qiniu/logkit/cli" @@ -154,7 +155,7 @@ func (s *mock_pandora) PostRepos_Data() echo.HandlerFunc { } else if strings.Contains(s.Body, "typeBinaryUnpack") && !strings.Contains(s.Body, KeyPandoraStash) { c.Response().Header().Set(cli.ContentType, cli.ApplicationJson) c.Response().WriteHeader(http.StatusBadRequest) - return json.NewEncoder(c.Response()).Encode(map[string]string{"error": "E18111 mock_pandora error"}) + return jsoniter.NewEncoder(c.Response()).Encode(map[string]string{"error": "E18111 mock_pandora error"}) } s.PostDataNum++ return nil @@ -299,7 +300,7 @@ func TestPandoraSender(t *testing.T) { dataJson := `{"ab":"REQ","ac":200,"d":14774559431867215}` d = Data{} - jsonDecoder := json.NewDecoder(bytes.NewReader([]byte(dataJson))) + jsonDecoder := jsoniter.NewDecoder(bytes.NewReader([]byte(dataJson))) jsonDecoder.UseNumber() err = jsonDecoder.Decode(&d) if err != nil { diff --git a/transforms/mutate/urlparam.go b/transforms/mutate/urlparam.go index 6022fc207..c9ef55a6f 100644 --- a/transforms/mutate/urlparam.go +++ b/transforms/mutate/urlparam.go @@ -2,12 +2,9 @@ package mutate import ( "errors" - - "strings" - "fmt" - "strconv" + "strings" "github.com/qiniu/log" "github.com/qiniu/logkit/sender" diff --git a/transforms/service/k8stag.go b/transforms/service/k8stag.go index c6abba350..b4e15d7da 100644 --- a/transforms/service/k8stag.go +++ 
b/transforms/service/k8stag.go @@ -2,11 +2,9 @@ package service import ( "errors" - "strings" - "fmt" - "path/filepath" + "strings" "github.com/qiniu/logkit/sender" "github.com/qiniu/logkit/transforms" diff --git a/utils/utils.go b/utils/utils.go index 0159f1771..da31b93f2 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -6,7 +6,6 @@ import ( "compress/gzip" "database/sql" "encoding/binary" - "encoding/json" "errors" "fmt" "io" @@ -21,6 +20,7 @@ import ( "sync/atomic" "time" + "github.com/json-iterator/go" "github.com/qiniu/log" ) @@ -296,7 +296,7 @@ func GetExtraInfo() map[string]string { func IsJsonString(s string) bool { var x interface{} - if err := json.Unmarshal([]byte(s), &x); err != nil { + if err := jsoniter.Unmarshal([]byte(s), &x); err != nil { return false } switch x.(type) { diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock new file mode 100644 index 000000000..f34f5b4ad --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.lock @@ -0,0 +1,33 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/davecgh/go-spew" + packages = ["spew"] + revision = "346938d642f2ec3594ed81d874461961cd0faa76" + version = "v1.1.0" + +[[projects]] + branch = "master" + name = "github.com/google/gofuzz" + packages = ["."] + revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1" + +[[projects]] + name = "github.com/pmezard/go-difflib" + packages = ["difflib"] + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + name = "github.com/stretchr/testify" + packages = ["assert","require"] + revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0" + version = "v1.1.4" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "f8b7cf3941d3792cbbd570bb53c093adaf774334d1162c651565c97a58dc9d09" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml new file mode 100644 index 000000000..0ac55ef87 --- /dev/null +++ b/vendor/github.com/json-iterator/go/Gopkg.toml @@ -0,0 +1,33 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/davecgh/go-spew" + version = "1.1.0" + +[[constraint]] + branch = "master" + name = "github.com/google/gofuzz" + +[[constraint]] + name = "github.com/stretchr/testify" + version = "1.1.4" diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE new file mode 100644 index 000000000..2cf4f5ab2 --- /dev/null +++ b/vendor/github.com/json-iterator/go/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 json-iterator + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md new file mode 100644 index 000000000..3a0d68098 --- /dev/null +++ b/vendor/github.com/json-iterator/go/README.md @@ -0,0 +1,86 @@ +[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge) +[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/json-iterator/go) +[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go) +[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go) +[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go) +[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE) +[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) + +A high-performance 100% compatible drop-in replacement of "encoding/json" + +``` +Go开发者们请加入我们,滴滴出行平台技术部 taowen@didichuxing.com +``` + +# Benchmark + +![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png) + +Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go + +Raw Result (easyjson requires static code generation) + +| | ns/op | allocation bytes | allocation times | +| --- | --- | --- 
| --- | +| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op | +| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op | +| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op | +| std encode | 2213 ns/op | 712 B/op | 5 allocs/op | +| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op | +| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op | + +# Usage + +100% compatibility with standard lib + +Replace + +```go +import "encoding/json" +json.Marshal(&data) +``` + +with + +```go +import "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Marshal(&data) +``` + +Replace + +```go +import "encoding/json" +json.Unmarshal(input, &data) +``` + +with + +```go +import "github.com/json-iterator/go" + +var json = jsoniter.ConfigCompatibleWithStandardLibrary +json.Unmarshal(input, &data) +``` + +[More documentation](http://jsoniter.com/migrate-from-go-std.html) + +# How to get + +``` +go get github.com/json-iterator/go +``` + +# Contribution Welcomed ! + +Contributors + +* [thockin](https://github.com/thockin) +* [mattn](https://github.com/mattn) +* [cch123](https://github.com/cch123) +* [Oleg Shaldybin](https://github.com/olegshaldybin) +* [Jason Toffaletti](https://github.com/toffaletti) + +Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby) diff --git a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh new file mode 100755 index 000000000..b45ef6883 --- /dev/null +++ b/vendor/github.com/json-iterator/go/build.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e +set -x + +if [ ! 
-d /tmp/build-golang/src/github.com/json-iterator ]; then + mkdir -p /tmp/build-golang/src/github.com/json-iterator + ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go +fi +export GOPATH=/tmp/build-golang +go get -u github.com/golang/dep/cmd/dep +cd /tmp/build-golang/src/github.com/json-iterator/go +exec $GOPATH/bin/dep ensure -update diff --git a/vendor/github.com/json-iterator/go/feature_adapter.go b/vendor/github.com/json-iterator/go/feature_adapter.go new file mode 100644 index 000000000..e0ab94807 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_adapter.go @@ -0,0 +1,138 @@ +package jsoniter + +import ( + "bytes" + "io" +) + +// RawMessage to make replace json with jsoniter +type RawMessage []byte + +// Unmarshal adapts to json/encoding Unmarshal API +// +// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v. +// Refer to https://godoc.org/encoding/json#Unmarshal for more information +func Unmarshal(data []byte, v interface{}) error { + return ConfigDefault.Unmarshal(data, v) +} + +func lastNotSpacePos(data []byte) int { + for i := len(data) - 1; i >= 0; i-- { + if data[i] != ' ' && data[i] != '\t' && data[i] != '\r' && data[i] != '\n' { + return i + 1 + } + } + return 0 +} + +// UnmarshalFromString convenient method to read from string instead of []byte +func UnmarshalFromString(str string, v interface{}) error { + return ConfigDefault.UnmarshalFromString(str, v) +} + +// Get quick method to get value from deeply nested JSON structure +func Get(data []byte, path ...interface{}) Any { + return ConfigDefault.Get(data, path...) 
+} + +// Marshal adapts to json/encoding Marshal API +// +// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API +// Refer to https://godoc.org/encoding/json#Marshal for more information +func Marshal(v interface{}) ([]byte, error) { + return ConfigDefault.Marshal(v) +} + +// MarshalIndent same as json.MarshalIndent. Prefix is not supported. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + return ConfigDefault.MarshalIndent(v, prefix, indent) +} + +// MarshalToString convenient method to write as string instead of []byte +func MarshalToString(v interface{}) (string, error) { + return ConfigDefault.MarshalToString(v) +} + +// NewDecoder adapts to json/stream NewDecoder API. +// +// NewDecoder returns a new decoder that reads from r. +// +// Instead of a json/encoding Decoder, an Decoder is returned +// Refer to https://godoc.org/encoding/json#NewDecoder for more information +func NewDecoder(reader io.Reader) *Decoder { + return ConfigDefault.NewDecoder(reader) +} + +// Decoder reads and decodes JSON values from an input stream. +// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress) +type Decoder struct { + iter *Iterator +} + +// Decode decode JSON into interface{} +func (adapter *Decoder) Decode(obj interface{}) error { + if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil { + if !adapter.iter.loadMore() { + return io.EOF + } + } + adapter.iter.ReadVal(obj) + err := adapter.iter.Error + if err == io.EOF { + return nil + } + return adapter.iter.Error +} + +// More is there more? 
+func (adapter *Decoder) More() bool { + return adapter.iter.head != adapter.iter.tail +} + +// Buffered remaining buffer +func (adapter *Decoder) Buffered() io.Reader { + remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail] + return bytes.NewReader(remaining) +} + +// UseNumber for number JSON element, use float64 or json.NumberValue (alias of string) +func (adapter *Decoder) UseNumber() { + origCfg := adapter.iter.cfg.configBeforeFrozen + origCfg.UseNumber = true + adapter.iter.cfg = origCfg.Froze().(*frozenConfig) +} + +// NewEncoder same as json.NewEncoder +func NewEncoder(writer io.Writer) *Encoder { + return ConfigDefault.NewEncoder(writer) +} + +// Encoder same as json.Encoder +type Encoder struct { + stream *Stream +} + +// Encode encode interface{} as JSON to io.Writer +func (adapter *Encoder) Encode(val interface{}) error { + adapter.stream.WriteVal(val) + adapter.stream.WriteRaw("\n") + adapter.stream.Flush() + return adapter.stream.Error +} + +// SetIndent set the indention. Prefix is not supported +func (adapter *Encoder) SetIndent(prefix, indent string) { + adapter.stream.cfg.indentionStep = len(indent) +} + +// SetEscapeHTML escape html by default, set to false to disable +func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) { + config := adapter.stream.cfg.configBeforeFrozen + config.EscapeHTML = escapeHTML + adapter.stream.cfg = config.Froze().(*frozenConfig) +} + +// Valid reports whether data is a valid JSON encoding. +func Valid(data []byte) bool { + return ConfigDefault.Valid(data) +} diff --git a/vendor/github.com/json-iterator/go/feature_any.go b/vendor/github.com/json-iterator/go/feature_any.go new file mode 100644 index 000000000..87716d1fc --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any.go @@ -0,0 +1,245 @@ +package jsoniter + +import ( + "errors" + "fmt" + "io" + "reflect" +) + +// Any generic object representation. 
+// The lazy json implementation holds []byte and parse lazily. +type Any interface { + LastError() error + ValueType() ValueType + MustBeValid() Any + ToBool() bool + ToInt() int + ToInt32() int32 + ToInt64() int64 + ToUint() uint + ToUint32() uint32 + ToUint64() uint64 + ToFloat32() float32 + ToFloat64() float64 + ToString() string + ToVal(val interface{}) + Get(path ...interface{}) Any + // TODO: add Set + Size() int + Keys() []string + GetInterface() interface{} + WriteTo(stream *Stream) +} + +type baseAny struct{} + +func (any *baseAny) Get(path ...interface{}) Any { + return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)} +} + +func (any *baseAny) Size() int { + return 0 +} + +func (any *baseAny) Keys() []string { + return []string{} +} + +func (any *baseAny) ToVal(obj interface{}) { + panic("not implemented") +} + +// WrapInt32 turn int32 into Any interface +func WrapInt32(val int32) Any { + return &int32Any{baseAny{}, val} +} + +// WrapInt64 turn int64 into Any interface +func WrapInt64(val int64) Any { + return &int64Any{baseAny{}, val} +} + +// WrapUint32 turn uint32 into Any interface +func WrapUint32(val uint32) Any { + return &uint32Any{baseAny{}, val} +} + +// WrapUint64 turn uint64 into Any interface +func WrapUint64(val uint64) Any { + return &uint64Any{baseAny{}, val} +} + +// WrapFloat64 turn float64 into Any interface +func WrapFloat64(val float64) Any { + return &floatAny{baseAny{}, val} +} + +// WrapString turn string into Any interface +func WrapString(val string) Any { + return &stringAny{baseAny{}, val} +} + +// Wrap turn a go object into Any interface +func Wrap(val interface{}) Any { + if val == nil { + return &nilAny{} + } + asAny, isAny := val.(Any) + if isAny { + return asAny + } + typ := reflect.TypeOf(val) + switch typ.Kind() { + case reflect.Slice: + return wrapArray(val) + case reflect.Struct: + return wrapStruct(val) + case reflect.Map: + return wrapMap(val) + case reflect.String: + return 
WrapString(val.(string)) + case reflect.Int: + return WrapInt64(int64(val.(int))) + case reflect.Int8: + return WrapInt32(int32(val.(int8))) + case reflect.Int16: + return WrapInt32(int32(val.(int16))) + case reflect.Int32: + return WrapInt32(val.(int32)) + case reflect.Int64: + return WrapInt64(val.(int64)) + case reflect.Uint: + return WrapUint64(uint64(val.(uint))) + case reflect.Uint8: + return WrapUint32(uint32(val.(uint8))) + case reflect.Uint16: + return WrapUint32(uint32(val.(uint16))) + case reflect.Uint32: + return WrapUint32(uint32(val.(uint32))) + case reflect.Uint64: + return WrapUint64(val.(uint64)) + case reflect.Float32: + return WrapFloat64(float64(val.(float32))) + case reflect.Float64: + return WrapFloat64(val.(float64)) + case reflect.Bool: + if val.(bool) == true { + return &trueAny{} + } + return &falseAny{} + } + return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)} +} + +// ReadAny read next JSON element as an Any object. It is a better json.RawMessage. 
+func (iter *Iterator) ReadAny() Any { + return iter.readAny() +} + +func (iter *Iterator) readAny() Any { + c := iter.nextToken() + switch c { + case '"': + iter.unreadByte() + return &stringAny{baseAny{}, iter.ReadString()} + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + return &nilAny{} + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + return &trueAny{} + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + return &falseAny{} + case '{': + return iter.readObjectAny() + case '[': + return iter.readArrayAny() + case '-': + return iter.readNumberAny(false) + case 0: + return &invalidAny{baseAny{}, errors.New("input is empty")} + default: + return iter.readNumberAny(true) + } +} + +func (iter *Iterator) readNumberAny(positive bool) Any { + iter.startCapture(iter.head - 1) + iter.skipNumber() + lazyBuf := iter.stopCapture() + return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readObjectAny() Any { + iter.startCapture(iter.head - 1) + iter.skipObject() + lazyBuf := iter.stopCapture() + return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func (iter *Iterator) readArrayAny() Any { + iter.startCapture(iter.head - 1) + iter.skipArray() + lazyBuf := iter.stopCapture() + return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil} +} + +func locateObjectField(iter *Iterator, target string) []byte { + var found []byte + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + if field == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + return true + }) + return found +} + +func locateArrayElement(iter *Iterator, target int) []byte { + var found []byte + n := 0 + iter.ReadArrayCB(func(iter *Iterator) bool { + if n == target { + found = iter.SkipAndReturnBytes() + return false + } + iter.Skip() + n++ + return true + }) + return found +} + +func locatePath(iter *Iterator, path []interface{}) Any { + for i, pathKeyObj := range path { + switch pathKey := pathKeyObj.(type) { + 
case string: + valueBytes := locateObjectField(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int: + valueBytes := locateArrayElement(iter, pathKey) + if valueBytes == nil { + return newInvalidAny(path[i:]) + } + iter.ResetBytes(valueBytes) + case int32: + if '*' == pathKey { + return iter.readAny().Get(path[i:]...) + } + return newInvalidAny(path[i:]) + default: + return newInvalidAny(path[i:]) + } + } + if iter.Error != nil && iter.Error != io.EOF { + return &invalidAny{baseAny{}, iter.Error} + } + return iter.readAny() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_array.go b/vendor/github.com/json-iterator/go/feature_any_array.go new file mode 100644 index 000000000..0449e9aa4 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_array.go @@ -0,0 +1,278 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type arrayLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *arrayLazyAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayLazyAny) MustBeValid() Any { + return any +} + +func (any *arrayLazyAny) LastError() error { + return any.err +} + +func (any *arrayLazyAny) ToBool() bool { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.ReadArray() +} + +func (any *arrayLazyAny) ToInt() int { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt32() int32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToInt64() int64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint() uint { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint32() uint32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToUint64() uint64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) 
ToFloat32() float32 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToFloat64() float64 { + if any.ToBool() { + return 1 + } + return 0 +} + +func (any *arrayLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *arrayLazyAny) ToVal(val interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(val) +} + +func (any *arrayLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateArrayElement(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + arr := make([]Any, 0) + iter.ReadArrayCB(func(iter *Iterator) bool { + found := iter.readAny().Get(path[1:]...) 
+ if found.ValueType() != InvalidValue { + arr = append(arr, found) + } + return true + }) + return wrapArray(arr) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadArrayCB(func(iter *Iterator) bool { + size++ + iter.Skip() + return true + }) + return size +} + +func (any *arrayLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *arrayLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type arrayAny struct { + baseAny + val reflect.Value +} + +func wrapArray(val interface{}) *arrayAny { + return &arrayAny{baseAny{}, reflect.ValueOf(val)} +} + +func (any *arrayAny) ValueType() ValueType { + return ArrayValue +} + +func (any *arrayAny) MustBeValid() Any { + return any +} + +func (any *arrayAny) LastError() error { + return nil +} + +func (any *arrayAny) ToBool() bool { + return any.val.Len() != 0 +} + +func (any *arrayAny) ToInt() int { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt32() int32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToInt64() int64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint() uint { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint32() uint32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToUint64() uint64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat32() float32 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToFloat64() float64 { + if any.val.Len() == 0 { + return 0 + } + return 1 +} + +func (any *arrayAny) ToString() string { + str, _ := MarshalToString(any.val.Interface()) + return str +} + +func 
(any *arrayAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int: + if firstPath < 0 || firstPath >= any.val.Len() { + return newInvalidAny(path) + } + return Wrap(any.val.Index(firstPath).Interface()) + case int32: + if '*' == firstPath { + mappedAll := make([]Any, 0) + for i := 0; i < any.val.Len(); i++ { + mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...) + if mapped.ValueType() != InvalidValue { + mappedAll = append(mappedAll, mapped) + } + } + return wrapArray(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *arrayAny) Size() int { + return any.val.Len() +} + +func (any *arrayAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *arrayAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_bool.go b/vendor/github.com/json-iterator/go/feature_any_bool.go new file mode 100644 index 000000000..9452324af --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_bool.go @@ -0,0 +1,137 @@ +package jsoniter + +type trueAny struct { + baseAny +} + +func (any *trueAny) LastError() error { + return nil +} + +func (any *trueAny) ToBool() bool { + return true +} + +func (any *trueAny) ToInt() int { + return 1 +} + +func (any *trueAny) ToInt32() int32 { + return 1 +} + +func (any *trueAny) ToInt64() int64 { + return 1 +} + +func (any *trueAny) ToUint() uint { + return 1 +} + +func (any *trueAny) ToUint32() uint32 { + return 1 +} + +func (any *trueAny) ToUint64() uint64 { + return 1 +} + +func (any *trueAny) ToFloat32() float32 { + return 1 +} + +func (any *trueAny) ToFloat64() float64 { + return 1 +} + +func (any *trueAny) ToString() string { + return "true" +} + +func (any *trueAny) WriteTo(stream *Stream) { + stream.WriteTrue() +} + +func (any *trueAny) Parse() *Iterator { + return nil +} + +func (any 
*trueAny) GetInterface() interface{} { + return true +} + +func (any *trueAny) ValueType() ValueType { + return BoolValue +} + +func (any *trueAny) MustBeValid() Any { + return any +} + +type falseAny struct { + baseAny +} + +func (any *falseAny) LastError() error { + return nil +} + +func (any *falseAny) ToBool() bool { + return false +} + +func (any *falseAny) ToInt() int { + return 0 +} + +func (any *falseAny) ToInt32() int32 { + return 0 +} + +func (any *falseAny) ToInt64() int64 { + return 0 +} + +func (any *falseAny) ToUint() uint { + return 0 +} + +func (any *falseAny) ToUint32() uint32 { + return 0 +} + +func (any *falseAny) ToUint64() uint64 { + return 0 +} + +func (any *falseAny) ToFloat32() float32 { + return 0 +} + +func (any *falseAny) ToFloat64() float64 { + return 0 +} + +func (any *falseAny) ToString() string { + return "false" +} + +func (any *falseAny) WriteTo(stream *Stream) { + stream.WriteFalse() +} + +func (any *falseAny) Parse() *Iterator { + return nil +} + +func (any *falseAny) GetInterface() interface{} { + return false +} + +func (any *falseAny) ValueType() ValueType { + return BoolValue +} + +func (any *falseAny) MustBeValid() Any { + return any +} diff --git a/vendor/github.com/json-iterator/go/feature_any_float.go b/vendor/github.com/json-iterator/go/feature_any_float.go new file mode 100644 index 000000000..35fdb0949 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_float.go @@ -0,0 +1,83 @@ +package jsoniter + +import ( + "strconv" +) + +type floatAny struct { + baseAny + val float64 +} + +func (any *floatAny) Parse() *Iterator { + return nil +} + +func (any *floatAny) ValueType() ValueType { + return NumberValue +} + +func (any *floatAny) MustBeValid() Any { + return any +} + +func (any *floatAny) LastError() error { + return nil +} + +func (any *floatAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *floatAny) ToInt() int { + return int(any.val) +} + +func (any 
*floatAny) ToInt32() int32 { + return int32(any.val) +} + +func (any *floatAny) ToInt64() int64 { + return int64(any.val) +} + +func (any *floatAny) ToUint() uint { + if any.val > 0 { + return uint(any.val) + } + return 0 +} + +func (any *floatAny) ToUint32() uint32 { + if any.val > 0 { + return uint32(any.val) + } + return 0 +} + +func (any *floatAny) ToUint64() uint64 { + if any.val > 0 { + return uint64(any.val) + } + return 0 +} + +func (any *floatAny) ToFloat32() float32 { + return float32(any.val) +} + +func (any *floatAny) ToFloat64() float64 { + return any.val +} + +func (any *floatAny) ToString() string { + return strconv.FormatFloat(any.val, 'E', -1, 64) +} + +func (any *floatAny) WriteTo(stream *Stream) { + stream.WriteFloat64(any.val) +} + +func (any *floatAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_int32.go b/vendor/github.com/json-iterator/go/feature_any_int32.go new file mode 100644 index 000000000..1b56f3991 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_int32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int32Any struct { + baseAny + val int32 +} + +func (any *int32Any) LastError() error { + return nil +} + +func (any *int32Any) ValueType() ValueType { + return NumberValue +} + +func (any *int32Any) MustBeValid() Any { + return any +} + +func (any *int32Any) ToBool() bool { + return any.val != 0 +} + +func (any *int32Any) ToInt() int { + return int(any.val) +} + +func (any *int32Any) ToInt32() int32 { + return any.val +} + +func (any *int32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *int32Any) ToUint() uint { + return uint(any.val) +} + +func (any *int32Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int32Any) 
ToFloat64() float64 { + return float64(any.val) +} + +func (any *int32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *int32Any) WriteTo(stream *Stream) { + stream.WriteInt32(any.val) +} + +func (any *int32Any) Parse() *Iterator { + return nil +} + +func (any *int32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_int64.go b/vendor/github.com/json-iterator/go/feature_any_int64.go new file mode 100644 index 000000000..c440d72b6 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_int64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type int64Any struct { + baseAny + val int64 +} + +func (any *int64Any) LastError() error { + return nil +} + +func (any *int64Any) ValueType() ValueType { + return NumberValue +} + +func (any *int64Any) MustBeValid() Any { + return any +} + +func (any *int64Any) ToBool() bool { + return any.val != 0 +} + +func (any *int64Any) ToInt() int { + return int(any.val) +} + +func (any *int64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *int64Any) ToInt64() int64 { + return any.val +} + +func (any *int64Any) ToUint() uint { + return uint(any.val) +} + +func (any *int64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *int64Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *int64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *int64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *int64Any) ToString() string { + return strconv.FormatInt(any.val, 10) +} + +func (any *int64Any) WriteTo(stream *Stream) { + stream.WriteInt64(any.val) +} + +func (any *int64Any) Parse() *Iterator { + return nil +} + +func (any *int64Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_invalid.go 
b/vendor/github.com/json-iterator/go/feature_any_invalid.go new file mode 100644 index 000000000..1d859eac3 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_invalid.go @@ -0,0 +1,82 @@ +package jsoniter + +import "fmt" + +type invalidAny struct { + baseAny + err error +} + +func newInvalidAny(path []interface{}) *invalidAny { + return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)} +} + +func (any *invalidAny) LastError() error { + return any.err +} + +func (any *invalidAny) ValueType() ValueType { + return InvalidValue +} + +func (any *invalidAny) MustBeValid() Any { + panic(any.err) +} + +func (any *invalidAny) ToBool() bool { + return false +} + +func (any *invalidAny) ToInt() int { + return 0 +} + +func (any *invalidAny) ToInt32() int32 { + return 0 +} + +func (any *invalidAny) ToInt64() int64 { + return 0 +} + +func (any *invalidAny) ToUint() uint { + return 0 +} + +func (any *invalidAny) ToUint32() uint32 { + return 0 +} + +func (any *invalidAny) ToUint64() uint64 { + return 0 +} + +func (any *invalidAny) ToFloat32() float32 { + return 0 +} + +func (any *invalidAny) ToFloat64() float64 { + return 0 +} + +func (any *invalidAny) ToString() string { + return "" +} + +func (any *invalidAny) WriteTo(stream *Stream) { +} + +func (any *invalidAny) Get(path ...interface{}) Any { + if any.err == nil { + return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)} + } + return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)} +} + +func (any *invalidAny) Parse() *Iterator { + return nil +} + +func (any *invalidAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/feature_any_nil.go b/vendor/github.com/json-iterator/go/feature_any_nil.go new file mode 100644 index 000000000..d04cb54c1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_nil.go @@ -0,0 +1,69 @@ +package jsoniter + +type nilAny 
struct { + baseAny +} + +func (any *nilAny) LastError() error { + return nil +} + +func (any *nilAny) ValueType() ValueType { + return NilValue +} + +func (any *nilAny) MustBeValid() Any { + return any +} + +func (any *nilAny) ToBool() bool { + return false +} + +func (any *nilAny) ToInt() int { + return 0 +} + +func (any *nilAny) ToInt32() int32 { + return 0 +} + +func (any *nilAny) ToInt64() int64 { + return 0 +} + +func (any *nilAny) ToUint() uint { + return 0 +} + +func (any *nilAny) ToUint32() uint32 { + return 0 +} + +func (any *nilAny) ToUint64() uint64 { + return 0 +} + +func (any *nilAny) ToFloat32() float32 { + return 0 +} + +func (any *nilAny) ToFloat64() float64 { + return 0 +} + +func (any *nilAny) ToString() string { + return "" +} + +func (any *nilAny) WriteTo(stream *Stream) { + stream.WriteNil() +} + +func (any *nilAny) Parse() *Iterator { + return nil +} + +func (any *nilAny) GetInterface() interface{} { + return nil +} diff --git a/vendor/github.com/json-iterator/go/feature_any_number.go b/vendor/github.com/json-iterator/go/feature_any_number.go new file mode 100644 index 000000000..4e1c27641 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_number.go @@ -0,0 +1,104 @@ +package jsoniter + +import "unsafe" + +type numberLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err error +} + +func (any *numberLazyAny) ValueType() ValueType { + return NumberValue +} + +func (any *numberLazyAny) MustBeValid() Any { + return any +} + +func (any *numberLazyAny) LastError() error { + return any.err +} + +func (any *numberLazyAny) ToBool() bool { + return any.ToFloat64() != 0 +} + +func (any *numberLazyAny) ToInt() int { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToInt32() int32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := 
iter.ReadInt32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToInt64() int64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadInt64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint() uint { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint32() uint32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToUint64() uint64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadUint64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToFloat32() float32 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat32() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToFloat64() float64 { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + val := iter.ReadFloat64() + any.err = iter.Error + return val +} + +func (any *numberLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *numberLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *numberLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_object.go b/vendor/github.com/json-iterator/go/feature_any_object.go new file mode 100644 index 000000000..c44ef5c98 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_object.go @@ -0,0 +1,374 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +type objectLazyAny struct { + baseAny + cfg *frozenConfig + buf []byte + err 
error +} + +func (any *objectLazyAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectLazyAny) MustBeValid() Any { + return any +} + +func (any *objectLazyAny) LastError() error { + return any.err +} + +func (any *objectLazyAny) ToBool() bool { + return true +} + +func (any *objectLazyAny) ToInt() int { + return 0 +} + +func (any *objectLazyAny) ToInt32() int32 { + return 0 +} + +func (any *objectLazyAny) ToInt64() int64 { + return 0 +} + +func (any *objectLazyAny) ToUint() uint { + return 0 +} + +func (any *objectLazyAny) ToUint32() uint32 { + return 0 +} + +func (any *objectLazyAny) ToUint64() uint64 { + return 0 +} + +func (any *objectLazyAny) ToFloat32() float32 { + return 0 +} + +func (any *objectLazyAny) ToFloat64() float64 { + return 0 +} + +func (any *objectLazyAny) ToString() string { + return *(*string)(unsafe.Pointer(&any.buf)) +} + +func (any *objectLazyAny) ToVal(obj interface{}) { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadVal(obj) +} + +func (any *objectLazyAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + valueBytes := locateObjectField(iter, firstPath) + if valueBytes == nil { + return newInvalidAny(path) + } + iter.ResetBytes(valueBytes) + return locatePath(iter, path[1:]) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + mapped := locatePath(iter, path[1:]) + if mapped.ValueType() != InvalidValue { + mappedAll[field] = mapped + } + return true + }) + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectLazyAny) Keys() []string { + keys := []string{} + iter := any.cfg.BorrowIterator(any.buf) + 
defer any.cfg.ReturnIterator(iter) + iter.ReadMapCB(func(iter *Iterator, field string) bool { + iter.Skip() + keys = append(keys, field) + return true + }) + return keys +} + +func (any *objectLazyAny) Size() int { + size := 0 + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + size++ + return true + }) + return size +} + +func (any *objectLazyAny) WriteTo(stream *Stream) { + stream.Write(any.buf) +} + +func (any *objectLazyAny) GetInterface() interface{} { + iter := any.cfg.BorrowIterator(any.buf) + defer any.cfg.ReturnIterator(iter) + return iter.Read() +} + +type objectAny struct { + baseAny + err error + val reflect.Value +} + +func wrapStruct(val interface{}) *objectAny { + return &objectAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *objectAny) ValueType() ValueType { + return ObjectValue +} + +func (any *objectAny) MustBeValid() Any { + return any +} + +func (any *objectAny) Parse() *Iterator { + return nil +} + +func (any *objectAny) LastError() error { + return any.err +} + +func (any *objectAny) ToBool() bool { + return any.val.NumField() != 0 +} + +func (any *objectAny) ToInt() int { + return 0 +} + +func (any *objectAny) ToInt32() int32 { + return 0 +} + +func (any *objectAny) ToInt64() int64 { + return 0 +} + +func (any *objectAny) ToUint() uint { + return 0 +} + +func (any *objectAny) ToUint32() uint32 { + return 0 +} + +func (any *objectAny) ToUint64() uint64 { + return 0 +} + +func (any *objectAny) ToFloat32() float32 { + return 0 +} + +func (any *objectAny) ToFloat64() float64 { + return 0 +} + +func (any *objectAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any *objectAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case string: + field := any.val.FieldByName(firstPath) + if !field.IsValid() { + 
return newInvalidAny(path) + } + return Wrap(field.Interface()) + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for i := 0; i < any.val.NumField(); i++ { + field := any.val.Field(i) + if field.CanInterface() { + mapped := Wrap(field.Interface()).Get(path[1:]...) + if mapped.ValueType() != InvalidValue { + mappedAll[any.val.Type().Field(i).Name] = mapped + } + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + return newInvalidAny(path) + } +} + +func (any *objectAny) Keys() []string { + keys := make([]string, 0, any.val.NumField()) + for i := 0; i < any.val.NumField(); i++ { + keys = append(keys, any.val.Type().Field(i).Name) + } + return keys +} + +func (any *objectAny) Size() int { + return any.val.NumField() +} + +func (any *objectAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *objectAny) GetInterface() interface{} { + return any.val.Interface() +} + +type mapAny struct { + baseAny + err error + val reflect.Value +} + +func wrapMap(val interface{}) *mapAny { + return &mapAny{baseAny{}, nil, reflect.ValueOf(val)} +} + +func (any *mapAny) ValueType() ValueType { + return ObjectValue +} + +func (any *mapAny) MustBeValid() Any { + return any +} + +func (any *mapAny) Parse() *Iterator { + return nil +} + +func (any *mapAny) LastError() error { + return any.err +} + +func (any *mapAny) ToBool() bool { + return true +} + +func (any *mapAny) ToInt() int { + return 0 +} + +func (any *mapAny) ToInt32() int32 { + return 0 +} + +func (any *mapAny) ToInt64() int64 { + return 0 +} + +func (any *mapAny) ToUint() uint { + return 0 +} + +func (any *mapAny) ToUint32() uint32 { + return 0 +} + +func (any *mapAny) ToUint64() uint64 { + return 0 +} + +func (any *mapAny) ToFloat32() float32 { + return 0 +} + +func (any *mapAny) ToFloat64() float64 { + return 0 +} + +func (any *mapAny) ToString() string { + str, err := MarshalToString(any.val.Interface()) + any.err = err + return str +} + +func (any 
*mapAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + switch firstPath := path[0].(type) { + case int32: + if '*' == firstPath { + mappedAll := map[string]Any{} + for _, key := range any.val.MapKeys() { + keyAsStr := key.String() + element := Wrap(any.val.MapIndex(key).Interface()) + mapped := element.Get(path[1:]...) + if mapped.ValueType() != InvalidValue { + mappedAll[keyAsStr] = mapped + } + } + return wrapMap(mappedAll) + } + return newInvalidAny(path) + default: + value := any.val.MapIndex(reflect.ValueOf(firstPath)) + if !value.IsValid() { + return newInvalidAny(path) + } + return Wrap(value.Interface()) + } +} + +func (any *mapAny) Keys() []string { + keys := make([]string, 0, any.val.Len()) + for _, key := range any.val.MapKeys() { + keys = append(keys, key.String()) + } + return keys +} + +func (any *mapAny) Size() int { + return any.val.Len() +} + +func (any *mapAny) WriteTo(stream *Stream) { + stream.WriteVal(any.val) +} + +func (any *mapAny) GetInterface() interface{} { + return any.val.Interface() +} diff --git a/vendor/github.com/json-iterator/go/feature_any_string.go b/vendor/github.com/json-iterator/go/feature_any_string.go new file mode 100644 index 000000000..abf060bd5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_string.go @@ -0,0 +1,166 @@ +package jsoniter + +import ( + "fmt" + "strconv" +) + +type stringAny struct { + baseAny + val string +} + +func (any *stringAny) Get(path ...interface{}) Any { + if len(path) == 0 { + return any + } + return &invalidAny{baseAny{}, fmt.Errorf("Get %v from simple value", path)} +} + +func (any *stringAny) Parse() *Iterator { + return nil +} + +func (any *stringAny) ValueType() ValueType { + return StringValue +} + +func (any *stringAny) MustBeValid() Any { + return any +} + +func (any *stringAny) LastError() error { + return nil +} + +func (any *stringAny) ToBool() bool { + str := any.ToString() + if str == "0" { + return false + 
} + for _, c := range str { + switch c { + case ' ', '\n', '\r', '\t': + default: + return true + } + } + return false +} + +func (any *stringAny) ToInt() int { + return int(any.ToInt64()) + +} + +func (any *stringAny) ToInt32() int32 { + return int32(any.ToInt64()) +} + +func (any *stringAny) ToInt64() int64 { + if any.val == "" { + return 0 + } + + flag := 1 + startPos := 0 + endPos := 0 + if any.val[0] == '+' || any.val[0] == '-' { + startPos = 1 + } + + if any.val[0] == '-' { + flag = -1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64) + return int64(flag) * parsed +} + +func (any *stringAny) ToUint() uint { + return uint(any.ToUint64()) +} + +func (any *stringAny) ToUint32() uint32 { + return uint32(any.ToUint64()) +} + +func (any *stringAny) ToUint64() uint64 { + if any.val == "" { + return 0 + } + + startPos := 0 + endPos := 0 + + if any.val[0] == '-' { + return 0 + } + if any.val[0] == '+' { + startPos = 1 + } + + for i := startPos; i < len(any.val); i++ { + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + break + } + } + parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64) + return parsed +} + +func (any *stringAny) ToFloat32() float32 { + return float32(any.ToFloat64()) +} + +func (any *stringAny) ToFloat64() float64 { + if len(any.val) == 0 { + return 0 + } + + // first char invalid + if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') { + return 0 + } + + // extract valid num expression from string + // eg 123true => 123, -12.12xxa => -12.12 + endPos := 1 + for i := 1; i < len(any.val); i++ { + if any.val[i] == '.' 
|| any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' { + endPos = i + 1 + continue + } + + // end position is the first char which is not digit + if any.val[i] >= '0' && any.val[i] <= '9' { + endPos = i + 1 + } else { + endPos = i + break + } + } + parsed, _ := strconv.ParseFloat(any.val[:endPos], 64) + return parsed +} + +func (any *stringAny) ToString() string { + return any.val +} + +func (any *stringAny) WriteTo(stream *Stream) { + stream.WriteString(any.val) +} + +func (any *stringAny) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_uint32.go b/vendor/github.com/json-iterator/go/feature_any_uint32.go new file mode 100644 index 000000000..656bbd33d --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_uint32.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint32Any struct { + baseAny + val uint32 +} + +func (any *uint32Any) LastError() error { + return nil +} + +func (any *uint32Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint32Any) MustBeValid() Any { + return any +} + +func (any *uint32Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint32Any) ToInt() int { + return int(any.val) +} + +func (any *uint32Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint32Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint32Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint32Any) ToUint32() uint32 { + return any.val +} + +func (any *uint32Any) ToUint64() uint64 { + return uint64(any.val) +} + +func (any *uint32Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint32Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint32Any) ToString() string { + return strconv.FormatInt(int64(any.val), 10) +} + +func (any *uint32Any) WriteTo(stream *Stream) { + stream.WriteUint32(any.val) +} + +func (any 
*uint32Any) Parse() *Iterator { + return nil +} + +func (any *uint32Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_any_uint64.go b/vendor/github.com/json-iterator/go/feature_any_uint64.go new file mode 100644 index 000000000..7df2fce33 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_any_uint64.go @@ -0,0 +1,74 @@ +package jsoniter + +import ( + "strconv" +) + +type uint64Any struct { + baseAny + val uint64 +} + +func (any *uint64Any) LastError() error { + return nil +} + +func (any *uint64Any) ValueType() ValueType { + return NumberValue +} + +func (any *uint64Any) MustBeValid() Any { + return any +} + +func (any *uint64Any) ToBool() bool { + return any.val != 0 +} + +func (any *uint64Any) ToInt() int { + return int(any.val) +} + +func (any *uint64Any) ToInt32() int32 { + return int32(any.val) +} + +func (any *uint64Any) ToInt64() int64 { + return int64(any.val) +} + +func (any *uint64Any) ToUint() uint { + return uint(any.val) +} + +func (any *uint64Any) ToUint32() uint32 { + return uint32(any.val) +} + +func (any *uint64Any) ToUint64() uint64 { + return any.val +} + +func (any *uint64Any) ToFloat32() float32 { + return float32(any.val) +} + +func (any *uint64Any) ToFloat64() float64 { + return float64(any.val) +} + +func (any *uint64Any) ToString() string { + return strconv.FormatUint(any.val, 10) +} + +func (any *uint64Any) WriteTo(stream *Stream) { + stream.WriteUint64(any.val) +} + +func (any *uint64Any) Parse() *Iterator { + return nil +} + +func (any *uint64Any) GetInterface() interface{} { + return any.val +} diff --git a/vendor/github.com/json-iterator/go/feature_config.go b/vendor/github.com/json-iterator/go/feature_config.go new file mode 100644 index 000000000..78a2ce1a5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_config.go @@ -0,0 +1,348 @@ +package jsoniter + +import ( + "encoding/json" + 
"errors" + "io" + "reflect" + "sync/atomic" + "unsafe" +) + +// Config customize how the API should behave. +// The API is created from Config by Froze. +type Config struct { + IndentionStep int + MarshalFloatWith6Digits bool + EscapeHTML bool + SortMapKeys bool + UseNumber bool + TagKey string + ValidateJsonRawMessage bool + ObjectFieldMustBeSimpleString bool +} + +type frozenConfig struct { + configBeforeFrozen Config + sortMapKeys bool + indentionStep int + objectFieldMustBeSimpleString bool + decoderCache unsafe.Pointer + encoderCache unsafe.Pointer + extensions []Extension + streamPool chan *Stream + iteratorPool chan *Iterator +} + +// API the public interface of this package. +// Primary Marshal and Unmarshal. +type API interface { + IteratorPool + StreamPool + MarshalToString(v interface{}) (string, error) + Marshal(v interface{}) ([]byte, error) + MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) + UnmarshalFromString(str string, v interface{}) error + Unmarshal(data []byte, v interface{}) error + Get(data []byte, path ...interface{}) Any + NewEncoder(writer io.Writer) *Encoder + NewDecoder(reader io.Reader) *Decoder + Valid(data []byte) bool + RegisterExtension(extension Extension) +} + +// ConfigDefault the default API +var ConfigDefault = Config{ + EscapeHTML: true, +}.Froze() + +// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior +var ConfigCompatibleWithStandardLibrary = Config{ + EscapeHTML: true, + SortMapKeys: true, + ValidateJsonRawMessage: true, +}.Froze() + +// ConfigFastest marshals float with only 6 digits precision +var ConfigFastest = Config{ + EscapeHTML: false, + MarshalFloatWith6Digits: true, // will lose precession + ObjectFieldMustBeSimpleString: true, // do not unescape object field +}.Froze() + +// Froze forge API from config +func (cfg Config) Froze() API { + // TODO: cache frozen config + frozenConfig := &frozenConfig{ + sortMapKeys: cfg.SortMapKeys, + indentionStep: 
cfg.IndentionStep, + objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString, + streamPool: make(chan *Stream, 16), + iteratorPool: make(chan *Iterator, 16), + } + atomic.StorePointer(&frozenConfig.decoderCache, unsafe.Pointer(&map[string]ValDecoder{})) + atomic.StorePointer(&frozenConfig.encoderCache, unsafe.Pointer(&map[string]ValEncoder{})) + if cfg.MarshalFloatWith6Digits { + frozenConfig.marshalFloatWith6Digits() + } + if cfg.EscapeHTML { + frozenConfig.escapeHTML() + } + if cfg.UseNumber { + frozenConfig.useNumber() + } + if cfg.ValidateJsonRawMessage { + frozenConfig.validateJsonRawMessage() + } + frozenConfig.configBeforeFrozen = cfg + return frozenConfig +} + +func (cfg *frozenConfig) validateJsonRawMessage() { + encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) { + rawMessage := *(*json.RawMessage)(ptr) + iter := cfg.BorrowIterator([]byte(rawMessage)) + iter.Read() + if iter.Error != nil { + stream.WriteRaw("null") + } else { + cfg.ReturnIterator(iter) + stream.WriteRaw(string(rawMessage)) + } + }, func(ptr unsafe.Pointer) bool { + return false + }} + cfg.addEncoderToCache(reflect.TypeOf((*json.RawMessage)(nil)).Elem(), encoder) + cfg.addEncoderToCache(reflect.TypeOf((*RawMessage)(nil)).Elem(), encoder) +} + +func (cfg *frozenConfig) useNumber() { + cfg.addDecoderToCache(reflect.TypeOf((*interface{})(nil)).Elem(), &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() == NumberValue { + *((*interface{})(ptr)) = json.Number(iter.readNumberAsString()) + } else { + *((*interface{})(ptr)) = iter.Read() + } + }}) +} +func (cfg *frozenConfig) getTagKey() string { + tagKey := cfg.configBeforeFrozen.TagKey + if tagKey == "" { + return "json" + } + return tagKey +} + +func (cfg *frozenConfig) RegisterExtension(extension Extension) { + cfg.extensions = append(cfg.extensions, extension) +} + +type lossyFloat32Encoder struct { +} + +func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + 
stream.WriteFloat32Lossy(*((*float32)(ptr))) +} + +func (encoder *lossyFloat32Encoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type lossyFloat64Encoder struct { +} + +func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64Lossy(*((*float64)(ptr))) +} + +func (encoder *lossyFloat64Encoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +// EnableLossyFloatMarshalling keeps 10**(-6) precision +// for float variables for better performance. +func (cfg *frozenConfig) marshalFloatWith6Digits() { + // for better performance + cfg.addEncoderToCache(reflect.TypeOf((*float32)(nil)).Elem(), &lossyFloat32Encoder{}) + cfg.addEncoderToCache(reflect.TypeOf((*float64)(nil)).Elem(), &lossyFloat64Encoder{}) +} + +type htmlEscapedStringEncoder struct { +} + +func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + str := *((*string)(ptr)) + stream.WriteStringWithHTMLEscaped(str) +} + +func (encoder *htmlEscapedStringEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +func (cfg *frozenConfig) escapeHTML() { + cfg.addEncoderToCache(reflect.TypeOf((*string)(nil)).Elem(), &htmlEscapedStringEncoder{}) +} + +func (cfg *frozenConfig) addDecoderToCache(cacheKey reflect.Type, decoder ValDecoder) { + done := false + for !done { + ptr := atomic.LoadPointer(&cfg.decoderCache) + cache := *(*map[reflect.Type]ValDecoder)(ptr) + copied := map[reflect.Type]ValDecoder{} + for k, v := range cache { + copied[k] = v + } + copied[cacheKey] = 
decoder + done = atomic.CompareAndSwapPointer(&cfg.decoderCache, ptr, unsafe.Pointer(&copied)) + } +} + +func (cfg *frozenConfig) addEncoderToCache(cacheKey reflect.Type, encoder ValEncoder) { + done := false + for !done { + ptr := atomic.LoadPointer(&cfg.encoderCache) + cache := *(*map[reflect.Type]ValEncoder)(ptr) + copied := map[reflect.Type]ValEncoder{} + for k, v := range cache { + copied[k] = v + } + copied[cacheKey] = encoder + done = atomic.CompareAndSwapPointer(&cfg.encoderCache, ptr, unsafe.Pointer(&copied)) + } +} + +func (cfg *frozenConfig) getDecoderFromCache(cacheKey reflect.Type) ValDecoder { + ptr := atomic.LoadPointer(&cfg.decoderCache) + cache := *(*map[reflect.Type]ValDecoder)(ptr) + return cache[cacheKey] +} + +func (cfg *frozenConfig) getEncoderFromCache(cacheKey reflect.Type) ValEncoder { + ptr := atomic.LoadPointer(&cfg.encoderCache) + cache := *(*map[reflect.Type]ValEncoder)(ptr) + return cache[cacheKey] +} + +func (cfg *frozenConfig) cleanDecoders() { + typeDecoders = map[string]ValDecoder{} + fieldDecoders = map[string]ValDecoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) cleanEncoders() { + typeEncoders = map[string]ValEncoder{} + fieldEncoders = map[string]ValEncoder{} + *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig)) +} + +func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return "", stream.Error + } + return string(stream.Buffer()), nil +} + +func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) { + stream := cfg.BorrowStream(nil) + defer cfg.ReturnStream(stream) + stream.WriteVal(v) + if stream.Error != nil { + return nil, stream.Error + } + result := stream.Buffer() + copied := make([]byte, len(result)) + copy(copied, result) + return copied, nil +} + +func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) 
([]byte, error) { + if prefix != "" { + panic("prefix is not supported") + } + for _, r := range indent { + if r != ' ' { + panic("indent can only be space") + } + } + newCfg := cfg.configBeforeFrozen + newCfg.IndentionStep = len(indent) + return newCfg.Froze().Marshal(v) +} + +func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error { + data := []byte(str) + data = data[:lastNotSpacePos(data)] + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.ReadVal(v) + if iter.head == iter.tail { + iter.loadMore() + } + if iter.Error == io.EOF { + return nil + } + if iter.Error == nil { + iter.ReportError("UnmarshalFromString", "there are bytes left after unmarshal") + } + return iter.Error +} + +func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + return locatePath(iter, path) +} + +func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error { + data = data[:lastNotSpacePos(data)] + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + typ := reflect.TypeOf(v) + if typ.Kind() != reflect.Ptr { + // return non-pointer error + return errors.New("the second param must be ptr type") + } + iter.ReadVal(v) + if iter.head == iter.tail { + iter.loadMore() + } + if iter.Error == io.EOF { + return nil + } + if iter.Error == nil { + iter.ReportError("Unmarshal", "there are bytes left after unmarshal") + } + return iter.Error +} + +func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder { + stream := NewStream(cfg, writer, 512) + return &Encoder{stream} +} + +func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder { + iter := Parse(cfg, reader, 512) + return &Decoder{iter} +} + +func (cfg *frozenConfig) Valid(data []byte) bool { + iter := cfg.BorrowIterator(data) + defer cfg.ReturnIterator(iter) + iter.Skip() + return iter.Error == nil +} diff --git a/vendor/github.com/json-iterator/go/feature_iter.go 
b/vendor/github.com/json-iterator/go/feature_iter.go new file mode 100644 index 000000000..95ae54fbf --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter.go @@ -0,0 +1,322 @@ +package jsoniter + +import ( + "encoding/json" + "fmt" + "io" +) + +// ValueType the type for JSON element +type ValueType int + +const ( + // InvalidValue invalid JSON element + InvalidValue ValueType = iota + // StringValue JSON element "string" + StringValue + // NumberValue JSON element 100 or 0.10 + NumberValue + // NilValue JSON element null + NilValue + // BoolValue JSON element true or false + BoolValue + // ArrayValue JSON element [] + ArrayValue + // ObjectValue JSON element {} + ObjectValue +) + +var hexDigits []byte +var valueTypes []ValueType + +func init() { + hexDigits = make([]byte, 256) + for i := 0; i < len(hexDigits); i++ { + hexDigits[i] = 255 + } + for i := '0'; i <= '9'; i++ { + hexDigits[i] = byte(i - '0') + } + for i := 'a'; i <= 'f'; i++ { + hexDigits[i] = byte((i - 'a') + 10) + } + for i := 'A'; i <= 'F'; i++ { + hexDigits[i] = byte((i - 'A') + 10) + } + valueTypes = make([]ValueType, 256) + for i := 0; i < len(valueTypes); i++ { + valueTypes[i] = InvalidValue + } + valueTypes['"'] = StringValue + valueTypes['-'] = NumberValue + valueTypes['0'] = NumberValue + valueTypes['1'] = NumberValue + valueTypes['2'] = NumberValue + valueTypes['3'] = NumberValue + valueTypes['4'] = NumberValue + valueTypes['5'] = NumberValue + valueTypes['6'] = NumberValue + valueTypes['7'] = NumberValue + valueTypes['8'] = NumberValue + valueTypes['9'] = NumberValue + valueTypes['t'] = BoolValue + valueTypes['f'] = BoolValue + valueTypes['n'] = NilValue + valueTypes['['] = ArrayValue + valueTypes['{'] = ObjectValue +} + +// Iterator is a io.Reader like object, with JSON specific read functions. +// Error is not returned as return value, but stored as Error member on this iterator instance. 
+type Iterator struct { + cfg *frozenConfig + reader io.Reader + buf []byte + head int + tail int + captureStartedAt int + captured []byte + Error error + Attachment interface{} // open for customized decoder +} + +// NewIterator creates an empty Iterator instance +func NewIterator(cfg API) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: nil, + head: 0, + tail: 0, + } +} + +// Parse creates an Iterator instance from io.Reader +func Parse(cfg API, reader io.Reader, bufSize int) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: reader, + buf: make([]byte, bufSize), + head: 0, + tail: 0, + } +} + +// ParseBytes creates an Iterator instance from byte array +func ParseBytes(cfg API, input []byte) *Iterator { + return &Iterator{ + cfg: cfg.(*frozenConfig), + reader: nil, + buf: input, + head: 0, + tail: len(input), + } +} + +// ParseString creates an Iterator instance from string +func ParseString(cfg API, input string) *Iterator { + return ParseBytes(cfg, []byte(input)) +} + +// Pool returns a pool can provide more iterator with same configuration +func (iter *Iterator) Pool() IteratorPool { + return iter.cfg +} + +// Reset reuse iterator instance by specifying another reader +func (iter *Iterator) Reset(reader io.Reader) *Iterator { + iter.reader = reader + iter.head = 0 + iter.tail = 0 + return iter +} + +// ResetBytes reuse iterator instance by specifying another byte array as input +func (iter *Iterator) ResetBytes(input []byte) *Iterator { + iter.reader = nil + iter.buf = input + iter.head = 0 + iter.tail = len(input) + return iter +} + +// WhatIsNext gets ValueType of relatively next json element +func (iter *Iterator) WhatIsNext() ValueType { + valueType := valueTypes[iter.nextToken()] + iter.unreadByte() + return valueType +} + +func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + 
iter.head = i + return false + } + return true +} + +func (iter *Iterator) isObjectEnd() bool { + c := iter.nextToken() + if c == ',' { + return false + } + if c == '}' { + return true + } + iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c})) + return true +} + +func (iter *Iterator) nextToken() byte { + // a variation of skip whitespaces, returning the next non-whitespace token + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\t', '\r': + continue + } + iter.head = i + 1 + return c + } + if !iter.loadMore() { + return 0 + } + } +} + +// ReportError record a error in iterator instance with current position. +func (iter *Iterator) ReportError(operation string, msg string) { + if iter.Error != nil { + if iter.Error != io.EOF { + return + } + } + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + peekEnd := iter.head + 10 + if peekEnd > iter.tail { + peekEnd = iter.tail + } + parsing := string(iter.buf[peekStart:peekEnd]) + contextStart := iter.head - 50 + if contextStart < 0 { + contextStart = 0 + } + contextEnd := iter.head + 50 + if contextEnd > iter.tail { + contextEnd = iter.tail + } + context := string(iter.buf[contextStart:contextEnd]) + iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...", + operation, msg, iter.head-peekStart, parsing, context) +} + +// CurrentBuffer gets current buffer as string for debugging purpose +func (iter *Iterator) CurrentBuffer() string { + peekStart := iter.head - 10 + if peekStart < 0 { + peekStart = 0 + } + return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head, + string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail])) +} + +func (iter *Iterator) readByte() (ret byte) { + if iter.head == iter.tail { + if iter.loadMore() { + ret = iter.buf[iter.head] + iter.head++ + return ret + } + return 0 + } + ret = 
iter.buf[iter.head] + iter.head++ + return ret +} + +func (iter *Iterator) loadMore() bool { + if iter.reader == nil { + if iter.Error == nil { + iter.head = iter.tail + iter.Error = io.EOF + } + return false + } + if iter.captured != nil { + iter.captured = append(iter.captured, + iter.buf[iter.captureStartedAt:iter.tail]...) + iter.captureStartedAt = 0 + } + for { + n, err := iter.reader.Read(iter.buf) + if n == 0 { + if err != nil { + if iter.Error == nil { + iter.Error = err + } + return false + } + } else { + iter.head = 0 + iter.tail = n + return true + } + } +} + +func (iter *Iterator) unreadByte() { + if iter.Error != nil { + return + } + iter.head-- + return +} + +// Read read the next JSON element as generic interface{}. +func (iter *Iterator) Read() interface{} { + valueType := iter.WhatIsNext() + switch valueType { + case StringValue: + return iter.ReadString() + case NumberValue: + if iter.cfg.configBeforeFrozen.UseNumber { + return json.Number(iter.readNumberAsString()) + } + return iter.ReadFloat64() + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + return nil + case BoolValue: + return iter.ReadBool() + case ArrayValue: + arr := []interface{}{} + iter.ReadArrayCB(func(iter *Iterator) bool { + var elem interface{} + iter.ReadVal(&elem) + arr = append(arr, elem) + return true + }) + return arr + case ObjectValue: + obj := map[string]interface{}{} + iter.ReadMapCB(func(Iter *Iterator, field string) bool { + var elem interface{} + iter.ReadVal(&elem) + obj[field] = elem + return true + }) + return obj + default: + iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType)) + return nil + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_array.go b/vendor/github.com/json-iterator/go/feature_iter_array.go new file mode 100644 index 000000000..6188cb457 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_array.go @@ -0,0 +1,58 @@ +package jsoniter + +// ReadArray 
read array element, tells if the array has more element to read. +func (iter *Iterator) ReadArray() (ret bool) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return false // null + case '[': + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + return true + } + return false + case ']': + return false + case ',': + return true + default: + iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c})) + return + } +} + +// ReadArrayCB read array with callback +func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) { + c := iter.nextToken() + if c == '[' { + c = iter.nextToken() + if c != ']' { + iter.unreadByte() + if !callback(iter) { + return false + } + c = iter.nextToken() + for c == ',' { + if !callback(iter) { + return false + } + c = iter.nextToken() + } + if c != ']' { + iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c})) + return false + } + return true + } + return true + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c})) + return false +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_float.go b/vendor/github.com/json-iterator/go/feature_iter_float.go new file mode 100644 index 000000000..4f883c095 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_float.go @@ -0,0 +1,347 @@ +package jsoniter + +import ( + "encoding/json" + "io" + "math/big" + "strconv" + "strings" + "unsafe" +) + +var floatDigits []int8 + +const invalidCharForNumber = int8(-1) +const endOfNumber = int8(-2) +const dotInNumber = int8(-3) + +func init() { + floatDigits = make([]int8, 256) + for i := 0; i < len(floatDigits); i++ { + floatDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + floatDigits[i] = i - int8('0') + } + floatDigits[','] = endOfNumber + 
floatDigits[']'] = endOfNumber + floatDigits['}'] = endOfNumber + floatDigits[' '] = endOfNumber + floatDigits['\t'] = endOfNumber + floatDigits['\n'] = endOfNumber + floatDigits['.'] = dotInNumber +} + +// ReadBigFloat read big.Float +func (iter *Iterator) ReadBigFloat() (ret *big.Float) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + prec := 64 + if len(str) > prec { + prec = len(str) + } + val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero) + if err != nil { + iter.Error = err + return nil + } + return val +} + +// ReadBigInt read big.Int +func (iter *Iterator) ReadBigInt() (ret *big.Int) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return nil + } + ret = big.NewInt(0) + var success bool + ret, success = ret.SetString(str, 10) + if !success { + iter.ReportError("ReadBigInt", "invalid big int") + return nil + } + return ret +} + +//ReadFloat32 read float32 +func (iter *Iterator) ReadFloat32() (ret float32) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat32() + } + iter.unreadByte() + return iter.readPositiveFloat32() +} + +func (iter *Iterator) readPositiveFloat32() (ret float32) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.ReportError("readFloat32", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat32", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat32SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat32", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + 
c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat32SlowPath() + case endOfNumber: + iter.head = i + return float32(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' { + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat32SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float32(float64(value) / float64(pow10[decimalPlaces])) + } + // too many decimal places + return iter.readFloat32SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat32SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat32SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat32SlowPath() +} + +func (iter *Iterator) readNumberAsString() (ret string) { + strBuf := [16]byte{} + str := strBuf[0:0] +load_loop: + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + str = append(str, c) + continue + default: + iter.head = i + break load_loop + } + } + if !iter.loadMore() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + return + } + if len(str) == 0 { + iter.ReportError("readNumberAsString", "invalid number") + } + return *(*string)(unsafe.Pointer(&str)) +} + +func (iter *Iterator) readFloat32SlowPath() (ret float32) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat32SlowPath", errMsg) + return + 
} + val, err := strconv.ParseFloat(str, 32) + if err != nil { + iter.Error = err + return + } + return float32(val) +} + +// ReadFloat64 read float64 +func (iter *Iterator) ReadFloat64() (ret float64) { + c := iter.nextToken() + if c == '-' { + return -iter.readPositiveFloat64() + } + iter.unreadByte() + return iter.readPositiveFloat64() +} + +func (iter *Iterator) readPositiveFloat64() (ret float64) { + value := uint64(0) + c := byte(' ') + i := iter.head + // first char + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + i++ + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.ReportError("readFloat64", "empty number") + return + case dotInNumber: + iter.ReportError("readFloat64", "leading dot is invalid") + return + case 0: + if i == iter.tail { + return iter.readFloat64SlowPath() + } + c = iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.ReportError("readFloat64", "leading zero is invalid") + return + } + } + value = uint64(ind) + // chars before dot +non_decimal_loop: + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case invalidCharForNumber: + return iter.readFloat64SlowPath() + case endOfNumber: + iter.head = i + return float64(value) + case dotInNumber: + break non_decimal_loop + } + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind; + } + // chars after dot + if c == '.' 
{ + i++ + decimalPlaces := 0 + if i == iter.tail { + return iter.readFloat64SlowPath() + } + for ; i < iter.tail; i++ { + c = iter.buf[i] + ind := floatDigits[c] + switch ind { + case endOfNumber: + if decimalPlaces > 0 && decimalPlaces < len(pow10) { + iter.head = i + return float64(value) / float64(pow10[decimalPlaces]) + } + // too many decimal places + return iter.readFloat64SlowPath() + case invalidCharForNumber: + fallthrough + case dotInNumber: + return iter.readFloat64SlowPath() + } + decimalPlaces++ + if value > uint64SafeToMultiple10 { + return iter.readFloat64SlowPath() + } + value = (value << 3) + (value << 1) + uint64(ind) + } + } + return iter.readFloat64SlowPath() +} + +func (iter *Iterator) readFloat64SlowPath() (ret float64) { + str := iter.readNumberAsString() + if iter.Error != nil && iter.Error != io.EOF { + return + } + errMsg := validateFloat(str) + if errMsg != "" { + iter.ReportError("readFloat64SlowPath", errMsg) + return + } + val, err := strconv.ParseFloat(str, 64) + if err != nil { + iter.Error = err + return + } + return val +} + +func validateFloat(str string) string { + // strconv.ParseFloat is not validating `1.` or `1.e1` + if len(str) == 0 { + return "empty number" + } + if str[0] == '-' { + return "-- is not valid" + } + dotPos := strings.IndexByte(str, '.') + if dotPos != -1 { + if dotPos == len(str)-1 { + return "dot can not be last character" + } + switch str[dotPos+1] { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + return "missing digit after dot" + } + } + return "" +} + +// ReadNumber read json.Number +func (iter *Iterator) ReadNumber() (ret json.Number) { + return json.Number(iter.readNumberAsString()) +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_int.go b/vendor/github.com/json-iterator/go/feature_iter_int.go new file mode 100644 index 000000000..4781c6393 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_int.go @@ -0,0 +1,339 @@ 
+package jsoniter + +import ( + "math" + "strconv" +) + +var intDigits []int8 + +const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1 +const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1 + +func init() { + intDigits = make([]int8, 256) + for i := 0; i < len(intDigits); i++ { + intDigits[i] = invalidCharForNumber + } + for i := int8('0'); i <= int8('9'); i++ { + intDigits[i] = i - int8('0') + } +} + +// ReadUint read uint +func (iter *Iterator) ReadUint() uint { + return uint(iter.ReadUint64()) +} + +// ReadInt read int +func (iter *Iterator) ReadInt() int { + return int(iter.ReadInt64()) +} + +// ReadInt8 read int8 +func (iter *Iterator) ReadInt8() (ret int8) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt8+1 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int8(val) + } + val := iter.readUint32(c) + if val > math.MaxInt8 { + iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int8(val) +} + +// ReadUint8 read uint8 +func (iter *Iterator) ReadUint8() (ret uint8) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint8 { + iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint8(val) +} + +// ReadInt16 read int16 +func (iter *Iterator) ReadInt16() (ret int16) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt16+1 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int16(val) + } + val := iter.readUint32(c) + if val > math.MaxInt16 { + iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int16(val) +} + +// ReadUint16 read uint16 +func (iter *Iterator) ReadUint16() (ret uint16) { + val := iter.readUint32(iter.nextToken()) + if val > math.MaxUint16 { + 
iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return uint16(val) +} + +// ReadInt32 read int32 +func (iter *Iterator) ReadInt32() (ret int32) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint32(iter.readByte()) + if val > math.MaxInt32+1 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return -int32(val) + } + val := iter.readUint32(c) + if val > math.MaxInt32 { + iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10)) + return + } + return int32(val) +} + +// ReadUint32 read uint32 +func (iter *Iterator) ReadUint32() (ret uint32) { + return iter.readUint32(iter.nextToken()) +} + +func (iter *Iterator) readUint32(c byte) (ret uint32) { + ind := intDigits[c] + if ind == 0 { + iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint32(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint32(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint32(ind2)*10 + uint32(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5) 
+ } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + if value > uint32SafeToMultiply10 { + value2 := (value << 3) + (value << 1) + uint32(ind) + if value2 < value { + iter.ReportError("readUint32", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint32(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +// ReadInt64 read int64 +func (iter *Iterator) ReadInt64() (ret int64) { + c := iter.nextToken() + if c == '-' { + val := iter.readUint64(iter.readByte()) + if val > math.MaxInt64+1 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return -int64(val) + } + val := iter.readUint64(c) + if val > math.MaxInt64 { + iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10)) + return + } + return int64(val) +} + +// ReadUint64 read uint64 +func (iter *Iterator) ReadUint64() uint64 { + return iter.readUint64(iter.nextToken()) +} + +func (iter *Iterator) readUint64(c byte) (ret uint64) { + ind := intDigits[c] + if ind == 0 { + 
iter.assertInteger() + return 0 // single zero + } + if ind == invalidCharForNumber { + iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)})) + return + } + value := uint64(ind) + if iter.tail-iter.head > 10 { + i := iter.head + ind2 := intDigits[iter.buf[i]] + if ind2 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + i++ + ind3 := intDigits[iter.buf[i]] + if ind3 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10 + uint64(ind2) + } + //iter.head = i + 1 + //value = value * 100 + uint32(ind2) * 10 + uint32(ind3) + i++ + ind4 := intDigits[iter.buf[i]] + if ind4 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100 + uint64(ind2)*10 + uint64(ind3) + } + i++ + ind5 := intDigits[iter.buf[i]] + if ind5 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4) + } + i++ + ind6 := intDigits[iter.buf[i]] + if ind6 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5) + } + i++ + ind7 := intDigits[iter.buf[i]] + if ind7 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6) + } + i++ + ind8 := intDigits[iter.buf[i]] + if ind8 == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7) + } + i++ + ind9 := intDigits[iter.buf[i]] + value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8) + iter.head = i + if ind9 == invalidCharForNumber { + iter.assertInteger() + return value + } + } + for { + for i := 
iter.head; i < iter.tail; i++ { + ind = intDigits[iter.buf[i]] + if ind == invalidCharForNumber { + iter.head = i + iter.assertInteger() + return value + } + if value > uint64SafeToMultiple10 { + value2 := (value << 3) + (value << 1) + uint64(ind) + if value2 < value { + iter.ReportError("readUint64", "overflow") + return + } + value = value2 + continue + } + value = (value << 3) + (value << 1) + uint64(ind) + } + if !iter.loadMore() { + iter.assertInteger() + return value + } + } +} + +func (iter *Iterator) assertInteger() { + if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' { + iter.ReportError("assertInteger", "can not decode float as int") + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_object.go b/vendor/github.com/json-iterator/go/feature_iter_object.go new file mode 100644 index 000000000..dfd91fa60 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_object.go @@ -0,0 +1,267 @@ +package jsoniter + +import ( + "fmt" + "unicode" + "unsafe" +) + +// ReadObject read one field from object. +// If object ended, returns empty string. +// Otherwise, returns the field name. 
+func (iter *Iterator) ReadObject() (ret string) { + c := iter.nextToken() + switch c { + case 'n': + iter.skipThreeBytes('u', 'l', 'l') + return "" // null + case '{': + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + if iter.cfg.objectFieldMustBeSimpleString { + return string(iter.readObjectFieldAsBytes()) + } else { + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + } + if c == '}' { + return "" // end of object + } + iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c})) + return + case ',': + if iter.cfg.objectFieldMustBeSimpleString { + return string(iter.readObjectFieldAsBytes()) + } else { + field := iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + return field + } + case '}': + return "" // end of object + default: + iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c}))) + return + } +} + +func (iter *Iterator) readFieldHash() int32 { + hash := int64(0x811c9dc5) + c := iter.nextToken() + if c == '"' { + for { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + b := iter.buf[i] + if !iter.cfg.objectFieldMustBeSimpleString && b == '\\' { + iter.head = i + for _, b := range iter.readStringSlowPath() { + if 'A' <= b && b <= 'Z' { + b += 'a' - 'A' + } + hash ^= int64(b) + hash *= 0x1000193 + } + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return int32(hash) + } + if b == '"' { + iter.head = i + 1 + c = iter.nextToken() + if c != ':' { + iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c})) + return 0 + } + return int32(hash) + } + if 'A' <= b && b <= 'Z' { + b += 'a' - 'A' + } + hash ^= int64(b) + 
hash *= 0x1000193 + } + if !iter.loadMore() { + iter.ReportError("readFieldHash", `incomplete field name`) + return 0 + } + } + } + iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c})) + return 0 +} + +func calcHash(str string) int32 { + hash := int64(0x811c9dc5) + for _, b := range str { + hash ^= int64(unicode.ToLower(b)) + hash *= 0x1000193 + } + return int32(hash) +} + +// ReadObjectCB read object with callback, the key is ascii only and field name not copied +func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + var fieldBytes []byte + var field string + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c = iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadObjectCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadObjectCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +// ReadMapCB read map with callback, the 
key can be any string +func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '"' { + iter.unreadByte() + field := iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + for c == ',' { + field = iter.ReadString() + if iter.nextToken() != ':' { + iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c})) + return false + } + if !callback(iter, field) { + return false + } + c = iter.nextToken() + } + if c != '}' { + iter.ReportError("ReadMapCB", `object not ended with }`) + return false + } + return true + } + if c == '}' { + return true + } + iter.ReportError("ReadMapCB", `expect " after }, but found `+string([]byte{c})) + return false + } + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return true // null + } + iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectStart() bool { + c := iter.nextToken() + if c == '{' { + c = iter.nextToken() + if c == '}' { + return false + } + iter.unreadByte() + return true + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return false + } + iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c})) + return false +} + +func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) { + str := iter.ReadStringAsSlice() + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if iter.buf[iter.head] != ':' { + iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]})) + return + } + iter.head++ + if iter.skipWhitespacesWithoutLoadMore() { + if ret == nil { + 
ret = make([]byte, len(str)) + copy(ret, str) + } + if !iter.loadMore() { + return + } + } + if ret == nil { + return str + } + return ret +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip.go b/vendor/github.com/json-iterator/go/feature_iter_skip.go new file mode 100644 index 000000000..f58beb913 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip.go @@ -0,0 +1,129 @@ +package jsoniter + +import "fmt" + +// ReadNil reads a json object as nil and +// returns whether it's a nil or not +func (iter *Iterator) ReadNil() (ret bool) { + c := iter.nextToken() + if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') // null + return true + } + iter.unreadByte() + return false +} + +// ReadBool reads a json object as BoolValue +func (iter *Iterator) ReadBool() (ret bool) { + c := iter.nextToken() + if c == 't' { + iter.skipThreeBytes('r', 'u', 'e') + return true + } + if c == 'f' { + iter.skipFourBytes('a', 'l', 's', 'e') + return false + } + iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c})) + return +} + +// SkipAndReturnBytes skip next JSON element, and return its content as []byte. +// The []byte can be kept, it is a copy of data. 
+func (iter *Iterator) SkipAndReturnBytes() []byte { + iter.startCapture(iter.head) + iter.Skip() + return iter.stopCapture() +} + +type captureBuffer struct { + startedAt int + captured []byte +} + +func (iter *Iterator) startCapture(captureStartedAt int) { + if iter.captured != nil { + panic("already in capture mode") + } + iter.captureStartedAt = captureStartedAt + iter.captured = make([]byte, 0, 32) +} + +func (iter *Iterator) stopCapture() []byte { + if iter.captured == nil { + panic("not in capture mode") + } + captured := iter.captured + remaining := iter.buf[iter.captureStartedAt:iter.head] + iter.captureStartedAt = -1 + iter.captured = nil + if len(captured) == 0 { + copied := make([]byte, len(remaining)) + copy(copied, remaining) + return copied + } + captured = append(captured, remaining...) + return captured +} + +// Skip skips a json object and positions to relatively the next json object +func (iter *Iterator) Skip() { + c := iter.nextToken() + switch c { + case '"': + iter.skipString() + case 'n': + iter.skipThreeBytes('u', 'l', 'l') // null + case 't': + iter.skipThreeBytes('r', 'u', 'e') // true + case 'f': + iter.skipFourBytes('a', 'l', 's', 'e') // false + case '0': + iter.unreadByte() + iter.ReadFloat32() + case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9': + iter.skipNumber() + case '[': + iter.skipArray() + case '{': + iter.skipObject() + default: + iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c)) + return + } +} + +func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } + if iter.readByte() != b4 { + 
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4}))) + return + } +} + +func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) { + if iter.readByte() != b1 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b2 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } + if iter.readByte() != b3 { + iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3}))) + return + } +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go new file mode 100644 index 000000000..8fcdc3b69 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip_sloppy.go @@ -0,0 +1,144 @@ +//+build jsoniter_sloppy + +package jsoniter + +// sloppy but faster implementation, do not validate the input json + +func (iter *Iterator) skipNumber() { + for { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case ' ', '\n', '\r', '\t', ',', '}', ']': + iter.head = i + return + } + } + if !iter.loadMore() { + return + } + } +} + +func (iter *Iterator) skipArray() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '[': // If open symbol, increase level + level++ + case ']': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete array") + return + } + } +} + +func (iter *Iterator) skipObject() { + level := 1 + for { + for i := iter.head; i < iter.tail; i++ { + switch iter.buf[i] { + case '"': // If inside string, skip 
it + iter.head = i + 1 + iter.skipString() + i = iter.head - 1 // it will be i++ soon + case '{': // If open symbol, increase level + level++ + case '}': // If close symbol, increase level + level-- + + // If we have returned to the original level, we're done + if level == 0 { + iter.head = i + 1 + return + } + } + } + if !iter.loadMore() { + iter.ReportError("skipObject", "incomplete object") + return + } + } +} + +func (iter *Iterator) skipString() { + for { + end, escaped := iter.findStringEnd() + if end == -1 { + if !iter.loadMore() { + iter.ReportError("skipString", "incomplete string") + return + } + if escaped { + iter.head = 1 // skip the first char as last char read is \ + } + } else { + iter.head = end + return + } + } +} + +// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go +// Tries to find the end of string +// Support if string contains escaped quote symbols. +func (iter *Iterator) findStringEnd() (int, bool) { + escaped := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + if !escaped { + return i + 1, false + } + j := i - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return i + 1, true + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + } + } else if c == '\\' { + escaped = true + } + } + j := iter.tail - 1 + for { + if j < iter.head || iter.buf[j] != '\\' { + // even number of backslashes + // either end of buffer, or " found + return -1, false // do not end with \ + } + j-- + if j < iter.head || iter.buf[j] != '\\' { + // odd number of backslashes + // it is \" or \\\" + break + } + j-- + + } + return -1, true // end with \ +} diff --git a/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go new file mode 100644 index 
000000000..f67bc2e83 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_skip_strict.go @@ -0,0 +1,89 @@ +//+build !jsoniter_sloppy + +package jsoniter + +import "fmt" + +func (iter *Iterator) skipNumber() { + if !iter.trySkipNumber() { + iter.unreadByte() + iter.ReadFloat32() + } +} + +func (iter *Iterator) trySkipNumber() bool { + dotFound := false + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + case '.': + if dotFound { + iter.ReportError("validateNumber", `more than one dot found in number`) + return true // already failed + } + if i+1 == iter.tail { + return false + } + c = iter.buf[i+1] + switch c { + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9': + default: + iter.ReportError("validateNumber", `missing digit after dot`) + return true // already failed + } + dotFound = true + default: + switch c { + case ',', ']', '}', ' ', '\t', '\n', '\r': + if iter.head == i { + return false // if - without following digits + } + iter.head = i + return true // must be valid + } + return false // may be invalid + } + } + return false +} + +func (iter *Iterator) skipString() { + if !iter.trySkipString() { + iter.unreadByte() + iter.ReadString() + } +} + +func (iter *Iterator) trySkipString() bool { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + iter.head = i + 1 + return true // valid + } else if c == '\\' { + return false + } else if c < ' ' { + iter.ReportError("trySkipString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return true // already failed + } + } + return false +} + +func (iter *Iterator) skipObject() { + iter.unreadByte() + iter.ReadObjectCB(func(iter *Iterator, field string) bool { + iter.Skip() + return true + }) +} + +func (iter *Iterator) skipArray() { + iter.unreadByte() + iter.ReadArrayCB(func(iter *Iterator) bool { + iter.Skip() + return true + }) +} diff --git 
a/vendor/github.com/json-iterator/go/feature_iter_string.go b/vendor/github.com/json-iterator/go/feature_iter_string.go new file mode 100644 index 000000000..adc487ea8 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_iter_string.go @@ -0,0 +1,215 @@ +package jsoniter + +import ( + "fmt" + "unicode/utf16" +) + +// ReadString read string from iterator +func (iter *Iterator) ReadString() (ret string) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + c := iter.buf[i] + if c == '"' { + ret = string(iter.buf[iter.head:i]) + iter.head = i + 1 + return ret + } else if c == '\\' { + break + } else if c < ' ' { + iter.ReportError("ReadString", + fmt.Sprintf(`invalid control character found: %d`, c)) + return + } + } + return iter.readStringSlowPath() + } else if c == 'n' { + iter.skipThreeBytes('u', 'l', 'l') + return "" + } + iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readStringSlowPath() (ret string) { + var str []byte + var c byte + for iter.Error == nil { + c = iter.readByte() + if c == '"' { + return string(str) + } + if c == '\\' { + c = iter.readByte() + str = iter.readEscapedChar(c, str) + } else { + str = append(str, c) + } + } + iter.ReportError("readStringSlowPath", "unexpected end of input") + return +} + +func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte { + switch c { + case 'u': + r := iter.readU4() + if utf16.IsSurrogate(r) { + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != '\\' { + iter.unreadByte() + str = appendRune(str, r) + return str + } + c = iter.readByte() + if iter.Error != nil { + return nil + } + if c != 'u' { + str = appendRune(str, r) + return iter.readEscapedChar(c, str) + } + r2 := iter.readU4() + if iter.Error != nil { + return nil + } + combined := utf16.DecodeRune(r, r2) + if combined == '\uFFFD' { + str = appendRune(str, r) + str = 
appendRune(str, r2) + } else { + str = appendRune(str, combined) + } + } else { + str = appendRune(str, r) + } + case '"': + str = append(str, '"') + case '\\': + str = append(str, '\\') + case '/': + str = append(str, '/') + case 'b': + str = append(str, '\b') + case 'f': + str = append(str, '\f') + case 'n': + str = append(str, '\n') + case 'r': + str = append(str, '\r') + case 't': + str = append(str, '\t') + default: + iter.ReportError("readEscapedChar", + `invalid escape char after \`) + return nil + } + return str +} + +// ReadStringAsSlice read string from iterator without copying into string form. +// The []byte can not be kept, as it will change after next iterator call. +func (iter *Iterator) ReadStringAsSlice() (ret []byte) { + c := iter.nextToken() + if c == '"' { + for i := iter.head; i < iter.tail; i++ { + // require ascii string and no escape + // for: field name, base64, number + if iter.buf[i] == '"' { + // fast path: reuse the underlying buffer + ret = iter.buf[iter.head:i] + iter.head = i + 1 + return ret + } + } + readLen := iter.tail - iter.head + copied := make([]byte, readLen, readLen*2) + copy(copied, iter.buf[iter.head:iter.tail]) + iter.head = iter.tail + for iter.Error == nil { + c := iter.readByte() + if c == '"' { + return copied + } + copied = append(copied, c) + } + return copied + } + iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c})) + return +} + +func (iter *Iterator) readU4() (ret rune) { + for i := 0; i < 4; i++ { + c := iter.readByte() + if iter.Error != nil { + return + } + if c >= '0' && c <= '9' { + ret = ret*16 + rune(c-'0') + } else if c >= 'a' && c <= 'f' { + ret = ret*16 + rune(c-'a'+10) + } else if c >= 'A' && c <= 'F' { + ret = ret*16 + rune(c-'A'+10) + } else { + iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c})) + return + } + } + return ret +} + +const ( + t1 = 0x00 // 0000 0000 + tx = 0x80 // 1000 0000 + t2 = 0xC0 // 1100 0000 + t3 = 0xE0 // 1110 0000 + 
t4 = 0xF0 // 1111 0000 + t5 = 0xF8 // 1111 1000 + + maskx = 0x3F // 0011 1111 + mask2 = 0x1F // 0001 1111 + mask3 = 0x0F // 0000 1111 + mask4 = 0x07 // 0000 0111 + + rune1Max = 1<<7 - 1 + rune2Max = 1<<11 - 1 + rune3Max = 1<<16 - 1 + + surrogateMin = 0xD800 + surrogateMax = 0xDFFF + + maxRune = '\U0010FFFF' // Maximum valid Unicode code point. + runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character" +) + +func appendRune(p []byte, r rune) []byte { + // Negative values are erroneous. Making it unsigned addresses the problem. + switch i := uint32(r); { + case i <= rune1Max: + p = append(p, byte(r)) + return p + case i <= rune2Max: + p = append(p, t2|byte(r>>6)) + p = append(p, tx|byte(r)&maskx) + return p + case i > maxRune, surrogateMin <= i && i <= surrogateMax: + r = runeError + fallthrough + case i <= rune3Max: + p = append(p, t3|byte(r>>12)) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + default: + p = append(p, t4|byte(r>>18)) + p = append(p, tx|byte(r>>12)&maskx) + p = append(p, tx|byte(r>>6)&maskx) + p = append(p, tx|byte(r)&maskx) + return p + } +} diff --git a/vendor/github.com/json-iterator/go/feature_json_number.go b/vendor/github.com/json-iterator/go/feature_json_number.go new file mode 100644 index 000000000..e187b200a --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_json_number.go @@ -0,0 +1,31 @@ +package jsoniter + +import ( + "encoding/json" + "strconv" +) + +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. 
+func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +func CastJsonNumber(val interface{}) (string, bool) { + switch typedVal := val.(type) { + case json.Number: + return string(typedVal), true + case Number: + return string(typedVal), true + } + return "", false +} diff --git a/vendor/github.com/json-iterator/go/feature_pool.go b/vendor/github.com/json-iterator/go/feature_pool.go new file mode 100644 index 000000000..52d38e685 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_pool.go @@ -0,0 +1,59 @@ +package jsoniter + +import ( + "io" +) + +// IteratorPool a thread safe pool of iterators with same configuration +type IteratorPool interface { + BorrowIterator(data []byte) *Iterator + ReturnIterator(iter *Iterator) +} + +// StreamPool a thread safe pool of streams with same configuration +type StreamPool interface { + BorrowStream(writer io.Writer) *Stream + ReturnStream(stream *Stream) +} + +func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream { + select { + case stream := <-cfg.streamPool: + stream.Reset(writer) + return stream + default: + return NewStream(cfg, writer, 512) + } +} + +func (cfg *frozenConfig) ReturnStream(stream *Stream) { + stream.Error = nil + stream.Attachment = nil + select { + case cfg.streamPool <- stream: + return + default: + return + } +} + +func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator { + select { + case iter := <-cfg.iteratorPool: + iter.ResetBytes(data) + return iter + default: + return ParseBytes(cfg, data) + } +} + +func (cfg *frozenConfig) ReturnIterator(iter *Iterator) { + iter.Error = nil + iter.Attachment = nil + select { + case cfg.iteratorPool <- iter: + return + default: + return + } +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect.go b/vendor/github.com/json-iterator/go/feature_reflect.go new file mode 100644 index 000000000..75d533b07 --- /dev/null +++ 
b/vendor/github.com/json-iterator/go/feature_reflect.go @@ -0,0 +1,607 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "fmt" + "reflect" + "time" + "unsafe" +) + +// ValDecoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValDecoder with json.Decoder. +// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link). +// +// Reflection on type to create decoders, which is then cached +// Reflection on value is avoided as we can, as the reflect.Value itself will allocate, with following exceptions +// 1. create instance of new value, for example *int will need a int to be allocated +// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New +// 3. assignment to map, both key and value will be reflect.Value +// For a simple struct binding, it will be reflect.Value free and allocation free +type ValDecoder interface { + Decode(ptr unsafe.Pointer, iter *Iterator) +} + +// ValEncoder is an internal type registered to cache as needed. +// Don't confuse jsoniter.ValEncoder with json.Encoder. +// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link). 
+type ValEncoder interface { + IsEmpty(ptr unsafe.Pointer) bool + Encode(ptr unsafe.Pointer, stream *Stream) + EncodeInterface(val interface{}, stream *Stream) +} + +type checkIsEmpty interface { + IsEmpty(ptr unsafe.Pointer) bool +} + +// WriteToStream the default implementation for TypeEncoder method EncodeInterface +func WriteToStream(val interface{}, stream *Stream, encoder ValEncoder) { + e := (*emptyInterface)(unsafe.Pointer(&val)) + if e.word == nil { + stream.WriteNil() + return + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +var jsonNumberType reflect.Type +var jsoniterNumberType reflect.Type +var jsonRawMessageType reflect.Type +var jsoniterRawMessageType reflect.Type +var anyType reflect.Type +var marshalerType reflect.Type +var unmarshalerType reflect.Type +var textMarshalerType reflect.Type +var textUnmarshalerType reflect.Type + +func init() { + jsonNumberType = reflect.TypeOf((*json.Number)(nil)).Elem() + jsoniterNumberType = reflect.TypeOf((*Number)(nil)).Elem() + jsonRawMessageType = reflect.TypeOf((*json.RawMessage)(nil)).Elem() + jsoniterRawMessageType = reflect.TypeOf((*RawMessage)(nil)).Elem() + anyType = reflect.TypeOf((*Any)(nil)).Elem() + marshalerType = reflect.TypeOf((*json.Marshaler)(nil)).Elem() + unmarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem() + textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem() + textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem() +} + +// ReadVal copy the underlying JSON into go interface, same as json.Unmarshal +func (iter *Iterator) ReadVal(obj interface{}) { + typ := reflect.TypeOf(obj) + cacheKey := typ.Elem() + decoder := decoderOfType(iter.cfg, "", cacheKey) + e := (*emptyInterface)(unsafe.Pointer(&obj)) + if e.word == nil { + iter.ReportError("ReadVal", "can not read into nil pointer") + return + } + decoder.Decode(e.word, iter) +} + +// 
WriteVal copy the go interface into underlying JSON, same as json.Marshal +func (stream *Stream) WriteVal(val interface{}) { + if nil == val { + stream.WriteNil() + return + } + typ := reflect.TypeOf(val) + cacheKey := typ + encoder := encoderOfType(stream.cfg, "", cacheKey) + encoder.EncodeInterface(val, stream) +} + +func decoderOfType(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + cacheKey := typ + decoder := cfg.getDecoderFromCache(cacheKey) + if decoder != nil { + return decoder + } + decoder = getTypeDecoderFromExtension(cfg, typ) + if decoder != nil { + cfg.addDecoderToCache(cacheKey, decoder) + return decoder + } + decoder = &placeholderDecoder{cfg: cfg, cacheKey: cacheKey} + cfg.addDecoderToCache(cacheKey, decoder) + decoder = createDecoderOfType(cfg, prefix, typ) + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + for _, extension := range cfg.extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + cfg.addDecoderToCache(cacheKey, decoder) + return decoder +} + +func createDecoderOfType(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + typeName := typ.String() + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + if typ.Implements(unmarshalerType) { + templateInterface := reflect.New(typ).Elem().Interface() + var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)} + if typ.Kind() == reflect.Ptr { + decoder = &OptionalDecoder{typ.Elem(), decoder} + } + return decoder + } + if reflect.PtrTo(typ).Implements(unmarshalerType) { + templateInterface := reflect.New(typ).Interface() + var decoder ValDecoder = &unmarshalerDecoder{extractInterface(templateInterface)} + return decoder + } + if 
typ.Implements(textUnmarshalerType) { + templateInterface := reflect.New(typ).Elem().Interface() + var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)} + if typ.Kind() == reflect.Ptr { + decoder = &OptionalDecoder{typ.Elem(), decoder} + } + return decoder + } + if reflect.PtrTo(typ).Implements(textUnmarshalerType) { + templateInterface := reflect.New(typ).Interface() + var decoder ValDecoder = &textUnmarshalerDecoder{extractInterface(templateInterface)} + return decoder + } + if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { + sliceDecoder := decoderOfSlice(cfg, prefix, typ) + return &base64Codec{sliceDecoder: sliceDecoder} + } + if typ.Implements(anyType) { + return &anyCodec{} + } + switch typ.Kind() { + case reflect.String: + if typeName != "string" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*int)(nil)).Elem()) + } + return &intCodec{} + case reflect.Int8: + if typeName != "int8" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uint)(nil)).Elem()) + } + return &uintCodec{} + case reflect.Uint8: + if typeName != "uint8" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != 
"uint16" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uintptr)(nil)).Elem()) + } + return &uintptrCodec{} + case reflect.Uint64: + if typeName != "uint64" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return decoderOfType(cfg, prefix, reflect.TypeOf((*bool)(nil)).Elem()) + } + return &boolCodec{} + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{} + } + return &nonEmptyInterfaceCodec{} + case reflect.Struct: + return decoderOfStruct(cfg, prefix, typ) + case reflect.Array: + return decoderOfArray(cfg, prefix, typ) + case reflect.Slice: + return decoderOfSlice(cfg, prefix, typ) + case reflect.Map: + return decoderOfMap(cfg, prefix, typ) + case reflect.Ptr: + return decoderOfOptional(cfg, prefix, typ) + default: + return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", prefix, typ.String())} + } +} + +func encoderOfType(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + cacheKey := typ + encoder := cfg.getEncoderFromCache(cacheKey) + if encoder != nil { + return encoder + } + encoder = getTypeEncoderFromExtension(cfg, typ) + if encoder != nil { + cfg.addEncoderToCache(cacheKey, encoder) + return encoder + } + encoder = &placeholderEncoder{cfg: cfg, cacheKey: cacheKey} + 
cfg.addEncoderToCache(cacheKey, encoder) + encoder = createEncoderOfType(cfg, prefix, typ) + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + for _, extension := range cfg.extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + cfg.addEncoderToCache(cacheKey, encoder) + return encoder +} + +func createEncoderOfType(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + if typ == jsonRawMessageType { + return &jsonRawMessageCodec{} + } + if typ == jsoniterRawMessageType { + return &jsoniterRawMessageCodec{} + } + if typ.AssignableTo(jsonNumberType) { + return &jsonNumberCodec{} + } + if typ.AssignableTo(jsoniterNumberType) { + return &jsoniterNumberCodec{} + } + if typ.Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(cfg, typ) + templateInterface := reflect.New(typ).Elem().Interface() + var encoder ValEncoder = &marshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + if typ.Kind() == reflect.Ptr { + encoder = &OptionalEncoder{encoder} + } + return encoder + } + if reflect.PtrTo(typ).Implements(marshalerType) { + checkIsEmpty := createCheckIsEmpty(cfg, reflect.PtrTo(typ)) + templateInterface := reflect.New(typ).Interface() + var encoder ValEncoder = &marshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + return encoder + } + if typ.Implements(textMarshalerType) { + checkIsEmpty := createCheckIsEmpty(cfg, typ) + templateInterface := reflect.New(typ).Elem().Interface() + var encoder ValEncoder = &textMarshalerEncoder{ + templateInterface: extractInterface(templateInterface), + checkIsEmpty: checkIsEmpty, + } + if typ.Kind() == reflect.Ptr { + encoder = &OptionalEncoder{encoder} + } + return encoder + } + if typ.Kind() == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 { + return &base64Codec{} + } + if typ.Implements(anyType) { + return &anyCodec{} + } + return 
createEncoderOfSimpleType(cfg, prefix, typ) +} + +func createCheckIsEmpty(cfg *frozenConfig, typ reflect.Type) checkIsEmpty { + kind := typ.Kind() + switch kind { + case reflect.String: + return &stringCodec{} + case reflect.Int: + return &intCodec{} + case reflect.Int8: + return &int8Codec{} + case reflect.Int16: + return &int16Codec{} + case reflect.Int32: + return &int32Codec{} + case reflect.Int64: + return &int64Codec{} + case reflect.Uint: + return &uintCodec{} + case reflect.Uint8: + return &uint8Codec{} + case reflect.Uint16: + return &uint16Codec{} + case reflect.Uint32: + return &uint32Codec{} + case reflect.Uintptr: + return &uintptrCodec{} + case reflect.Uint64: + return &uint64Codec{} + case reflect.Float32: + return &float32Codec{} + case reflect.Float64: + return &float64Codec{} + case reflect.Bool: + return &boolCodec{} + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{} + } + return &nonEmptyInterfaceCodec{} + case reflect.Struct: + return &structEncoder{typ: typ} + case reflect.Array: + return &arrayEncoder{} + case reflect.Slice: + return &sliceEncoder{} + case reflect.Map: + return encoderOfMap(cfg, "", typ) + case reflect.Ptr: + return &OptionalEncoder{} + default: + return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)} + } +} + +func createEncoderOfSimpleType(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + typeName := typ.String() + kind := typ.Kind() + switch kind { + case reflect.String: + if typeName != "string" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*string)(nil)).Elem()) + } + return &stringCodec{} + case reflect.Int: + if typeName != "int" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*int)(nil)).Elem()) + } + return &intCodec{} + case reflect.Int8: + if typeName != "int8" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*int8)(nil)).Elem()) + } + return &int8Codec{} + case reflect.Int16: + if typeName != "int16" { + return 
encoderOfType(cfg, prefix, reflect.TypeOf((*int16)(nil)).Elem()) + } + return &int16Codec{} + case reflect.Int32: + if typeName != "int32" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*int32)(nil)).Elem()) + } + return &int32Codec{} + case reflect.Int64: + if typeName != "int64" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*int64)(nil)).Elem()) + } + return &int64Codec{} + case reflect.Uint: + if typeName != "uint" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uint)(nil)).Elem()) + } + return &uintCodec{} + case reflect.Uint8: + if typeName != "uint8" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uint8)(nil)).Elem()) + } + return &uint8Codec{} + case reflect.Uint16: + if typeName != "uint16" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uint16)(nil)).Elem()) + } + return &uint16Codec{} + case reflect.Uint32: + if typeName != "uint32" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uint32)(nil)).Elem()) + } + return &uint32Codec{} + case reflect.Uintptr: + if typeName != "uintptr" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uintptr)(nil)).Elem()) + } + return &uintptrCodec{} + case reflect.Uint64: + if typeName != "uint64" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*uint64)(nil)).Elem()) + } + return &uint64Codec{} + case reflect.Float32: + if typeName != "float32" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*float32)(nil)).Elem()) + } + return &float32Codec{} + case reflect.Float64: + if typeName != "float64" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*float64)(nil)).Elem()) + } + return &float64Codec{} + case reflect.Bool: + if typeName != "bool" { + return encoderOfType(cfg, prefix, reflect.TypeOf((*bool)(nil)).Elem()) + } + return &boolCodec{} + case reflect.Interface: + if typ.NumMethod() == 0 { + return &emptyInterfaceCodec{} + } + return &nonEmptyInterfaceCodec{} + case reflect.Struct: + return encoderOfStruct(cfg, prefix, typ) + case reflect.Array: + return 
encoderOfArray(cfg, prefix, typ) + case reflect.Slice: + return encoderOfSlice(cfg, prefix, typ) + case reflect.Map: + return encoderOfMap(cfg, prefix, typ) + case reflect.Ptr: + return encoderOfOptional(cfg, prefix, typ) + default: + return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", prefix, typ.String())} + } +} + +type placeholderEncoder struct { + cfg *frozenConfig + cacheKey reflect.Type +} + +func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.getRealEncoder().Encode(ptr, stream) +} + +func (encoder *placeholderEncoder) EncodeInterface(val interface{}, stream *Stream) { + encoder.getRealEncoder().EncodeInterface(val, stream) +} + +func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.getRealEncoder().IsEmpty(ptr) +} + +func (encoder *placeholderEncoder) getRealEncoder() ValEncoder { + for i := 0; i < 500; i++ { + realDecoder := encoder.cfg.getEncoderFromCache(encoder.cacheKey) + _, isPlaceholder := realDecoder.(*placeholderEncoder) + if isPlaceholder { + time.Sleep(10 * time.Millisecond) + } else { + return realDecoder + } + } + panic(fmt.Sprintf("real encoder not found for cache key: %v", encoder.cacheKey)) +} + +type placeholderDecoder struct { + cfg *frozenConfig + cacheKey reflect.Type +} + +func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + for i := 0; i < 500; i++ { + realDecoder := decoder.cfg.getDecoderFromCache(decoder.cacheKey) + _, isPlaceholder := realDecoder.(*placeholderDecoder) + if isPlaceholder { + time.Sleep(10 * time.Millisecond) + } else { + realDecoder.Decode(ptr, iter) + return + } + } + panic(fmt.Sprintf("real decoder not found for cache key: %v", decoder.cacheKey)) +} + +type lazyErrorDecoder struct { + err error +} + +func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.WhatIsNext() != NilValue { + if iter.Error == nil { + iter.Error = decoder.err + } + } else { + iter.Skip() + } +} 
+ +type lazyErrorEncoder struct { + err error +} + +func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if ptr == nil { + stream.WriteNil() + } else if stream.Error == nil { + stream.Error = encoder.err + } +} + +func (encoder *lazyErrorEncoder) EncodeInterface(val interface{}, stream *Stream) { + if val == nil { + stream.WriteNil() + } else if stream.Error == nil { + stream.Error = encoder.err + } +} + +func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +func extractInterface(val interface{}) emptyInterface { + return *((*emptyInterface)(unsafe.Pointer(&val))) +} + +// emptyInterface is the header for an interface{} value. +type emptyInterface struct { + typ unsafe.Pointer + word unsafe.Pointer +} + +// nonEmptyInterface is the header for an interface with method (not interface{}) +type nonEmptyInterface struct { + // see ../runtime/iface.go:/Itab + itab *struct { + ityp unsafe.Pointer // static interface type + typ unsafe.Pointer // dynamic concrete type + link unsafe.Pointer + bad int32 + unused int32 + fun [100000]unsafe.Pointer // method table + } + word unsafe.Pointer +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_array.go b/vendor/github.com/json-iterator/go/feature_reflect_array.go new file mode 100644 index 000000000..f4e211dc7 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_array.go @@ -0,0 +1,93 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "unsafe" +) + +func decoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + decoder := decoderOfType(cfg, prefix+"[array]->", typ.Elem()) + return &arrayDecoder{typ, typ.Elem(), decoder} +} + +func encoderOfArray(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + encoder := encoderOfType(cfg, prefix+"[array]->", typ.Elem()) + if typ.Elem().Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return 
&arrayEncoder{typ, typ.Elem(), encoder} +} + +type arrayEncoder struct { + arrayType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder +} + +func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteArrayStart() + elemPtr := unsafe.Pointer(ptr) + encoder.elemEncoder.Encode(elemPtr, stream) + for i := 1; i < encoder.arrayType.Len(); i++ { + stream.WriteMore() + elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size()) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error()) + } +} + +func (encoder *arrayEncoder) EncodeInterface(val interface{}, stream *Stream) { + // special optimization for interface{} + e := (*emptyInterface)(unsafe.Pointer(&val)) + if e.word == nil { + stream.WriteArrayStart() + stream.WriteNil() + stream.WriteArrayEnd() + return + } + elemType := encoder.arrayType.Elem() + if encoder.arrayType.Len() == 1 && (elemType.Kind() == reflect.Ptr || elemType.Kind() == reflect.Map) { + ptr := uintptr(e.word) + e.word = unsafe.Pointer(&ptr) + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type arrayDecoder struct { + arrayType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder +} + +func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error()) + } +} + +func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + offset := uintptr(0) + iter.ReadArrayCB(func(iter *Iterator) bool { + if offset < decoder.arrayType.Size() { + 
decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(ptr)+offset), iter) + offset += decoder.elemType.Size() + } else { + iter.Skip() + } + return true + }) +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_extension.go b/vendor/github.com/json-iterator/go/feature_reflect_extension.go new file mode 100644 index 000000000..96d4cda19 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_extension.go @@ -0,0 +1,421 @@ +package jsoniter + +import ( + "fmt" + "reflect" + "sort" + "strings" + "unicode" + "unsafe" +) + +var typeDecoders = map[string]ValDecoder{} +var fieldDecoders = map[string]ValDecoder{} +var typeEncoders = map[string]ValEncoder{} +var fieldEncoders = map[string]ValEncoder{} +var extensions = []Extension{} + +// StructDescriptor describe how should we encode/decode the struct +type StructDescriptor struct { + onePtrEmbedded bool + onePtrOptimization bool + Type reflect.Type + Fields []*Binding +} + +// GetField get one field from the descriptor by its name. +// Can not use map here to keep field orders. +func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding { + for _, binding := range structDescriptor.Fields { + if binding.Field.Name == fieldName { + return binding + } + } + return nil +} + +// Binding describe how should we encode/decode the struct field +type Binding struct { + levels []int + Field *reflect.StructField + FromNames []string + ToNames []string + Encoder ValEncoder + Decoder ValDecoder +} + +// Extension the one for all SPI. Customize encoding/decoding by specifying alternate encoder/decoder. +// Can also rename fields by UpdateStructDescriptor. 
+type Extension interface { + UpdateStructDescriptor(structDescriptor *StructDescriptor) + CreateDecoder(typ reflect.Type) ValDecoder + CreateEncoder(typ reflect.Type) ValEncoder + DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder + DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder +} + +// DummyExtension embed this type get dummy implementation for all methods of Extension +type DummyExtension struct { +} + +// UpdateStructDescriptor No-op +func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) { +} + +// CreateDecoder No-op +func (extension *DummyExtension) CreateDecoder(typ reflect.Type) ValDecoder { + return nil +} + +// CreateEncoder No-op +func (extension *DummyExtension) CreateEncoder(typ reflect.Type) ValEncoder { + return nil +} + +// DecorateDecoder No-op +func (extension *DummyExtension) DecorateDecoder(typ reflect.Type, decoder ValDecoder) ValDecoder { + return decoder +} + +// DecorateEncoder No-op +func (extension *DummyExtension) DecorateEncoder(typ reflect.Type, encoder ValEncoder) ValEncoder { + return encoder +} + +type funcDecoder struct { + fun DecoderFunc +} + +func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.fun(ptr, iter) +} + +type funcEncoder struct { + fun EncoderFunc + isEmptyFunc func(ptr unsafe.Pointer) bool +} + +func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + encoder.fun(ptr, stream) +} + +func (encoder *funcEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool { + if encoder.isEmptyFunc == nil { + return false + } + return encoder.isEmptyFunc(ptr) +} + +// DecoderFunc the function form of TypeDecoder +type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator) + +// EncoderFunc the function form of TypeEncoder +type EncoderFunc func(ptr unsafe.Pointer, stream *Stream) + +// RegisterTypeDecoderFunc 
register TypeDecoder for a type with function +func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) { + typeDecoders[typ] = &funcDecoder{fun} +} + +// RegisterTypeDecoder register TypeDecoder for a typ +func RegisterTypeDecoder(typ string, decoder ValDecoder) { + typeDecoders[typ] = decoder +} + +// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function +func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) { + RegisterFieldDecoder(typ, field, &funcDecoder{fun}) +} + +// RegisterFieldDecoder register TypeDecoder for a struct field +func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) { + fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder +} + +// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function +func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc} +} + +// RegisterTypeEncoder register TypeEncoder for a type +func RegisterTypeEncoder(typ string, encoder ValEncoder) { + typeEncoders[typ] = encoder +} + +// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function +func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) { + RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc}) +} + +// RegisterFieldEncoder register TypeEncoder for a struct field +func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) { + fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder +} + +// RegisterExtension register extension +func RegisterExtension(extension Extension) { + extensions = append(extensions, extension) +} + +func getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder { + decoder := _getTypeDecoderFromExtension(cfg, typ) + if decoder != nil { + for _, extension := range extensions { + decoder = extension.DecorateDecoder(typ, 
decoder) + } + for _, extension := range cfg.extensions { + decoder = extension.DecorateDecoder(typ, decoder) + } + } + return decoder +} +func _getTypeDecoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValDecoder { + for _, extension := range extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + for _, extension := range cfg.extensions { + decoder := extension.CreateDecoder(typ) + if decoder != nil { + return decoder + } + } + typeName := typ.String() + decoder := typeDecoders[typeName] + if decoder != nil { + return decoder + } + if typ.Kind() == reflect.Ptr { + decoder := typeDecoders[typ.Elem().String()] + if decoder != nil { + return &OptionalDecoder{typ.Elem(), decoder} + } + } + return nil +} + +func getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder { + encoder := _getTypeEncoderFromExtension(cfg, typ) + if encoder != nil { + for _, extension := range extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + for _, extension := range cfg.extensions { + encoder = extension.DecorateEncoder(typ, encoder) + } + } + return encoder +} + +func _getTypeEncoderFromExtension(cfg *frozenConfig, typ reflect.Type) ValEncoder { + for _, extension := range extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + for _, extension := range cfg.extensions { + encoder := extension.CreateEncoder(typ) + if encoder != nil { + return encoder + } + } + typeName := typ.String() + encoder := typeEncoders[typeName] + if encoder != nil { + return encoder + } + if typ.Kind() == reflect.Ptr { + encoder := typeEncoders[typ.Elem().String()] + if encoder != nil { + return &OptionalEncoder{encoder} + } + } + return nil +} + +func describeStruct(cfg *frozenConfig, prefix string, typ reflect.Type) *StructDescriptor { + embeddedBindings := []*Binding{} + bindings := []*Binding{} + for i := 0; i < typ.NumField(); i++ { + field := typ.Field(i) + tag := 
field.Tag.Get(cfg.getTagKey()) + tagParts := strings.Split(tag, ",") + if tag == "-" { + continue + } + if field.Anonymous && (tag == "" || tagParts[0] == "") { + if field.Type.Kind() == reflect.Struct { + structDescriptor := describeStruct(cfg, prefix, field.Type) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) + omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty} + binding.Decoder = &structFieldDecoder{&field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } else if field.Type.Kind() == reflect.Ptr && field.Type.Elem().Kind() == reflect.Struct { + structDescriptor := describeStruct(cfg, prefix, field.Type.Elem()) + for _, binding := range structDescriptor.Fields { + binding.levels = append([]int{i}, binding.levels...) + omitempty := binding.Encoder.(*structFieldEncoder).omitempty + binding.Encoder = &OptionalEncoder{binding.Encoder} + binding.Encoder = &structFieldEncoder{&field, binding.Encoder, omitempty} + binding.Decoder = &deferenceDecoder{field.Type.Elem(), binding.Decoder} + binding.Decoder = &structFieldDecoder{&field, binding.Decoder} + embeddedBindings = append(embeddedBindings, binding) + } + continue + } + } + fieldNames := calcFieldNames(field.Name, tagParts[0], tag) + fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name) + decoder := fieldDecoders[fieldCacheKey] + if decoder == nil { + decoder = decoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type) + } + encoder := fieldEncoders[fieldCacheKey] + if encoder == nil { + encoder = encoderOfType(cfg, prefix+typ.String()+"."+field.Name+"->", field.Type) + // map is stored as pointer in the struct, + // and treat nil or empty map as empty field + if encoder != nil && field.Type.Kind() == reflect.Map { + encoder = &optionalMapEncoder{encoder} + } + } + binding := &Binding{ + Field: &field, + 
FromNames: fieldNames, + ToNames: fieldNames, + Decoder: decoder, + Encoder: encoder, + } + binding.levels = []int{i} + bindings = append(bindings, binding) + } + return createStructDescriptor(cfg, typ, bindings, embeddedBindings) +} +func createStructDescriptor(cfg *frozenConfig, typ reflect.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor { + onePtrEmbedded := false + onePtrOptimization := false + if typ.NumField() == 1 { + firstField := typ.Field(0) + switch firstField.Type.Kind() { + case reflect.Ptr: + if firstField.Anonymous && firstField.Type.Elem().Kind() == reflect.Struct { + onePtrEmbedded = true + } + fallthrough + case reflect.Map: + onePtrOptimization = true + case reflect.Struct: + onePtrOptimization = isStructOnePtr(firstField.Type) + } + } + structDescriptor := &StructDescriptor{ + onePtrEmbedded: onePtrEmbedded, + onePtrOptimization: onePtrOptimization, + Type: typ, + Fields: bindings, + } + for _, extension := range extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + for _, extension := range cfg.extensions { + extension.UpdateStructDescriptor(structDescriptor) + } + processTags(structDescriptor, cfg) + // merge normal & embedded bindings & sort with original order + allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...)) + sort.Sort(allBindings) + structDescriptor.Fields = allBindings + return structDescriptor +} + +func isStructOnePtr(typ reflect.Type) bool { + if typ.NumField() == 1 { + firstField := typ.Field(0) + switch firstField.Type.Kind() { + case reflect.Ptr: + return true + case reflect.Map: + return true + case reflect.Struct: + return isStructOnePtr(firstField.Type) + } + } + return false +} + +type sortableBindings []*Binding + +func (bindings sortableBindings) Len() int { + return len(bindings) +} + +func (bindings sortableBindings) Less(i, j int) bool { + left := bindings[i].levels + right := bindings[j].levels + k := 0 + for { + if left[k] < right[k] { + 
return true + } else if left[k] > right[k] { + return false + } + k++ + } +} + +func (bindings sortableBindings) Swap(i, j int) { + bindings[i], bindings[j] = bindings[j], bindings[i] +} + +func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) { + for _, binding := range structDescriptor.Fields { + shouldOmitEmpty := false + tagParts := strings.Split(binding.Field.Tag.Get(cfg.getTagKey()), ",") + for _, tagPart := range tagParts[1:] { + if tagPart == "omitempty" { + shouldOmitEmpty = true + } else if tagPart == "string" { + if binding.Field.Type.Kind() == reflect.String { + binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg} + binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg} + } else { + binding.Decoder = &stringModeNumberDecoder{binding.Decoder} + binding.Encoder = &stringModeNumberEncoder{binding.Encoder} + } + } + } + binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder} + binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty} + } +} + +func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string { + // ignore? + if wholeTag == "-" { + return []string{} + } + // rename? + var fieldNames []string + if tagProvidedFieldName == "" { + fieldNames = []string{originalFieldName} + } else { + fieldNames = []string{tagProvidedFieldName} + } + // private? 
+ isNotExported := unicode.IsLower(rune(originalFieldName[0])) + if isNotExported { + fieldNames = []string{} + } + return fieldNames +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_map.go b/vendor/github.com/json-iterator/go/feature_reflect_map.go new file mode 100644 index 000000000..cb40538f5 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_map.go @@ -0,0 +1,260 @@ +package jsoniter + +import ( + "encoding" + "encoding/json" + "reflect" + "sort" + "strconv" + "unsafe" +) + +func decoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + decoder := decoderOfType(cfg, prefix+"[map]->", typ.Elem()) + mapInterface := reflect.New(typ).Interface() + return &mapDecoder{typ, typ.Key(), typ.Elem(), decoder, extractInterface(mapInterface)} +} + +func encoderOfMap(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + elemType := typ.Elem() + encoder := encoderOfType(cfg, prefix+"[map]->", elemType) + mapInterface := reflect.New(typ).Elem().Interface() + if cfg.sortMapKeys { + return &sortKeysMapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))} + } + return &mapEncoder{typ, elemType, encoder, *((*emptyInterface)(unsafe.Pointer(&mapInterface)))} +} + +type mapDecoder struct { + mapType reflect.Type + keyType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder + mapInterface emptyInterface +} + +func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + // dark magic to cast unsafe.Pointer back to interface{} using reflect.Type + mapInterface := decoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface).Elem() + if iter.ReadNil() { + realVal.Set(reflect.Zero(decoder.mapType)) + return + } + if realVal.IsNil() { + realVal.Set(reflect.MakeMap(realVal.Type())) + } + iter.ReadMapCB(func(iter *Iterator, keyStr string) 
bool { + elem := reflect.New(decoder.elemType) + decoder.elemDecoder.Decode(unsafe.Pointer(elem.Pointer()), iter) + // to put into map, we have to use reflection + keyType := decoder.keyType + // TODO: remove this from loop + switch { + case keyType.Kind() == reflect.String: + realVal.SetMapIndex(reflect.ValueOf(keyStr).Convert(keyType), elem.Elem()) + return true + case keyType.Implements(textUnmarshalerType): + textUnmarshaler := reflect.New(keyType.Elem()).Interface().(encoding.TextUnmarshaler) + err := textUnmarshaler.UnmarshalText([]byte(keyStr)) + if err != nil { + iter.ReportError("read map key as TextUnmarshaler", err.Error()) + return false + } + realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler), elem.Elem()) + return true + case reflect.PtrTo(keyType).Implements(textUnmarshalerType): + textUnmarshaler := reflect.New(keyType).Interface().(encoding.TextUnmarshaler) + err := textUnmarshaler.UnmarshalText([]byte(keyStr)) + if err != nil { + iter.ReportError("read map key as TextUnmarshaler", err.Error()) + return false + } + realVal.SetMapIndex(reflect.ValueOf(textUnmarshaler).Elem(), elem.Elem()) + return true + default: + switch keyType.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + n, err := strconv.ParseInt(keyStr, 10, 64) + if err != nil || reflect.Zero(keyType).OverflowInt(n) { + iter.ReportError("read map key as int64", "read int64 failed") + return false + } + realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem()) + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + n, err := strconv.ParseUint(keyStr, 10, 64) + if err != nil || reflect.Zero(keyType).OverflowUint(n) { + iter.ReportError("read map key as uint64", "read uint64 failed") + return false + } + realVal.SetMapIndex(reflect.ValueOf(n).Convert(keyType), elem.Elem()) + return true + } + } + iter.ReportError("read map key", "unexpected map key type "+keyType.String()) + 
return true + }) +} + +type mapEncoder struct { + mapType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder + mapInterface emptyInterface +} + +func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + stream.WriteObjectStart() + for i, key := range realVal.MapKeys() { + if i != 0 { + stream.WriteMore() + } + encodeMapKey(key, stream) + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + val := realVal.MapIndex(key).Interface() + encoder.elemEncoder.EncodeInterface(val, stream) + } + stream.WriteObjectEnd() +} + +func encodeMapKey(key reflect.Value, stream *Stream) { + if key.Kind() == reflect.String { + stream.WriteString(key.String()) + return + } + if tm, ok := key.Interface().(encoding.TextMarshaler); ok { + buf, err := tm.MarshalText() + if err != nil { + stream.Error = err + return + } + stream.writeByte('"') + stream.Write(buf) + stream.writeByte('"') + return + } + switch key.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + stream.writeByte('"') + stream.WriteInt64(key.Int()) + stream.writeByte('"') + return + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + stream.writeByte('"') + stream.WriteUint64(key.Uint()) + stream.writeByte('"') + return + } + stream.Error = &json.UnsupportedTypeError{Type: key.Type()} +} + +func (encoder *mapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + return realVal.Len() == 0 +} + +type 
sortKeysMapEncoder struct { + mapType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder + mapInterface emptyInterface +} + +func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + + // Extract and sort the keys. + keys := realVal.MapKeys() + sv := stringValues(make([]reflectWithString, len(keys))) + for i, v := range keys { + sv[i].v = v + if err := sv[i].resolve(); err != nil { + stream.Error = err + return + } + } + sort.Sort(sv) + + stream.WriteObjectStart() + for i, key := range sv { + if i != 0 { + stream.WriteMore() + } + stream.WriteVal(key.s) // might need html escape, so can not WriteString directly + if stream.indention > 0 { + stream.writeTwoBytes(byte(':'), byte(' ')) + } else { + stream.writeByte(':') + } + val := realVal.MapIndex(key.v).Interface() + encoder.elemEncoder.EncodeInterface(val, stream) + } + stream.WriteObjectEnd() +} + +// stringValues is a slice of reflect.Value holding *reflect.StringValue. +// It implements the methods to sort by string. 
+type stringValues []reflectWithString + +type reflectWithString struct { + v reflect.Value + s string +} + +func (w *reflectWithString) resolve() error { + if w.v.Kind() == reflect.String { + w.s = w.v.String() + return nil + } + if tm, ok := w.v.Interface().(encoding.TextMarshaler); ok { + buf, err := tm.MarshalText() + w.s = string(buf) + return err + } + switch w.v.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + w.s = strconv.FormatInt(w.v.Int(), 10) + return nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + w.s = strconv.FormatUint(w.v.Uint(), 10) + return nil + } + return &json.UnsupportedTypeError{Type: w.v.Type()} +} + +func (sv stringValues) Len() int { return len(sv) } +func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] } +func (sv stringValues) Less(i, j int) bool { return sv[i].s < sv[j].s } + +func (encoder *sortKeysMapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + mapInterface := encoder.mapInterface + mapInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&mapInterface)) + realVal := reflect.ValueOf(*realInterface) + return realVal.Len() == 0 +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_native.go b/vendor/github.com/json-iterator/go/feature_reflect_native.go new file mode 100644 index 000000000..95bd1e87c --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_native.go @@ -0,0 +1,764 @@ +package jsoniter + +import ( + "encoding" + "encoding/base64" + "encoding/json" + "reflect" + "unsafe" +) + +type stringCodec struct { +} + +func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*string)(ptr)) = iter.ReadString() +} + +func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + str := 
*((*string)(ptr)) + stream.WriteString(str) +} + +func (codec *stringCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*string)(ptr)) == "" +} + +type intCodec struct { +} + +func (codec *intCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int)(ptr)) = iter.ReadInt() + } +} + +func (codec *intCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt(*((*int)(ptr))) +} + +func (codec *intCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *intCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int)(ptr)) == 0 +} + +type uintptrCodec struct { +} + +func (codec *uintptrCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uintptr)(ptr)) = uintptr(iter.ReadUint64()) + } +} + +func (codec *uintptrCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(uint64(*((*uintptr)(ptr)))) +} + +func (codec *uintptrCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uintptrCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uintptr)(ptr)) == 0 +} + +type int8Codec struct { +} + +func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int8)(ptr)) = iter.ReadInt8() + } +} + +func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt8(*((*int8)(ptr))) +} + +func (codec *int8Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int8)(ptr)) == 0 +} + +type int16Codec struct { +} + +func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int16)(ptr)) = iter.ReadInt16() + } +} + +func (codec *int16Codec) Encode(ptr 
unsafe.Pointer, stream *Stream) { + stream.WriteInt16(*((*int16)(ptr))) +} + +func (codec *int16Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int16)(ptr)) == 0 +} + +type int32Codec struct { +} + +func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int32)(ptr)) = iter.ReadInt32() + } +} + +func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt32(*((*int32)(ptr))) +} + +func (codec *int32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int32)(ptr)) == 0 +} + +type int64Codec struct { +} + +func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*int64)(ptr)) = iter.ReadInt64() + } +} + +func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteInt64(*((*int64)(ptr))) +} + +func (codec *int64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*int64)(ptr)) == 0 +} + +type uintCodec struct { +} + +func (codec *uintCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint)(ptr)) = iter.ReadUint() + return + } +} + +func (codec *uintCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint(*((*uint)(ptr))) +} + +func (codec *uintCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uintCodec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint)(ptr)) == 0 +} + +type uint8Codec struct { +} + +func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint8)(ptr)) = iter.ReadUint8() + } +} + +func (codec *uint8Codec) 
Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint8(*((*uint8)(ptr))) +} + +func (codec *uint8Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint8)(ptr)) == 0 +} + +type uint16Codec struct { +} + +func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint16)(ptr)) = iter.ReadUint16() + } +} + +func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint16(*((*uint16)(ptr))) +} + +func (codec *uint16Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint16)(ptr)) == 0 +} + +type uint32Codec struct { +} + +func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint32)(ptr)) = iter.ReadUint32() + } +} + +func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint32(*((*uint32)(ptr))) +} + +func (codec *uint32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint32)(ptr)) == 0 +} + +type uint64Codec struct { +} + +func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*uint64)(ptr)) = iter.ReadUint64() + } +} + +func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteUint64(*((*uint64)(ptr))) +} + +func (codec *uint64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*uint64)(ptr)) == 0 +} + +type float32Codec struct { +} + +func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float32)(ptr)) = 
iter.ReadFloat32() + } +} + +func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat32(*((*float32)(ptr))) +} + +func (codec *float32Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float32)(ptr)) == 0 +} + +type float64Codec struct { +} + +func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*float64)(ptr)) = iter.ReadFloat64() + } +} + +func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteFloat64(*((*float64)(ptr))) +} + +func (codec *float64Codec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return *((*float64)(ptr)) == 0 +} + +type boolCodec struct { +} + +func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.ReadNil() { + *((*bool)(ptr)) = iter.ReadBool() + } +} + +func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteBool(*((*bool)(ptr))) +} + +func (codec *boolCodec) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, codec) +} + +func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool { + return !(*((*bool)(ptr))) +} + +type emptyInterfaceCodec struct { +} + +func (codec *emptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + existing := *((*interface{})(ptr)) + + // Checking for both typed and untyped nil pointers. 
+ if existing != nil && + reflect.TypeOf(existing).Kind() == reflect.Ptr && + !reflect.ValueOf(existing).IsNil() { + + var ptrToExisting interface{} + for { + elem := reflect.ValueOf(existing).Elem() + if elem.Kind() != reflect.Ptr || elem.IsNil() { + break + } + ptrToExisting = existing + existing = elem.Interface() + } + + if iter.ReadNil() { + if ptrToExisting != nil { + nilPtr := reflect.Zero(reflect.TypeOf(ptrToExisting).Elem()) + reflect.ValueOf(ptrToExisting).Elem().Set(nilPtr) + } else { + *((*interface{})(ptr)) = nil + } + } else { + iter.ReadVal(existing) + } + + return + } + + if iter.ReadNil() { + *((*interface{})(ptr)) = nil + } else { + *((*interface{})(ptr)) = iter.Read() + } +} + +func (codec *emptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteVal(*((*interface{})(ptr))) +} + +func (codec *emptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteVal(val) +} + +func (codec *emptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool { + emptyInterface := (*emptyInterface)(ptr) + return emptyInterface.typ == nil +} + +type nonEmptyInterfaceCodec struct { +} + +func (codec *nonEmptyInterfaceCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + if nonEmptyInterface.itab == nil { + iter.ReportError("read non-empty interface", "do not know which concrete type to decode to") + return + } + var i interface{} + e := (*emptyInterface)(unsafe.Pointer(&i)) + e.typ = nonEmptyInterface.itab.typ + e.word = nonEmptyInterface.word + iter.ReadVal(&i) + if e.word == nil { + nonEmptyInterface.itab = nil + } + nonEmptyInterface.word = e.word +} + +func (codec *nonEmptyInterfaceCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + var i interface{} + if nonEmptyInterface.itab != nil { + e := (*emptyInterface)(unsafe.Pointer(&i)) + e.typ = nonEmptyInterface.itab.typ + e.word = nonEmptyInterface.word + } + stream.WriteVal(i) 
+} + +func (codec *nonEmptyInterfaceCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteVal(val) +} + +func (codec *nonEmptyInterfaceCodec) IsEmpty(ptr unsafe.Pointer) bool { + nonEmptyInterface := (*nonEmptyInterface)(ptr) + return nonEmptyInterface.word == nil +} + +type anyCodec struct { +} + +func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*Any)(ptr)) = iter.ReadAny() +} + +func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + (*((*Any)(ptr))).WriteTo(stream) +} + +func (codec *anyCodec) EncodeInterface(val interface{}, stream *Stream) { + (val.(Any)).WriteTo(stream) +} + +func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool { + return (*((*Any)(ptr))).Size() == 0 +} + +type jsonNumberCodec struct { +} + +func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*json.Number)(ptr)) = json.Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*json.Number)(ptr)) = "" + default: + *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*json.Number)(ptr)))) +} + +func (codec *jsonNumberCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(json.Number))) +} + +func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.Number)(ptr))) == 0 +} + +type jsoniterNumberCodec struct { +} + +func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + switch iter.WhatIsNext() { + case StringValue: + *((*Number)(ptr)) = Number(iter.ReadString()) + case NilValue: + iter.skipFourBytes('n', 'u', 'l', 'l') + *((*Number)(ptr)) = "" + default: + *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString())) + } +} + +func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + 
stream.WriteRaw(string(*((*Number)(ptr)))) +} + +func (codec *jsoniterNumberCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(Number))) +} + +func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*Number)(ptr))) == 0 +} + +type jsonRawMessageCodec struct { +} + +func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*json.RawMessage)(ptr)))) +} + +func (codec *jsonRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(json.RawMessage))) +} + +func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*json.RawMessage)(ptr))) == 0 +} + +type jsoniterRawMessageCodec struct { +} + +func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) { + *((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes()) +} + +func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteRaw(string(*((*RawMessage)(ptr)))) +} + +func (codec *jsoniterRawMessageCodec) EncodeInterface(val interface{}, stream *Stream) { + stream.WriteRaw(string(val.(RawMessage))) +} + +func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*RawMessage)(ptr))) == 0 +} + +type base64Codec struct { + sliceDecoder ValDecoder +} + +func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + ptrSlice := (*sliceHeader)(ptr) + ptrSlice.Len = 0 + ptrSlice.Cap = 0 + ptrSlice.Data = nil + return + } + switch iter.WhatIsNext() { + case StringValue: + encoding := base64.StdEncoding + src := iter.SkipAndReturnBytes() + src = src[1 : len(src)-1] + decodedLen := encoding.DecodedLen(len(src)) + dst := make([]byte, decodedLen) + len, err := 
encoding.Decode(dst, src) + if err != nil { + iter.ReportError("decode base64", err.Error()) + } else { + dst = dst[:len] + dstSlice := (*sliceHeader)(unsafe.Pointer(&dst)) + ptrSlice := (*sliceHeader)(ptr) + ptrSlice.Data = dstSlice.Data + ptrSlice.Cap = dstSlice.Cap + ptrSlice.Len = dstSlice.Len + } + case ArrayValue: + codec.sliceDecoder.Decode(ptr, iter) + default: + iter.ReportError("base64Codec", "invalid input") + } +} + +func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) { + src := *((*[]byte)(ptr)) + if len(src) == 0 { + stream.WriteNil() + return + } + encoding := base64.StdEncoding + stream.writeByte('"') + toGrow := encoding.EncodedLen(len(src)) + stream.ensure(toGrow) + encoding.Encode(stream.buf[stream.n:], src) + stream.n += toGrow + stream.writeByte('"') +} + +func (codec *base64Codec) EncodeInterface(val interface{}, stream *Stream) { + ptr := extractInterface(val).word + src := *((*[]byte)(ptr)) + if len(src) == 0 { + stream.WriteNil() + return + } + encoding := base64.StdEncoding + stream.writeByte('"') + toGrow := encoding.EncodedLen(len(src)) + stream.ensure(toGrow) + encoding.Encode(stream.buf[stream.n:], src) + stream.n += toGrow + stream.writeByte('"') +} + +func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool { + return len(*((*[]byte)(ptr))) == 0 +} + +type stringModeNumberDecoder struct { + elemDecoder ValDecoder +} + +func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + c := iter.nextToken() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } + decoder.elemDecoder.Decode(ptr, iter) + if iter.Error != nil { + return + } + c = iter.readByte() + if c != '"' { + iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c})) + return + } +} + +type stringModeStringDecoder struct { + elemDecoder ValDecoder + cfg *frozenConfig +} + +func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, 
iter *Iterator) { + decoder.elemDecoder.Decode(ptr, iter) + str := *((*string)(ptr)) + tempIter := decoder.cfg.BorrowIterator([]byte(str)) + defer decoder.cfg.ReturnIterator(tempIter) + *((*string)(ptr)) = tempIter.ReadString() +} + +type stringModeNumberEncoder struct { + elemEncoder ValEncoder +} + +func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.writeByte('"') + encoder.elemEncoder.Encode(ptr, stream) + stream.writeByte('"') +} + +func (encoder *stringModeNumberEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type stringModeStringEncoder struct { + elemEncoder ValEncoder + cfg *frozenConfig +} + +func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + tempStream := encoder.cfg.BorrowStream(nil) + defer encoder.cfg.ReturnStream(tempStream) + encoder.elemEncoder.Encode(ptr, tempStream) + stream.WriteString(string(tempStream.Buffer())) +} + +func (encoder *stringModeStringEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.elemEncoder.IsEmpty(ptr) +} + +type marshalerEncoder struct { + templateInterface emptyInterface + checkIsEmpty checkIsEmpty +} + +func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + templateInterface := encoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + marshaler, ok := (*realInterface).(json.Marshaler) + if !ok { + stream.WriteVal(nil) + return + } + + bytes, err := marshaler.MarshalJSON() + if err != nil { + stream.Error = err + } else { + stream.Write(bytes) + } +} +func (encoder *marshalerEncoder) EncodeInterface(val interface{}, stream *Stream) 
{ + WriteToStream(val, stream, encoder) +} + +func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type textMarshalerEncoder struct { + templateInterface emptyInterface + checkIsEmpty checkIsEmpty +} + +func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + templateInterface := encoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + marshaler := (*realInterface).(encoding.TextMarshaler) + bytes, err := marshaler.MarshalText() + if err != nil { + stream.Error = err + } else { + stream.WriteString(string(bytes)) + } +} + +func (encoder *textMarshalerEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return encoder.checkIsEmpty.IsEmpty(ptr) +} + +type unmarshalerDecoder struct { + templateInterface emptyInterface +} + +func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + templateInterface := decoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + unmarshaler := (*realInterface).(json.Unmarshaler) + iter.nextToken() + iter.unreadByte() // skip spaces + bytes := iter.SkipAndReturnBytes() + err := unmarshaler.UnmarshalJSON(bytes) + if err != nil { + iter.ReportError("unmarshalerDecoder", err.Error()) + } +} + +type textUnmarshalerDecoder struct { + templateInterface emptyInterface +} + +func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + templateInterface := decoder.templateInterface + templateInterface.word = ptr + realInterface := (*interface{})(unsafe.Pointer(&templateInterface)) + unmarshaler := (*realInterface).(encoding.TextUnmarshaler) + str := iter.ReadString() + err := unmarshaler.UnmarshalText([]byte(str)) + if err != nil { + 
iter.ReportError("textUnmarshalerDecoder", err.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_object.go b/vendor/github.com/json-iterator/go/feature_reflect_object.go new file mode 100644 index 000000000..036545cbe --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_object.go @@ -0,0 +1,195 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "strings" + "unsafe" +) + +func encoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + type bindingTo struct { + binding *Binding + toName string + ignored bool + } + orderedBindings := []*bindingTo{} + structDescriptor := describeStruct(cfg, prefix, typ) + for _, binding := range structDescriptor.Fields { + for _, toName := range binding.ToNames { + new := &bindingTo{ + binding: binding, + toName: toName, + } + for _, old := range orderedBindings { + if old.toName != toName { + continue + } + old.ignored, new.ignored = resolveConflictBinding(cfg, old.binding, new.binding) + } + orderedBindings = append(orderedBindings, new) + } + } + if len(orderedBindings) == 0 { + return &emptyStructEncoder{} + } + finalOrderedFields := []structFieldTo{} + for _, bindingTo := range orderedBindings { + if !bindingTo.ignored { + finalOrderedFields = append(finalOrderedFields, structFieldTo{ + encoder: bindingTo.binding.Encoder.(*structFieldEncoder), + toName: bindingTo.toName, + }) + } + } + return &structEncoder{typ, structDescriptor.onePtrEmbedded, + structDescriptor.onePtrOptimization, finalOrderedFields} +} + +func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) { + newTagged := new.Field.Tag.Get(cfg.getTagKey()) != "" + oldTagged := old.Field.Tag.Get(cfg.getTagKey()) != "" + if newTagged { + if oldTagged { + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } else 
{ + return true, false + } + } else { + if oldTagged { + return true, false + } + if len(old.levels) > len(new.levels) { + return true, false + } else if len(new.levels) > len(old.levels) { + return false, true + } else { + return true, true + } + } +} + +func decoderOfStruct(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + bindings := map[string]*Binding{} + structDescriptor := describeStruct(cfg, prefix, typ) + for _, binding := range structDescriptor.Fields { + for _, fromName := range binding.FromNames { + old := bindings[fromName] + if old == nil { + bindings[fromName] = binding + continue + } + ignoreOld, ignoreNew := resolveConflictBinding(cfg, old, binding) + if ignoreOld { + delete(bindings, fromName) + } + if !ignoreNew { + bindings[fromName] = binding + } + } + } + fields := map[string]*structFieldDecoder{} + for k, binding := range bindings { + fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder) + } + return createStructDecoder(typ, fields) +} + +type structFieldEncoder struct { + field *reflect.StructField + fieldEncoder ValEncoder + omitempty bool +} + +func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset) + encoder.fieldEncoder.Encode(fieldPtr, stream) + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%s: %s", encoder.field.Name, stream.Error.Error()) + } +} + +func (encoder *structFieldEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool { + fieldPtr := unsafe.Pointer(uintptr(ptr) + encoder.field.Offset) + return encoder.fieldEncoder.IsEmpty(fieldPtr) +} + +type structEncoder struct { + typ reflect.Type + onePtrEmbedded bool + onePtrOptimization bool + fields []structFieldTo +} + +type structFieldTo struct { + encoder *structFieldEncoder + toName string +} + +func (encoder 
*structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteObjectStart() + isNotFirst := false + for _, field := range encoder.fields { + if field.encoder.omitempty && field.encoder.IsEmpty(ptr) { + continue + } + if isNotFirst { + stream.WriteMore() + } + stream.WriteObjectField(field.toName) + field.encoder.Encode(ptr, stream) + isNotFirst = true + } + stream.WriteObjectEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error()) + } +} + +func (encoder *structEncoder) EncodeInterface(val interface{}, stream *Stream) { + e := (*emptyInterface)(unsafe.Pointer(&val)) + if encoder.onePtrOptimization { + if e.word == nil && encoder.onePtrEmbedded { + stream.WriteObjectStart() + stream.WriteObjectEnd() + return + } + ptr := uintptr(e.word) + e.word = unsafe.Pointer(&ptr) + } + if reflect.TypeOf(val).Kind() == reflect.Ptr { + encoder.Encode(unsafe.Pointer(&e.word), stream) + } else { + encoder.Encode(e.word, stream) + } +} + +func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} + +type emptyStructEncoder struct { +} + +func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + stream.WriteEmptyObject() +} + +func (encoder *emptyStructEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return false +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_optional.go b/vendor/github.com/json-iterator/go/feature_reflect_optional.go new file mode 100644 index 000000000..fc8e9bc7c --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_optional.go @@ -0,0 +1,104 @@ +package jsoniter + +import ( + "reflect" + "unsafe" +) + +func decoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + elemType := typ.Elem() + decoder := 
decoderOfType(cfg, prefix, elemType) + return &OptionalDecoder{elemType, decoder} +} + +func encoderOfOptional(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + elemType := typ.Elem() + elemEncoder := encoderOfType(cfg, prefix, elemType) + encoder := &OptionalEncoder{elemEncoder} + if elemType.Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return encoder +} + +type OptionalDecoder struct { + ValueType reflect.Type + ValueDecoder ValDecoder +} + +func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if iter.ReadNil() { + *((*unsafe.Pointer)(ptr)) = nil + } else { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + value := reflect.New(decoder.ValueType) + newPtr := extractInterface(value.Interface()).word + decoder.ValueDecoder.Decode(newPtr, iter) + *((*uintptr)(ptr)) = uintptr(newPtr) + } else { + //reuse existing instance + decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } + } +} + +type deferenceDecoder struct { + // only to deference a pointer + valueType reflect.Type + valueDecoder ValDecoder +} + +func (decoder *deferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if *((*unsafe.Pointer)(ptr)) == nil { + //pointer to null, we have to allocate memory to hold the value + value := reflect.New(decoder.valueType) + newPtr := extractInterface(value.Interface()).word + decoder.valueDecoder.Decode(newPtr, iter) + *((*uintptr)(ptr)) = uintptr(newPtr) + } else { + //reuse existing instance + decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter) + } +} + +type OptionalEncoder struct { + ValueEncoder ValEncoder +} + +func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *OptionalEncoder) EncodeInterface(val interface{}, stream *Stream) { + 
WriteToStream(val, stream, encoder) +} + +func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool { + return *((*unsafe.Pointer)(ptr)) == nil +} + +type optionalMapEncoder struct { + valueEncoder ValEncoder +} + +func (encoder *optionalMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + if *((*unsafe.Pointer)(ptr)) == nil { + stream.WriteNil() + } else { + encoder.valueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream) + } +} + +func (encoder *optionalMapEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *optionalMapEncoder) IsEmpty(ptr unsafe.Pointer) bool { + p := *((*unsafe.Pointer)(ptr)) + return p == nil || encoder.valueEncoder.IsEmpty(p) +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_slice.go b/vendor/github.com/json-iterator/go/feature_reflect_slice.go new file mode 100644 index 000000000..2b6990262 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_slice.go @@ -0,0 +1,141 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "unsafe" +) + +func decoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValDecoder { + decoder := decoderOfType(cfg, prefix+"[slice]->", typ.Elem()) + return &sliceDecoder{typ, typ.Elem(), decoder} +} + +func encoderOfSlice(cfg *frozenConfig, prefix string, typ reflect.Type) ValEncoder { + encoder := encoderOfType(cfg, prefix+"[slice]->", typ.Elem()) + if typ.Elem().Kind() == reflect.Map { + encoder = &OptionalEncoder{encoder} + } + return &sliceEncoder{typ, typ.Elem(), encoder} +} + +type sliceEncoder struct { + sliceType reflect.Type + elemType reflect.Type + elemEncoder ValEncoder +} + +func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) { + slice := (*sliceHeader)(ptr) + if slice.Data == nil { + stream.WriteNil() + return + } + if slice.Len == 0 { + stream.WriteEmptyArray() + return + } + stream.WriteArrayStart() + elemPtr := 
unsafe.Pointer(slice.Data) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + for i := 1; i < slice.Len; i++ { + stream.WriteMore() + elemPtr = unsafe.Pointer(uintptr(elemPtr) + encoder.elemType.Size()) + encoder.elemEncoder.Encode(unsafe.Pointer(elemPtr), stream) + } + stream.WriteArrayEnd() + if stream.Error != nil && stream.Error != io.EOF { + stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error()) + } +} + +func (encoder *sliceEncoder) EncodeInterface(val interface{}, stream *Stream) { + WriteToStream(val, stream, encoder) +} + +func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool { + slice := (*sliceHeader)(ptr) + return slice.Len == 0 +} + +type sliceDecoder struct { + sliceType reflect.Type + elemType reflect.Type + elemDecoder ValDecoder +} + +// sliceHeader is a safe version of SliceHeader used within this package. +type sliceHeader struct { + Data unsafe.Pointer + Len int + Cap int +} + +func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + decoder.doDecode(ptr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error()) + } +} + +func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) { + slice := (*sliceHeader)(ptr) + if iter.ReadNil() { + slice.Len = 0 + slice.Cap = 0 + slice.Data = nil + return + } + reuseSlice(slice, decoder.sliceType, 4) + slice.Len = 0 + offset := uintptr(0) + iter.ReadArrayCB(func(iter *Iterator) bool { + growOne(slice, decoder.sliceType, decoder.elemType) + decoder.elemDecoder.Decode(unsafe.Pointer(uintptr(slice.Data)+offset), iter) + offset += decoder.elemType.Size() + return true + }) +} + +// grow grows the slice s so that it can hold extra more values, allocating +// more capacity if needed. It also returns the old and new slice lengths. 
+func growOne(slice *sliceHeader, sliceType reflect.Type, elementType reflect.Type) { + newLen := slice.Len + 1 + if newLen <= slice.Cap { + slice.Len = newLen + return + } + newCap := slice.Cap + if newCap == 0 { + newCap = 1 + } else { + for newCap < newLen { + if slice.Len < 1024 { + newCap += newCap + } else { + newCap += newCap / 4 + } + } + } + newVal := reflect.MakeSlice(sliceType, newLen, newCap) + dst := unsafe.Pointer(newVal.Pointer()) + // copy old array into new array + originalBytesCount := slice.Len * int(elementType.Size()) + srcSliceHeader := (unsafe.Pointer)(&sliceHeader{slice.Data, originalBytesCount, originalBytesCount}) + dstSliceHeader := (unsafe.Pointer)(&sliceHeader{dst, originalBytesCount, originalBytesCount}) + copy(*(*[]byte)(dstSliceHeader), *(*[]byte)(srcSliceHeader)) + slice.Data = dst + slice.Len = newLen + slice.Cap = newCap +} + +func reuseSlice(slice *sliceHeader, sliceType reflect.Type, expectedCap int) { + if expectedCap <= slice.Cap { + return + } + newVal := reflect.MakeSlice(sliceType, 0, expectedCap) + dst := unsafe.Pointer(newVal.Pointer()) + slice.Data = dst + slice.Cap = expectedCap +} diff --git a/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go new file mode 100644 index 000000000..d30804855 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_reflect_struct_decoder.go @@ -0,0 +1,966 @@ +package jsoniter + +import ( + "fmt" + "io" + "reflect" + "strings" + "unsafe" +) + +func createStructDecoder(typ reflect.Type, fields map[string]*structFieldDecoder) ValDecoder { + knownHash := map[int32]struct{}{ + 0: {}, + } + switch len(fields) { + case 0: + return &skipObjectDecoder{typ} + case 1: + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = 
struct{}{} + return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder} + } + case 2: + var fieldHash1 int32 + var fieldHash2 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldHash1 == 0 { + fieldHash1 = fieldHash + fieldDecoder1 = fieldDecoder + } else { + fieldHash2 = fieldHash + fieldDecoder2 = fieldDecoder + } + } + return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2} + case 3: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } + } + return &threeFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3} + case 4: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = 
fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } + } + return &fourFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4} + case 5: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } + } + return &fiveFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5} + case 6: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash 
:= calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } + } + return &sixFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6} + case 7: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = 
fieldHash + fieldDecoder6 = fieldDecoder + } else { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } + } + return &sevenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7} + case 8: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } + } + return &eightFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, 
fieldDecoder7, + fieldName8, fieldDecoder8} + case 9: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldName9 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } + } + return &nineFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9} + case 10: + var fieldName1 int32 + var fieldName2 int32 + var fieldName3 int32 + var fieldName4 int32 + 
var fieldName5 int32 + var fieldName6 int32 + var fieldName7 int32 + var fieldName8 int32 + var fieldName9 int32 + var fieldName10 int32 + var fieldDecoder1 *structFieldDecoder + var fieldDecoder2 *structFieldDecoder + var fieldDecoder3 *structFieldDecoder + var fieldDecoder4 *structFieldDecoder + var fieldDecoder5 *structFieldDecoder + var fieldDecoder6 *structFieldDecoder + var fieldDecoder7 *structFieldDecoder + var fieldDecoder8 *structFieldDecoder + var fieldDecoder9 *structFieldDecoder + var fieldDecoder10 *structFieldDecoder + for fieldName, fieldDecoder := range fields { + fieldHash := calcHash(fieldName) + _, known := knownHash[fieldHash] + if known { + return &generalStructDecoder{typ, fields} + } + knownHash[fieldHash] = struct{}{} + if fieldName1 == 0 { + fieldName1 = fieldHash + fieldDecoder1 = fieldDecoder + } else if fieldName2 == 0 { + fieldName2 = fieldHash + fieldDecoder2 = fieldDecoder + } else if fieldName3 == 0 { + fieldName3 = fieldHash + fieldDecoder3 = fieldDecoder + } else if fieldName4 == 0 { + fieldName4 = fieldHash + fieldDecoder4 = fieldDecoder + } else if fieldName5 == 0 { + fieldName5 = fieldHash + fieldDecoder5 = fieldDecoder + } else if fieldName6 == 0 { + fieldName6 = fieldHash + fieldDecoder6 = fieldDecoder + } else if fieldName7 == 0 { + fieldName7 = fieldHash + fieldDecoder7 = fieldDecoder + } else if fieldName8 == 0 { + fieldName8 = fieldHash + fieldDecoder8 = fieldDecoder + } else if fieldName9 == 0 { + fieldName9 = fieldHash + fieldDecoder9 = fieldDecoder + } else { + fieldName10 = fieldHash + fieldDecoder10 = fieldDecoder + } + } + return &tenFieldsStructDecoder{typ, + fieldName1, fieldDecoder1, + fieldName2, fieldDecoder2, + fieldName3, fieldDecoder3, + fieldName4, fieldDecoder4, + fieldName5, fieldDecoder5, + fieldName6, fieldDecoder6, + fieldName7, fieldDecoder7, + fieldName8, fieldDecoder8, + fieldName9, fieldDecoder9, + fieldName10, fieldDecoder10} + } + return &generalStructDecoder{typ, fields} +} + +type 
generalStructDecoder struct { + typ reflect.Type + fields map[string]*structFieldDecoder +} + +func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + var fieldBytes []byte + var field string + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes = iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + fieldDecoder := decoder.fields[strings.ToLower(field)] + if fieldDecoder == nil { + iter.Skip() + } else { + fieldDecoder.Decode(ptr, iter) + } + for iter.nextToken() == ',' { + if iter.cfg.objectFieldMustBeSimpleString { + fieldBytes := iter.readObjectFieldAsBytes() + field = *(*string)(unsafe.Pointer(&fieldBytes)) + } else { + field = iter.ReadString() + c := iter.nextToken() + if c != ':' { + iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c})) + } + } + fieldDecoder = decoder.fields[strings.ToLower(field)] + if fieldDecoder == nil { + iter.Skip() + } else { + fieldDecoder.Decode(ptr, iter) + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type skipObjectDecoder struct { + typ reflect.Type +} + +func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + valueType := iter.WhatIsNext() + if valueType != ObjectValue && valueType != NilValue { + iter.ReportError("skipObjectDecoder", "expect object or null") + return + } + iter.Skip() +} + +type oneFieldStructDecoder struct { + typ reflect.Type + fieldHash int32 + fieldDecoder *structFieldDecoder +} + +func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + if iter.readFieldHash() == decoder.fieldHash { + 
decoder.fieldDecoder.Decode(ptr, iter) + } else { + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type twoFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder +} + +func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type threeFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder +} + +func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type fourFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder +} + 
+func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type fiveFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder +} + +func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type sixFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + 
fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder +} + +func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type sevenFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder +} + +func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + default: + 
iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type eightFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder +} + +func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type nineFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + 
fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder + fieldHash9 int32 + fieldDecoder9 *structFieldDecoder +} + +func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type tenFieldsStructDecoder struct { + typ reflect.Type + fieldHash1 int32 + fieldDecoder1 *structFieldDecoder + fieldHash2 int32 + fieldDecoder2 *structFieldDecoder + fieldHash3 int32 + fieldDecoder3 *structFieldDecoder + fieldHash4 int32 + fieldDecoder4 *structFieldDecoder + fieldHash5 int32 + fieldDecoder5 *structFieldDecoder + fieldHash6 int32 + fieldDecoder6 *structFieldDecoder + fieldHash7 int32 + fieldDecoder7 *structFieldDecoder + fieldHash8 int32 + fieldDecoder8 *structFieldDecoder + fieldHash9 int32 + fieldDecoder9 *structFieldDecoder + fieldHash10 int32 + fieldDecoder10 *structFieldDecoder +} + +func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + if !iter.readObjectStart() { + return + } + for { + switch iter.readFieldHash() { + case decoder.fieldHash1: + 
decoder.fieldDecoder1.Decode(ptr, iter) + case decoder.fieldHash2: + decoder.fieldDecoder2.Decode(ptr, iter) + case decoder.fieldHash3: + decoder.fieldDecoder3.Decode(ptr, iter) + case decoder.fieldHash4: + decoder.fieldDecoder4.Decode(ptr, iter) + case decoder.fieldHash5: + decoder.fieldDecoder5.Decode(ptr, iter) + case decoder.fieldHash6: + decoder.fieldDecoder6.Decode(ptr, iter) + case decoder.fieldHash7: + decoder.fieldDecoder7.Decode(ptr, iter) + case decoder.fieldHash8: + decoder.fieldDecoder8.Decode(ptr, iter) + case decoder.fieldHash9: + decoder.fieldDecoder9.Decode(ptr, iter) + case decoder.fieldHash10: + decoder.fieldDecoder10.Decode(ptr, iter) + default: + iter.Skip() + } + if iter.isObjectEnd() { + break + } + } + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error()) + } +} + +type structFieldDecoder struct { + field *reflect.StructField + fieldDecoder ValDecoder +} + +func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) { + fieldPtr := unsafe.Pointer(uintptr(ptr) + decoder.field.Offset) + decoder.fieldDecoder.Decode(fieldPtr, iter) + if iter.Error != nil && iter.Error != io.EOF { + iter.Error = fmt.Errorf("%s: %s", decoder.field.Name, iter.Error.Error()) + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream.go b/vendor/github.com/json-iterator/go/feature_stream.go new file mode 100644 index 000000000..97355eb5b --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream.go @@ -0,0 +1,308 @@ +package jsoniter + +import ( + "io" +) + +// stream is a io.Writer like object, with JSON specific write functions. +// Error is not returned as return value, but stored as Error member on this stream instance. +type Stream struct { + cfg *frozenConfig + out io.Writer + buf []byte + n int + Error error + indention int + Attachment interface{} // open for customized encoder +} + +// NewStream create new stream instance. 
+// cfg can be jsoniter.ConfigDefault. +// out can be nil if write to internal buffer. +// bufSize is the initial size for the internal buffer in bytes. +func NewStream(cfg API, out io.Writer, bufSize int) *Stream { + return &Stream{ + cfg: cfg.(*frozenConfig), + out: out, + buf: make([]byte, bufSize), + n: 0, + Error: nil, + indention: 0, + } +} + +// Pool returns a pool can provide more stream with same configuration +func (stream *Stream) Pool() StreamPool { + return stream.cfg +} + +// Reset reuse this stream instance by assign a new writer +func (stream *Stream) Reset(out io.Writer) { + stream.out = out + stream.n = 0 +} + +// Available returns how many bytes are unused in the buffer. +func (stream *Stream) Available() int { + return len(stream.buf) - stream.n +} + +// Buffered returns the number of bytes that have been written into the current buffer. +func (stream *Stream) Buffered() int { + return stream.n +} + +// Buffer if writer is nil, use this method to take the result +func (stream *Stream) Buffer() []byte { + return stream.buf[:stream.n] +} + +// Write writes the contents of p into the buffer. +// It returns the number of bytes written. +// If nn < len(p), it also returns an error explaining +// why the write is short. +func (stream *Stream) Write(p []byte) (nn int, err error) { + for len(p) > stream.Available() && stream.Error == nil { + if stream.out == nil { + stream.growAtLeast(len(p)) + } else { + var n int + if stream.Buffered() == 0 { + // Large write, empty buffer. + // Write directly from p to avoid copy. + n, stream.Error = stream.out.Write(p) + } else { + n = copy(stream.buf[stream.n:], p) + stream.n += n + stream.Flush() + } + nn += n + p = p[n:] + } + } + if stream.Error != nil { + return nn, stream.Error + } + n := copy(stream.buf[stream.n:], p) + stream.n += n + nn += n + return nn, nil +} + +// WriteByte writes a single byte. 
+func (stream *Stream) writeByte(c byte) { + if stream.Error != nil { + return + } + if stream.Available() < 1 { + stream.growAtLeast(1) + } + stream.buf[stream.n] = c + stream.n++ +} + +func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 2 { + stream.growAtLeast(2) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.n += 2 +} + +func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 3 { + stream.growAtLeast(3) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.n += 3 +} + +func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 4 { + stream.growAtLeast(4) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.buf[stream.n+3] = c4 + stream.n += 4 +} + +func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) { + if stream.Error != nil { + return + } + if stream.Available() < 5 { + stream.growAtLeast(5) + } + stream.buf[stream.n] = c1 + stream.buf[stream.n+1] = c2 + stream.buf[stream.n+2] = c3 + stream.buf[stream.n+3] = c4 + stream.buf[stream.n+4] = c5 + stream.n += 5 +} + +// Flush writes any buffered data to the underlying io.Writer. 
+func (stream *Stream) Flush() error { + if stream.out == nil { + return nil + } + if stream.Error != nil { + return stream.Error + } + if stream.n == 0 { + return nil + } + n, err := stream.out.Write(stream.buf[0:stream.n]) + if n < stream.n && err == nil { + err = io.ErrShortWrite + } + if err != nil { + if n > 0 && n < stream.n { + copy(stream.buf[0:stream.n-n], stream.buf[n:stream.n]) + } + stream.n -= n + stream.Error = err + return err + } + stream.n = 0 + return nil +} + +func (stream *Stream) ensure(minimal int) { + available := stream.Available() + if available < minimal { + stream.growAtLeast(minimal) + } +} + +func (stream *Stream) growAtLeast(minimal int) { + if stream.out != nil { + stream.Flush() + if stream.Available() >= minimal { + return + } + } + toGrow := len(stream.buf) + if toGrow < minimal { + toGrow = minimal + } + newBuf := make([]byte, len(stream.buf)+toGrow) + copy(newBuf, stream.Buffer()) + stream.buf = newBuf +} + +// WriteRaw write string out without quotes, just like []byte +func (stream *Stream) WriteRaw(s string) { + stream.ensure(len(s)) + if stream.Error != nil { + return + } + n := copy(stream.buf[stream.n:], s) + stream.n += n +} + +// WriteNil write null to stream +func (stream *Stream) WriteNil() { + stream.writeFourBytes('n', 'u', 'l', 'l') +} + +// WriteTrue write true to stream +func (stream *Stream) WriteTrue() { + stream.writeFourBytes('t', 'r', 'u', 'e') +} + +// WriteFalse write false to stream +func (stream *Stream) WriteFalse() { + stream.writeFiveBytes('f', 'a', 'l', 's', 'e') +} + +// WriteBool write true or false into stream +func (stream *Stream) WriteBool(val bool) { + if val { + stream.WriteTrue() + } else { + stream.WriteFalse() + } +} + +// WriteObjectStart write { with possible indention +func (stream *Stream) WriteObjectStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('{') + stream.writeIndention(0) +} + +// WriteObjectField write "field": with possible indention +func (stream 
*Stream) WriteObjectField(field string) { + stream.WriteString(field) + if stream.indention > 0 { + stream.writeTwoBytes(':', ' ') + } else { + stream.writeByte(':') + } +} + +// WriteObjectEnd write } with possible indention +func (stream *Stream) WriteObjectEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte('}') +} + +// WriteEmptyObject write {} +func (stream *Stream) WriteEmptyObject() { + stream.writeByte('{') + stream.writeByte('}') +} + +// WriteMore write , with possible indention +func (stream *Stream) WriteMore() { + stream.writeByte(',') + stream.writeIndention(0) +} + +// WriteArrayStart write [ with possible indention +func (stream *Stream) WriteArrayStart() { + stream.indention += stream.cfg.indentionStep + stream.writeByte('[') + stream.writeIndention(0) +} + +// WriteEmptyArray write [] +func (stream *Stream) WriteEmptyArray() { + stream.writeTwoBytes('[', ']') +} + +// WriteArrayEnd write ] with possible indention +func (stream *Stream) WriteArrayEnd() { + stream.writeIndention(stream.cfg.indentionStep) + stream.indention -= stream.cfg.indentionStep + stream.writeByte(']') +} + +func (stream *Stream) writeIndention(delta int) { + if stream.indention == 0 { + return + } + stream.writeByte('\n') + toWrite := stream.indention - delta + stream.ensure(toWrite) + for i := 0; i < toWrite && stream.n < len(stream.buf); i++ { + stream.buf[stream.n] = ' ' + stream.n++ + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_float.go b/vendor/github.com/json-iterator/go/feature_stream_float.go new file mode 100644 index 000000000..9a404e11d --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_float.go @@ -0,0 +1,96 @@ +package jsoniter + +import ( + "math" + "strconv" +) + +var pow10 []uint64 + +func init() { + pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000} +} + +// WriteFloat32 write float32 to stream +func 
(stream *Stream) WriteFloat32(val float32) { + abs := math.Abs(float64(val)) + fmt := byte('f') + // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. + if abs != 0 { + if float32(abs) < 1e-6 || float32(abs) >= 1e21 { + fmt = 'e' + } + } + stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 32)) +} + +// WriteFloat32Lossy write float32 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat32Lossy(val float32) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat32(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(float64(val)*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + stream.ensure(10) + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[stream.n-1] == '0' { + stream.n-- + } +} + +// WriteFloat64 write float64 to stream +func (stream *Stream) WriteFloat64(val float64) { + abs := math.Abs(val) + fmt := byte('f') + // Note: Must use float64 comparisons to get precise cutoffs right.
+ if abs != 0 { + if abs < 1e-6 || abs >= 1e21 { + fmt = 'e' + } + } + stream.WriteRaw(strconv.FormatFloat(float64(val), fmt, -1, 64)) +} + +// WriteFloat64Lossy write float64 to stream with ONLY 6 digits precision although much much faster +func (stream *Stream) WriteFloat64Lossy(val float64) { + if val < 0 { + stream.writeByte('-') + val = -val + } + if val > 0x4ffffff { + stream.WriteFloat64(val) + return + } + precision := 6 + exp := uint64(1000000) // 6 + lval := uint64(val*float64(exp) + 0.5) + stream.WriteUint64(lval / exp) + fval := lval % exp + if fval == 0 { + return + } + stream.writeByte('.') + stream.ensure(10) + for p := precision - 1; p > 0 && fval < pow10[p]; p-- { + stream.writeByte('0') + } + stream.WriteUint64(fval) + for stream.buf[stream.n-1] == '0' { + stream.n-- + } +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_int.go b/vendor/github.com/json-iterator/go/feature_stream_int.go new file mode 100644 index 000000000..7cfd522c1 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_int.go @@ -0,0 +1,320 @@ +package jsoniter + +var digits []uint32 + +func init() { + digits = make([]uint32, 1000) + for i := uint32(0); i < 1000; i++ { + digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0' + if i < 10 { + digits[i] += 2 << 24 + } else if i < 100 { + digits[i] += 1 << 24 + } + } +} + +func writeFirstBuf(buf []byte, v uint32, n int) int { + start := v >> 24 + if start == 0 { + buf[n] = byte(v >> 16) + n++ + buf[n] = byte(v >> 8) + n++ + } else if start == 1 { + buf[n] = byte(v >> 8) + n++ + } + buf[n] = byte(v) + n++ + return n +} + +func writeBuf(buf []byte, v uint32, n int) { + buf[n] = byte(v >> 16) + buf[n+1] = byte(v >> 8) + buf[n+2] = byte(v) +} + +// WriteUint8 write uint8 to stream +func (stream *Stream) WriteUint8(val uint8) { + stream.ensure(3) + stream.n = writeFirstBuf(stream.buf, digits[val], stream.n) +} + +// WriteInt8 write int8 to stream 
+func (stream *Stream) WriteInt8(nval int8) { + stream.ensure(4) + n := stream.n + var val uint8 + if nval < 0 { + val = uint8(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint8(nval) + } + stream.n = writeFirstBuf(stream.buf, digits[val], n) +} + +// WriteUint16 write uint16 to stream +func (stream *Stream) WriteUint16(val uint16) { + stream.ensure(5) + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], stream.n) + return + } + r1 := val - q1*1000 + n := writeFirstBuf(stream.buf, digits[q1], stream.n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return +} + +// WriteInt16 write int16 to stream +func (stream *Stream) WriteInt16(nval int16) { + stream.ensure(6) + n := stream.n + var val uint16 + if nval < 0 { + val = uint16(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint16(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + n = writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return +} + +// WriteUint32 write uint32 to stream +func (stream *Stream) WriteUint32(val uint32) { + stream.ensure(10) + n := stream.n + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + } else { + r3 := q2 - q3*1000 + stream.buf[n] = byte(q3 + '0') + n++ + writeBuf(stream.buf, digits[r3], n) + n += 3 + } + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 +} + +// WriteInt32 write int32 to stream +func (stream *Stream) WriteInt32(nval int32) { + stream.ensure(11) + n := stream.n + var val uint32 + if nval < 0 { + val = 
uint32(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint32(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + } else { + r3 := q2 - q3*1000 + stream.buf[n] = byte(q3 + '0') + n++ + writeBuf(stream.buf, digits[r3], n) + n += 3 + } + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 +} + +// WriteUint64 write uint64 to stream +func (stream *Stream) WriteUint64(val uint64) { + stream.ensure(20) + n := stream.n + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + n = writeFirstBuf(stream.buf, digits[q3], n) + writeBuf(stream.buf, digits[r3], n) + writeBuf(stream.buf, digits[r2], n+3) + writeBuf(stream.buf, digits[r1], n+6) + stream.n = n + 9 + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + n = writeFirstBuf(stream.buf, digits[q4], n) + writeBuf(stream.buf, digits[r4], n) + writeBuf(stream.buf, digits[r3], n+3) + writeBuf(stream.buf, digits[r2], n+6) + writeBuf(stream.buf, digits[r1], n+9) + stream.n = n + 12 + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + n = writeFirstBuf(stream.buf, digits[q5], n) + } else { + n = writeFirstBuf(stream.buf, digits[q6], n) + r6 := 
q5 - q6*1000 + writeBuf(stream.buf, digits[r6], n) + n += 3 + } + writeBuf(stream.buf, digits[r5], n) + writeBuf(stream.buf, digits[r4], n+3) + writeBuf(stream.buf, digits[r3], n+6) + writeBuf(stream.buf, digits[r2], n+9) + writeBuf(stream.buf, digits[r1], n+12) + stream.n = n + 15 +} + +// WriteInt64 write int64 to stream +func (stream *Stream) WriteInt64(nval int64) { + stream.ensure(20) + n := stream.n + var val uint64 + if nval < 0 { + val = uint64(-nval) + stream.buf[n] = '-' + n++ + } else { + val = uint64(nval) + } + q1 := val / 1000 + if q1 == 0 { + stream.n = writeFirstBuf(stream.buf, digits[val], n) + return + } + r1 := val - q1*1000 + q2 := q1 / 1000 + if q2 == 0 { + n := writeFirstBuf(stream.buf, digits[q1], n) + writeBuf(stream.buf, digits[r1], n) + stream.n = n + 3 + return + } + r2 := q1 - q2*1000 + q3 := q2 / 1000 + if q3 == 0 { + n = writeFirstBuf(stream.buf, digits[q2], n) + writeBuf(stream.buf, digits[r2], n) + writeBuf(stream.buf, digits[r1], n+3) + stream.n = n + 6 + return + } + r3 := q2 - q3*1000 + q4 := q3 / 1000 + if q4 == 0 { + n = writeFirstBuf(stream.buf, digits[q3], n) + writeBuf(stream.buf, digits[r3], n) + writeBuf(stream.buf, digits[r2], n+3) + writeBuf(stream.buf, digits[r1], n+6) + stream.n = n + 9 + return + } + r4 := q3 - q4*1000 + q5 := q4 / 1000 + if q5 == 0 { + n = writeFirstBuf(stream.buf, digits[q4], n) + writeBuf(stream.buf, digits[r4], n) + writeBuf(stream.buf, digits[r3], n+3) + writeBuf(stream.buf, digits[r2], n+6) + writeBuf(stream.buf, digits[r1], n+9) + stream.n = n + 12 + return + } + r5 := q4 - q5*1000 + q6 := q5 / 1000 + if q6 == 0 { + n = writeFirstBuf(stream.buf, digits[q5], n) + } else { + stream.buf[n] = byte(q6 + '0') + n++ + r6 := q5 - q6*1000 + writeBuf(stream.buf, digits[r6], n) + n += 3 + } + writeBuf(stream.buf, digits[r5], n) + writeBuf(stream.buf, digits[r4], n+3) + writeBuf(stream.buf, digits[r3], n+6) + writeBuf(stream.buf, digits[r2], n+9) + writeBuf(stream.buf, digits[r1], n+12) + stream.n = n + 15 
+} + +// WriteInt write int to stream +func (stream *Stream) WriteInt(val int) { + stream.WriteInt64(int64(val)) +} + +// WriteUint write uint to stream +func (stream *Stream) WriteUint(val uint) { + stream.WriteUint64(uint64(val)) +} diff --git a/vendor/github.com/json-iterator/go/feature_stream_string.go b/vendor/github.com/json-iterator/go/feature_stream_string.go new file mode 100644 index 000000000..334282f05 --- /dev/null +++ b/vendor/github.com/json-iterator/go/feature_stream_string.go @@ -0,0 +1,396 @@ +package jsoniter + +import ( + "unicode/utf8" +) + +// htmlSafeSet holds the value true if the ASCII character with the given +// array position can be safely represented inside a JSON string, embedded +// inside of HTML