diff --git a/go.mod b/go.mod
index df86d5317..44db4d317 100644
--- a/go.mod
+++ b/go.mod
@@ -4,7 +4,7 @@ go 1.20
require (
cloud.google.com/go/storage v1.32.0
- github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1
+ github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.2
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.1.0
github.com/VictoriaMetrics/fastcache v1.12.1
@@ -14,8 +14,8 @@ require (
github.com/VictoriaMetrics/metrics v1.24.0
github.com/VictoriaMetrics/metricsql v0.64.0
github.com/aws/aws-sdk-go-v2 v1.21.0
- github.com/aws/aws-sdk-go-v2/config v1.18.37
- github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81
+ github.com/aws/aws-sdk-go-v2/config v1.18.39
+ github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.83
github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5
github.com/bmatcuk/doublestar/v4 v4.6.0
github.com/cespare/xxhash/v2 v2.2.0
@@ -25,7 +25,7 @@ require (
github.com/googleapis/gax-go/v2 v2.12.0
github.com/influxdata/influxdb v1.11.2
github.com/klauspost/compress v1.16.7
- github.com/prometheus/prometheus v0.46.0
+ github.com/prometheus/prometheus v0.47.0
github.com/urfave/cli/v2 v2.25.7
github.com/valyala/fastjson v1.6.4
github.com/valyala/fastrand v1.1.0
@@ -33,9 +33,9 @@ require (
github.com/valyala/gozstd v1.20.1
github.com/valyala/histogram v1.2.0
github.com/valyala/quicktemplate v1.7.0
- golang.org/x/net v0.14.0
- golang.org/x/oauth2 v0.11.0
- golang.org/x/sys v0.11.0
+ golang.org/x/net v0.15.0
+ golang.org/x/oauth2 v0.12.0
+ golang.org/x/sys v0.12.0
google.golang.org/api v0.138.0
gopkg.in/yaml.v2 v2.4.0
)
@@ -47,12 +47,12 @@ require (
cloud.google.com/go/iam v1.1.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 // indirect
- github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 // indirect
+ github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
- github.com/aws/aws-sdk-go v1.44.333 // indirect
+ github.com/aws/aws-sdk-go v1.45.4 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 // indirect
- github.com/aws/aws-sdk-go-v2/credentials v1.13.35 // indirect
+ github.com/aws/aws-sdk-go-v2/credentials v1.13.37 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 // indirect
@@ -62,8 +62,8 @@ require (
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.35 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4 // indirect
- github.com/aws/aws-sdk-go-v2/service/sso v1.13.5 // indirect
- github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.5 // indirect
+ github.com/aws/aws-sdk-go-v2/service/sso v1.13.6 // indirect
+ github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.6 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.21.5 // indirect
github.com/aws/smithy-go v1.14.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -80,17 +80,20 @@ require (
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/go-cmp v0.5.9 // indirect
- github.com/google/s2a-go v0.1.5 // indirect
+ github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.3.1 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.2.5 // indirect
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
+ github.com/json-iterator/go v1.1.12 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
+ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+ github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
@@ -107,23 +110,26 @@ require (
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
go.opencensus.io v0.24.0 // indirect
+ go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 // indirect
+ go.opentelemetry.io/collector/semconv v0.84.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.43.0 // indirect
go.opentelemetry.io/otel v1.17.0 // indirect
go.opentelemetry.io/otel/metric v1.17.0 // indirect
go.opentelemetry.io/otel/trace v1.17.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.2.1 // indirect
- golang.org/x/crypto v0.12.0 // indirect
- golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 // indirect
+ go.uber.org/multierr v1.11.0 // indirect
+ golang.org/x/crypto v0.13.0 // indirect
+ golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
golang.org/x/sync v0.3.0 // indirect
- golang.org/x/text v0.12.0 // indirect
+ golang.org/x/text v0.13.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
- google.golang.org/appengine v1.6.7 // indirect
+ google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto v0.0.0-20230822172742-b8732ec3820d // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20230822172742-b8732ec3820d // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect
- google.golang.org/grpc v1.57.0 // indirect
+ google.golang.org/grpc v1.58.0 // indirect
google.golang.org/protobuf v1.31.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
diff --git a/go.sum b/go.sum
index bdba8f831..6fac55c99 100644
--- a/go.sum
+++ b/go.sum
@@ -42,8 +42,8 @@ cloud.google.com/go/storage v1.32.0 h1:5w6DxEGOnktmJHarxAOUywxVW9lbNWIzlzzUltG/3
cloud.google.com/go/storage v1.32.0/go.mod h1:Hhh/dogNRGca7IWv1RC2YqEn0c0G77ctA/OxflYkiD8=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible h1:HzKLt3kIwMm4KeJYTdx9EbjRYTySD/t8i1Ee/W5EGXw=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1 h1:/iHxaJhsFr0+xVFfbMr5vxz848jyiWuIEDhYq3y5odY=
-github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.1/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.2 h1:t5+QXLCK9SVi0PPdaY0PrFvYUo24KwA0QwxnaHRSVd4=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.7.2/go.mod h1:bjGvMhVMb+EEm3VRNQawDMUyMMjo+S5ewNjflkep/0Q=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1 h1:LNHhpdK7hzUcx/k1LIcuh5k7k1LGIWLQfCjaneSj7Fc=
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.1/go.mod h1:uE9zaUfEQT/nbQjVi2IblCG9iaLtZsuYZ8ne+PuQ02M=
github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0 h1:sXr+ck84g/ZlZUOZiNELInmMgOsuGwdjjVkEIde0OtY=
@@ -59,8 +59,8 @@ github.com/Azure/go-autorest/autorest/to v0.4.0 h1:oXVqrxakqqV1UZdSazDOPOLvOIz+X
github.com/Azure/go-autorest/autorest/validation v0.3.1 h1:AgyqjAd94fwNAoTjl/WQXg4VvFeRFpO+UhNyRXqF1ac=
github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg=
github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=
-github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1 h1:WpB/QDNLpMw72xHJc34BNNykqSOeEJDAWkhf0u12/Jk=
-github.com/AzureAD/microsoft-authentication-library-for-go v1.1.1/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0 h1:hVeq+yCyUi+MsoO/CU95yqCIcdzra5ovzk8Q2BBpV2M=
+github.com/AzureAD/microsoft-authentication-library-for-go v1.2.0/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
@@ -85,23 +85,22 @@ github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156 h1:eMwmnE/GDgah
github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM=
github.com/andybalholm/brotli v1.0.2/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
-github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/aws/aws-sdk-go v1.38.35/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
-github.com/aws/aws-sdk-go v1.44.333 h1:X0j5TGXtHLZzDB/uRcGKLG77ERFtxYQtXefs+Apf2PU=
-github.com/aws/aws-sdk-go v1.44.333/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
+github.com/aws/aws-sdk-go v1.45.4 h1:6B8oTYNEncxga8EV1C6Q4iJNnpDIqLEigy0v0oh2qYw=
+github.com/aws/aws-sdk-go v1.45.4/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
github.com/aws/aws-sdk-go-v2 v1.21.0 h1:gMT0IW+03wtYJhRqTVYn0wLzwdnK9sRMcxmtfGzRdJc=
github.com/aws/aws-sdk-go-v2 v1.21.0/go.mod h1:/RfNgGmRxI+iFOB1OeJUyxiU+9s88k3pfHvDagGEp0M=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13 h1:OPLEkmhXf6xFPiz0bLeDArZIDx1NNS4oJyG4nv3Gct0=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.13/go.mod h1:gpAbvyDGQFozTEmlTFO8XcQKHzubdq0LzRyJpG6MiXM=
-github.com/aws/aws-sdk-go-v2/config v1.18.37 h1:RNAfbPqw1CstCooHaTPhScz7z1PyocQj0UL+l95CgzI=
-github.com/aws/aws-sdk-go-v2/config v1.18.37/go.mod h1:8AnEFxW9/XGKCbjYDCJy7iltVNyEI9Iu9qC21UzhhgQ=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.35 h1:QpsNitYJu0GgvMBLUIYu9H4yryA5kMksjeIVQfgXrt8=
-github.com/aws/aws-sdk-go-v2/credentials v1.13.35/go.mod h1:o7rCaLtvK0hUggAGclf76mNGGkaG5a9KWlp+d9IpcV8=
+github.com/aws/aws-sdk-go-v2/config v1.18.39 h1:oPVyh6fuu/u4OiW4qcuQyEtk7U7uuNBmHmJSLg1AJsQ=
+github.com/aws/aws-sdk-go-v2/config v1.18.39/go.mod h1:+NH/ZigdPckFpgB1TRcRuWCB/Kbbvkxc/iNAKTq5RhE=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.37 h1:BvEdm09+ZEh2XtN+PVHPcYwKY3wIeB6pw7vPRM4M9/U=
+github.com/aws/aws-sdk-go-v2/credentials v1.13.37/go.mod h1:ACLrdkd4CLZyXOghZ8IYumQbcooAcp2jo/s2xsFH8IM=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11 h1:uDZJF1hu0EVT/4bogChk8DyjSF6fof6uL/0Y26Ma7Fg=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.11/go.mod h1:TEPP4tENqBGO99KwVpV9MlOX4NSrSLP8u3KRy2CDwA8=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81 h1:PQ9zoe2GEoTVSVPuNtjNrKeVPvyVPWesETMPb7KB3Fk=
-github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.81/go.mod h1:EztVLIU9xGitjdZ1TyHWL9IcNx4952FlqKJe6GLG2z4=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.83 h1:wcluDLIQ0uYaxv0fCWQRimbXkPdTgWHUD21j1CzXEwc=
+github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.83/go.mod h1:nGCBuon134gW67yAtxHKV73x+tAcY/xG4ZPNPDB1h/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41 h1:22dGT7PneFMx4+b3pz7lMTRyN8ZKH7M2cW4GP9yUS2g=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.41/go.mod h1:CrObHAuPneJBlfEJ5T3szXOUkLEThaGfvnhTf33buas=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.35 h1:SijA0mgjV8E+8G45ltVHs0fvKpTj8xmZJ3VwhGKtUSI=
@@ -120,10 +119,10 @@ github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4 h1:v0jkRigbSD6uOd
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.15.4/go.mod h1:LhTyt8J04LL+9cIt7pYJ5lbS/U98ZmXovLOR/4LUsk8=
github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5 h1:A42xdtStObqy7NGvzZKpnyNXvoOmm+FENobZ0/ssHWk=
github.com/aws/aws-sdk-go-v2/service/s3 v1.38.5/go.mod h1:rDGMZA7f4pbmTtPOk5v5UM2lmX6UAbRnMDJeDvnH7AM=
-github.com/aws/aws-sdk-go-v2/service/sso v1.13.5 h1:oCvTFSDi67AX0pOX3PuPdGFewvLRU2zzFSrTsgURNo0=
-github.com/aws/aws-sdk-go-v2/service/sso v1.13.5/go.mod h1:fIAwKQKBFu90pBxx07BFOMJLpRUGu8VOzLJakeY+0K4=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.5 h1:dnInJb4S0oy8aQuri1mV6ipLlnZPfnsDNB9BGO9PDNY=
-github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.5/go.mod h1:yygr8ACQRY2PrEcy3xsUI357stq2AxnFM6DIsR9lij4=
+github.com/aws/aws-sdk-go-v2/service/sso v1.13.6 h1:2PylFCfKCEDv6PeSN09pC/VUiRd10wi1VfHG5FrW0/g=
+github.com/aws/aws-sdk-go-v2/service/sso v1.13.6/go.mod h1:fIAwKQKBFu90pBxx07BFOMJLpRUGu8VOzLJakeY+0K4=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.6 h1:pSB560BbVj9ZlJZF4WYj5zsytWHWKxg+NgyGV4B2L58=
+github.com/aws/aws-sdk-go-v2/service/ssooidc v1.15.6/go.mod h1:yygr8ACQRY2PrEcy3xsUI357stq2AxnFM6DIsR9lij4=
github.com/aws/aws-sdk-go-v2/service/sts v1.21.5 h1:CQBFElb0LS8RojMJlxRSo/HXipvTZW2S44Lt9Mk2aYQ=
github.com/aws/aws-sdk-go-v2/service/sts v1.21.5/go.mod h1:VC7JDqsqiwXukYEDjoHh9U0fOJtNWh04FPQz4ct4GGU=
github.com/aws/smithy-go v1.14.2 h1:MJU9hqBGbvWZdApzpvoF2WAIJDbtjK2NDJSiJP7HblQ=
@@ -145,11 +144,6 @@ github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5P
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
-github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
-github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
-github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
-github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
-github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k=
github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
@@ -170,8 +164,6 @@ github.com/emicklei/go-restful/v3 v3.10.2 h1:hIovbnmBTLjHXkqEBUz3HGpXZdM7ZrE9fJI
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
-github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
-github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/envoyproxy/protoc-gen-validate v1.0.2 h1:QkIBuU5k+x7/QXPvPPnWXWlCdaBFApVqftFV6k087DA=
@@ -180,7 +172,6 @@ github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBD
github.com/felixge/httpsnoop v1.0.3 h1:s/nj+GCswXYzN5v2DpNMuMQYe+0DDwt5WVCU6CWBdXk=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
-github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -277,8 +268,8 @@ github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
-github.com/google/s2a-go v0.1.5 h1:8IYp3w9nysqv3JH+NJgXJzGbDHzLOTj43BmSkp+O7qg=
-github.com/google/s2a-go v0.1.5/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A=
+github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
+github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4=
github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
@@ -292,7 +283,6 @@ github.com/gophercloud/gophercloud v1.5.0 h1:cDN6XFCLKiiqvYpjQLq9AiM7RDRbIC9450W
github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc=
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd h1:PpuIBO5P3e9hpqBD0O/HjhShYuM6XE0i/lbE6J94kww=
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd/go.mod h1:M5qHK+eWfAv8VR/265dIuEpL3fNfeC21tXXp9itM24A=
-github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/consul/api v1.22.0 h1:ydEvDooB/A0c/xpsBd8GSt7P2/zYPBui4KrNip0xGjE=
github.com/hashicorp/cronexpr v1.1.2 h1:wG/ZYIKT+RT3QkOdgYc+xsKWVRgnxJ1OJtjjy84fJ9A=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
@@ -324,6 +314,7 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
@@ -366,6 +357,7 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
@@ -410,17 +402,16 @@ github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
github.com/prometheus/procfs v0.11.1 h1:xRC8Iq1yyca5ypa9n1EZnWZkt7dwcoRPQwX/5gwaUuI=
github.com/prometheus/procfs v0.11.1/go.mod h1:eesXgaPo1q7lBpVMoMy0ZOFTth9hBn4W/y0/p/ScXhY=
-github.com/prometheus/prometheus v0.46.0 h1:9JSdXnsuT6YsbODEhSQMwxNkGwPExfmzqG73vCMk/Kw=
-github.com/prometheus/prometheus v0.46.0/go.mod h1:10L5IJE5CEsjee1FnOcVswYXlPIscDWWt3IJ2UDYrz4=
+github.com/prometheus/prometheus v0.47.0 h1:tIJJKZGlmrMVsvIt6rMfB8he7CRHEc8ZxS5ubcZtbkM=
+github.com/prometheus/prometheus v0.47.0/go.mod h1:J/bmOSjgH7lFxz2gZhrWEZs2i64vMS+HIuZfmYNhJ/M=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
-github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
-github.com/scaleway/scaleway-sdk-go v1.0.0-beta.19 h1:+1H+N9QFl2Sfvia0FBYfMrHYHYhmpZxhSE0wpPL2lYs=
+github.com/scaleway/scaleway-sdk-go v1.0.0-beta.20 h1:a9hSJdJcd16e0HoMsnFvaHvxB3pxSD+SC7+CISp7xY0=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
@@ -433,7 +424,6 @@ github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpE
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
@@ -473,6 +463,10 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014 h1:iT5qH0NLmkGeIdDtnBogYDx7L58t6CaWGL378DEo2QY=
+go.opentelemetry.io/collector/pdata v1.0.0-rcv0014/go.mod h1:BRvDrx43kiSoUx3mr7SoA7h9B8+OY99mUK+CZSQFWW4=
+go.opentelemetry.io/collector/semconv v0.84.0 h1:sI1B8ebHhfJPd87iyba66TDnluVFvYu8CEpSjKHqIDc=
+go.opentelemetry.io/collector/semconv v0.84.0/go.mod h1:j/8THcqVxFna1FpvA2zYIsUperEtOaRaqoLYIN4doWw=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.43.0 h1:HKORGpiOY0R0nAPtKx/ub8/7XoHhRooP8yNRkuPfelI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.43.0/go.mod h1:e+y1M74SYXo/FcIx3UATwth2+5dDkM8dBi7eXg1tbw8=
go.opentelemetry.io/otel v1.17.0 h1:MW+phZ6WZ5/uk2nd93ANk/6yJ+dVrvNWUjGhnnFU5jM=
@@ -481,11 +475,12 @@ go.opentelemetry.io/otel/metric v1.17.0 h1:iG6LGVz5Gh+IuO0jmgvpTB6YVrCGngi8QGm+p
go.opentelemetry.io/otel/metric v1.17.0/go.mod h1:h4skoxdZI17AxwITdmdZjjYJQH5nzijUUjm+wtPph5o=
go.opentelemetry.io/otel/trace v1.17.0 h1:/SWhSRHmDPOImIAetP1QAeMnZYiQXrTy4fMMYOdSKWQ=
go.opentelemetry.io/otel/trace v1.17.0/go.mod h1:I/4vKTgFclIsXRVucpH25X0mpFSczM7aHeaz0ZBLWjY=
-go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
+go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
+go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -494,9 +489,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
-golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
-golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk=
-golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
+golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -507,8 +501,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1 h1:MGwJjxBy0HJshjDNfLsYO8xppfqWlA5ZT9OhtUUhTNw=
-golang.org/x/exp v0.0.0-20230713183714-613f0c0eb8a1/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
+golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
+golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -564,19 +558,18 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
-golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
-golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
-golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
+golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
-golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU=
-golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk=
+golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4=
+golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -637,12 +630,12 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM=
-golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
-golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0=
+golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -652,8 +645,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc=
-golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
+golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -702,7 +695,7 @@ golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc
golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
-golang.org/x/tools v0.11.0 h1:EMCa6U9S2LtZXLAMoWiR/R8dAQFRqbAitmbJ2UKhoi8=
+golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -733,8 +726,8 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
-google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
-google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
+google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -758,7 +751,6 @@ google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfG
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
-google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
@@ -783,12 +775,9 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
-google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
-google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
-google.golang.org/grpc v1.57.0 h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw=
-google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo=
+google.golang.org/grpc v1.58.0 h1:32JY8YpPMSR45K+c3o6b8VL73V+rR8k+DeMIr4vRH8o=
+google.golang.org/grpc v1.58.0/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -813,7 +802,6 @@ gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md
index cd7c0da31..93589e325 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md
+++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md
@@ -1,5 +1,11 @@
# Release History
+## 1.7.2 (2023-09-06)
+
+### Bugs Fixed
+
+* Fix default HTTP transport to work in WASM modules.
+
## 1.7.1 (2023-08-14)
 ### Bugs Fixed
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go
index 577435a49..eb815a2a7 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go
@@ -32,5 +32,5 @@ const (
Module = "azcore"
// Version is the semantic version (see http://semver.org) of this module.
- Version = "v1.7.1"
+ Version = "v1.7.2"
)
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_other.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_other.go
new file mode 100644
index 000000000..1c75d771f
--- /dev/null
+++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_other.go
@@ -0,0 +1,15 @@
+//go:build !wasm
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net"
+)
+
+func defaultTransportDialContext(dialer *net.Dialer) func(context.Context, string, string) (net.Conn, error) {
+ return dialer.DialContext
+}
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_wasm.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_wasm.go
new file mode 100644
index 000000000..3dc9eeecd
--- /dev/null
+++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_wasm.go
@@ -0,0 +1,15 @@
+//go:build (js && wasm) || wasip1
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net"
+)
+
+func defaultTransportDialContext(dialer *net.Dialer) func(context.Context, string, string) (net.Conn, error) {
+ return nil
+}
diff --git a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_http_client.go b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_http_client.go
index dbb9fa7f8..589d09f2c 100644
--- a/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_http_client.go
+++ b/vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_http_client.go
@@ -18,10 +18,10 @@ var defaultHTTPClient *http.Client
func init() {
defaultTransport := &http.Transport{
Proxy: http.ProxyFromEnvironment,
- DialContext: (&net.Dialer{
+ DialContext: defaultTransportDialContext(&net.Dialer{
Timeout: 30 * time.Second,
KeepAlive: 30 * time.Second,
- }).DialContext,
+ }),
ForceAttemptHTTP2: true,
MaxIdleConns: 100,
IdleConnTimeout: 90 * time.Second,
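The two new build-tagged dialer files and the transport change above implement the WASM fix noted in the changelog: on js/wasm, net/http falls back to its socket-based round trip whenever any custom dialer is set on http.Transport, so leaving DialContext nil keeps the Fetch-based transport usable. A minimal stand-alone sketch of the same pattern (illustrative only; the package and helper names below are invented, and a runtime check stands in for azcore's compile-time build tags):

// transport_dialer_sketch.go - illustrative sketch, not part of the patch.
package transportsketch

import (
	"context"
	"net"
	"net/http"
	"runtime"
	"time"
)

// dialContextFor mirrors the pattern above: return a real DialContext on
// platforms with sockets, and nil on js/wasm (and wasip1, matching the build
// tag above) so that on js/wasm net/http keeps using its Fetch-based RoundTrip.
// azcore selects this at compile time via the two build-tagged files; a runtime
// check is used here only to keep the sketch in one file.
func dialContextFor(d *net.Dialer) func(context.Context, string, string) (net.Conn, error) {
	if runtime.GOOS == "js" || runtime.GOOS == "wasip1" {
		return nil
	}
	return d.DialContext
}

func newHTTPClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			Proxy: http.ProxyFromEnvironment,
			DialContext: dialContextFor(&net.Dialer{
				Timeout:   30 * time.Second,
				KeepAlive: 30 * time.Second,
			}),
			ForceAttemptHTTP2: true,
			MaxIdleConns:      100,
			IdleConnTimeout:   90 * time.Second,
		},
	}
}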
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential/confidential.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential/confidential.go
index 6612feb4b..1841d146f 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential/confidential.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential/confidential.go
@@ -59,6 +59,8 @@ added, it doesn't exist in real life. As such I've put a PEM decoder into here.
// For details see https://aka.ms/msal-net-authenticationresult
type AuthResult = base.AuthResult
+type AuthenticationScheme = authority.AuthenticationScheme
+
type Account = shared.Account
// CertFromPEM converts a PEM file (.pem or .key) for use with [NewCredFromCert]. The file
@@ -454,6 +456,33 @@ func WithClaims(claims string) interface {
}
}
+// WithAuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+func WithAuthenticationScheme(authnScheme AuthenticationScheme) interface {
+ AcquireSilentOption
+ AcquireByCredentialOption
+ options.CallOption
+} {
+ return struct {
+ AcquireSilentOption
+ AcquireByCredentialOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenSilentOptions:
+ t.authnScheme = authnScheme
+ case *acquireTokenByCredentialOptions:
+ t.authnScheme = authnScheme
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
// WithTenantID specifies a tenant for a single authentication. It may be different than the tenant set in [New].
// This option is valid for any token acquisition method.
func WithTenantID(tenantID string) interface {
@@ -499,6 +528,7 @@ func WithTenantID(tenantID string) interface {
type acquireTokenSilentOptions struct {
account Account
claims, tenantID string
+ authnScheme AuthenticationScheme
}
// AcquireSilentOption is implemented by options for AcquireTokenSilent
@@ -549,6 +579,7 @@ func (cca Client) AcquireTokenSilent(ctx context.Context, scopes []string, opts
Credential: cca.cred,
IsAppCache: o.account.IsZero(),
TenantID: o.tenantID,
+ AuthnScheme: o.authnScheme,
}
return cca.base.AcquireTokenSilent(ctx, silentParameters)
@@ -614,6 +645,7 @@ func (cca Client) AcquireTokenByAuthCode(ctx context.Context, code string, redir
// acquireTokenByCredentialOptions contains optional configuration for AcquireTokenByCredential
type acquireTokenByCredentialOptions struct {
claims, tenantID string
+ authnScheme AuthenticationScheme
}
// AcquireByCredentialOption is implemented by options for AcquireTokenByCredential
@@ -637,7 +669,9 @@ func (cca Client) AcquireTokenByCredential(ctx context.Context, scopes []string,
authParams.Scopes = scopes
authParams.AuthorizationType = authority.ATClientCredentials
authParams.Claims = o.claims
-
+ if o.authnScheme != nil {
+ authParams.AuthnScheme = o.authnScheme
+ }
token, err := cca.base.Token.Credential(ctx, authParams, cca.cred)
if err != nil {
return AuthResult{}, err
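For context, a hedged usage sketch of the new option (illustrative only; the patch notes this hook is intended for Azure Arc proof-of-possession tokens, so popScheme below is a made-up stand-in that only shows how the option threads through AcquireTokenByCredential and how FormatAccessToken shapes the returned token):

// scheme_sketch.go - illustrative sketch, not part of the patch.
package schemesketch

import (
	"context"
	"fmt"

	"github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
)

// popScheme is a toy AuthenticationScheme; the field values are placeholders.
type popScheme struct{ keyID string }

func (p popScheme) TokenRequestParams() map[string]string {
	// Extra parameters sent to the /token endpoint by doTokenResp.
	return map[string]string{"token_type": "pop", "req_cnf": p.keyID}
}
func (p popScheme) KeyID() string { return p.keyID }
func (p popScheme) FormatAccessToken(accessToken string) (string, error) {
	// Shapes the token returned in AuthResult.AccessToken.
	return "PoP " + accessToken, nil
}
func (p popScheme) AccessTokenType() string { return "pop" }

func acquire(ctx context.Context, client confidential.Client) error {
	res, err := client.AcquireTokenByCredential(ctx,
		[]string{"https://management.azure.com/.default"},
		confidential.WithAuthenticationScheme(popScheme{keyID: "example-kid"}),
	)
	if err != nil {
		return err
	}
	fmt.Println(res.AccessToken) // already run through FormatAccessToken
	return nil
}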
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/base.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/base.go
index 5f68384f6..09a0d92f5 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/base.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/base.go
@@ -54,6 +54,7 @@ type AcquireTokenSilentParameters struct {
UserAssertion string
AuthorizationType authority.AuthorizeType
Claims string
+ AuthnScheme authority.AuthenticationScheme
}
// AcquireTokenAuthCodeParameters contains the parameters required to acquire an access token using the auth code flow.
@@ -289,6 +290,9 @@ func (b Client) AcquireTokenSilent(ctx context.Context, silent AcquireTokenSilen
authParams.AuthorizationType = silent.AuthorizationType
authParams.Claims = silent.Claims
authParams.UserAssertion = silent.UserAssertion
+ if silent.AuthnScheme != nil {
+ authParams.AuthnScheme = silent.AuthnScheme
+ }
m := b.pmanager
if authParams.AuthorizationType != authority.ATOnBehalfOf {
@@ -313,6 +317,7 @@ func (b Client) AcquireTokenSilent(ctx context.Context, silent AcquireTokenSilen
if silent.Claims == "" {
ar, err = AuthResultFromStorage(storageTokenResponse)
if err == nil {
+ ar.AccessToken, err = authParams.AuthnScheme.FormatAccessToken(ar.AccessToken)
return ar, err
}
}
@@ -417,6 +422,11 @@ func (b Client) AuthResultFromToken(ctx context.Context, authParams authority.Au
if err == nil && b.cacheAccessor != nil {
err = b.cacheAccessor.Export(ctx, b.manager, cache.ExportHints{PartitionKey: key})
}
+ if err != nil {
+ return AuthResult{}, err
+ }
+
+ ar.AccessToken, err = authParams.AuthnScheme.FormatAccessToken(ar.AccessToken)
return ar, err
}
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/items.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/items.go
index 5d4c9f1d1..f9be90276 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/items.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/items.go
@@ -12,6 +12,7 @@ import (
internalTime "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/types/time"
"github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
"github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
)
@@ -75,12 +76,14 @@ type AccessToken struct {
ExtendedExpiresOn internalTime.Unix `json:"extended_expires_on,omitempty"`
CachedAt internalTime.Unix `json:"cached_at,omitempty"`
UserAssertionHash string `json:"user_assertion_hash,omitempty"`
+ TokenType string `json:"token_type,omitempty"`
+ AuthnSchemeKeyID string `json:"keyid,omitempty"`
AdditionalFields map[string]interface{}
}
// NewAccessToken is the constructor for AccessToken.
-func NewAccessToken(homeID, env, realm, clientID string, cachedAt, expiresOn, extendedExpiresOn time.Time, scopes, token string) AccessToken {
+func NewAccessToken(homeID, env, realm, clientID string, cachedAt, expiresOn, extendedExpiresOn time.Time, scopes, token, tokenType, authnSchemeKeyID string) AccessToken {
return AccessToken{
HomeAccountID: homeID,
Environment: env,
@@ -92,6 +95,8 @@ func NewAccessToken(homeID, env, realm, clientID string, cachedAt, expiresOn, ex
CachedAt: internalTime.Unix{T: cachedAt.UTC()},
ExpiresOn: internalTime.Unix{T: expiresOn.UTC()},
ExtendedExpiresOn: internalTime.Unix{T: extendedExpiresOn.UTC()},
+ TokenType: tokenType,
+ AuthnSchemeKeyID: authnSchemeKeyID,
}
}
@@ -101,6 +106,11 @@ func (a AccessToken) Key() string {
[]string{a.HomeAccountID, a.Environment, a.CredentialType, a.ClientID, a.Realm, a.Scopes},
shared.CacheKeySeparator,
)
+ // add token type to key for new access tokens types. skip for bearer token type to
+ // preserve fwd and back compat between a common cache and msal clients
+ if !strings.EqualFold(a.TokenType, authority.AccessTokenTypeBearer) {
+ key = strings.Join([]string{key, a.TokenType}, shared.CacheKeySeparator)
+ }
return strings.ToLower(key)
}
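A small sketch of the resulting key shape (illustrative only; "-" is assumed here in place of shared.CacheKeySeparator, which may differ): Bearer tokens keep the legacy key so existing caches stay readable, while other token types gain a suffix.

// cachekey_sketch.go - illustrative sketch, not part of the patch.
package cachekeysketch

import "strings"

// key mirrors AccessToken.Key() above under the assumed "-" separator.
func key(homeID, env, credType, clientID, realm, scopes, tokenType string) string {
	k := strings.Join([]string{homeID, env, credType, clientID, realm, scopes}, "-")
	if !strings.EqualFold(tokenType, "Bearer") {
		k = strings.Join([]string{k, tokenType}, "-")
	}
	return strings.ToLower(k)
}

// key("uid.utid", "login.microsoftonline.com", "AccessToken", "client", "tenant", "scope", "Bearer")
//   => "uid.utid-login.microsoftonline.com-accesstoken-client-tenant-scope"
// the same call with tokenType "pop"
//   => "uid.utid-login.microsoftonline.com-accesstoken-client-tenant-scope-pop"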
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/partitioned_storage.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/partitioned_storage.go
index 5e1cae0b8..c09318330 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/partitioned_storage.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/partitioned_storage.go
@@ -41,6 +41,8 @@ func (m *PartitionedManager) Read(ctx context.Context, authParameters authority.
realm := authParameters.AuthorityInfo.Tenant
clientID := authParameters.ClientID
scopes := authParameters.Scopes
+ authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
+ tokenType := authParameters.AuthnScheme.AccessTokenType()
// fetch metadata if instanceDiscovery is enabled
aliases := []string{authParameters.AuthorityInfo.Host}
@@ -57,7 +59,7 @@ func (m *PartitionedManager) Read(ctx context.Context, authParameters authority.
// errors returned by read* methods indicate a cache miss and are therefore non-fatal. We continue populating
// TokenResponse fields so that e.g. lack of an ID token doesn't prevent the caller from receiving a refresh token.
- accessToken, err := m.readAccessToken(aliases, realm, clientID, userAssertionHash, scopes, partitionKeyFromRequest)
+ accessToken, err := m.readAccessToken(aliases, realm, clientID, userAssertionHash, scopes, partitionKeyFromRequest, tokenType, authnSchemeKeyID)
if err == nil {
tr.AccessToken = accessToken
}
@@ -92,7 +94,7 @@ func (m *PartitionedManager) Write(authParameters authority.AuthParams, tokenRes
target := strings.Join(tokenResponse.GrantedScopes.Slice, scopeSeparator)
userAssertionHash := authParameters.AssertionHash()
cachedAt := time.Now()
-
+ authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
var account shared.Account
if len(tokenResponse.RefreshToken) > 0 {
@@ -116,6 +118,8 @@ func (m *PartitionedManager) Write(authParameters authority.AuthParams, tokenRes
tokenResponse.ExtExpiresOn.T,
target,
tokenResponse.AccessToken,
+ tokenResponse.TokenType,
+ authnSchemeKeyID,
)
if authParameters.AuthorizationType == authority.ATOnBehalfOf {
accessToken.UserAssertionHash = userAssertionHash // get Hash method on this
@@ -215,7 +219,7 @@ func (m *PartitionedManager) aadMetadata(ctx context.Context, authorityInfo auth
return m.aadCache[authorityInfo.Host], nil
}
-func (m *PartitionedManager) readAccessToken(envAliases []string, realm, clientID, userAssertionHash string, scopes []string, partitionKey string) (AccessToken, error) {
+func (m *PartitionedManager) readAccessToken(envAliases []string, realm, clientID, userAssertionHash string, scopes []string, partitionKey, tokenType, authnSchemeKeyID string) (AccessToken, error) {
m.contractMu.RLock()
defer m.contractMu.RUnlock()
if accessTokens, ok := m.contract.AccessTokensPartition[partitionKey]; ok {
@@ -224,9 +228,11 @@ func (m *PartitionedManager) readAccessToken(envAliases []string, realm, clientI
// an issue, however if it does become a problem then we know where to look.
for _, at := range accessTokens {
if at.Realm == realm && at.ClientID == clientID && at.UserAssertionHash == userAssertionHash {
- if checkAlias(at.Environment, envAliases) {
- if isMatchingScopes(scopes, at.Scopes) {
- return at, nil
+ if at.TokenType == tokenType && at.AuthnSchemeKeyID == authnSchemeKeyID {
+ if checkAlias(at.Environment, envAliases) {
+ if isMatchingScopes(scopes, at.Scopes) {
+ return at, nil
+ }
}
}
}
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/storage.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/storage.go
index d3a39e005..11263822b 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/storage.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/storage.go
@@ -89,6 +89,8 @@ func (m *Manager) Read(ctx context.Context, authParameters authority.AuthParams)
realm := authParameters.AuthorityInfo.Tenant
clientID := authParameters.ClientID
scopes := authParameters.Scopes
+ authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
+ tokenType := authParameters.AuthnScheme.AccessTokenType()
// fetch metadata if instanceDiscovery is enabled
aliases := []string{authParameters.AuthorityInfo.Host}
@@ -100,7 +102,7 @@ func (m *Manager) Read(ctx context.Context, authParameters authority.AuthParams)
aliases = metadata.Aliases
}
- accessToken := m.readAccessToken(homeAccountID, aliases, realm, clientID, scopes)
+ accessToken := m.readAccessToken(homeAccountID, aliases, realm, clientID, scopes, tokenType, authnSchemeKeyID)
tr.AccessToken = accessToken
if homeAccountID == "" {
@@ -140,6 +142,7 @@ func (m *Manager) Write(authParameters authority.AuthParams, tokenResponse acces
clientID := authParameters.ClientID
target := strings.Join(tokenResponse.GrantedScopes.Slice, scopeSeparator)
cachedAt := time.Now()
+ authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
var account shared.Account
@@ -161,6 +164,8 @@ func (m *Manager) Write(authParameters authority.AuthParams, tokenResponse acces
tokenResponse.ExtExpiresOn.T,
target,
tokenResponse.AccessToken,
+ tokenResponse.TokenType,
+ authnSchemeKeyID,
)
// Since we have a valid access token, cache it before moving on.
@@ -248,7 +253,7 @@ func (m *Manager) aadMetadata(ctx context.Context, authorityInfo authority.Info)
return m.aadCache[authorityInfo.Host], nil
}
-func (m *Manager) readAccessToken(homeID string, envAliases []string, realm, clientID string, scopes []string) AccessToken {
+func (m *Manager) readAccessToken(homeID string, envAliases []string, realm, clientID string, scopes []string, tokenType, authnSchemeKeyID string) AccessToken {
m.contractMu.RLock()
defer m.contractMu.RUnlock()
// TODO: linear search (over a map no less) is slow for a large number (thousands) of tokens.
@@ -256,9 +261,11 @@ func (m *Manager) readAccessToken(homeID string, envAliases []string, realm, cli
// an issue, however if it does become a problem then we know where to look.
for _, at := range m.contract.AccessTokens {
if at.HomeAccountID == homeID && at.Realm == realm && at.ClientID == clientID {
- if checkAlias(at.Environment, envAliases) {
- if isMatchingScopes(scopes, at.Scopes) {
- return at
+ if (at.TokenType == tokenType && at.AuthnSchemeKeyID == authnSchemeKeyID) || (at.TokenType == "" && (tokenType == "" || tokenType == "Bearer")) {
+ if checkAlias(at.Environment, envAliases) {
+ if isMatchingScopes(scopes, at.Scopes) {
+ return at
+ }
}
}
}
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/oauth.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/oauth.go
index ebd86e2ba..ef8d908a4 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/oauth.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/oauth.go
@@ -119,6 +119,7 @@ func (t *Client) Credential(ctx context.Context, authParams authority.AuthParams
return accesstokens.TokenResponse{}, err
}
return accesstokens.TokenResponse{
+ TokenType: authParams.AuthnScheme.AccessTokenType(),
AccessToken: tr.AccessToken,
ExpiresOn: internalTime.DurationTime{
T: now.Add(time.Duration(tr.ExpiresInSeconds) * time.Second),
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/accesstokens.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/accesstokens.go
index 003d38648..a7b7b0742 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/accesstokens.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/accesstokens.go
@@ -380,6 +380,12 @@ func (c Client) FromSamlGrant(ctx context.Context, authParameters authority.Auth
func (c Client) doTokenResp(ctx context.Context, authParams authority.AuthParams, qv url.Values) (TokenResponse, error) {
resp := TokenResponse{}
+ if authParams.AuthnScheme != nil {
+ trParams := authParams.AuthnScheme.TokenRequestParams()
+ for k, v := range trParams {
+ qv.Set(k, v)
+ }
+ }
err := c.Comm.URLFormCall(ctx, authParams.Endpoints.TokenEndpoint, qv, &resp)
if err != nil {
return resp, err
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/tokens.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/tokens.go
index 3dd61d5b5..3107b45c1 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/tokens.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/tokens.go
@@ -168,6 +168,7 @@ type TokenResponse struct {
AccessToken string `json:"access_token"`
RefreshToken string `json:"refresh_token"`
+ TokenType string `json:"token_type"`
FamilyID string `json:"foci"`
IDToken IDToken `json:"id_token"`
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authority.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authority.go
index 7b2ccb4f5..9d60734f8 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authority.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authority.go
@@ -29,6 +29,7 @@ const (
defaultAPIVersion = "2021-10-01"
imdsEndpoint = "http://169.254.169.254/metadata/instance/compute/location?format=text&api-version=" + defaultAPIVersion
autoDetectRegion = "TryAutoDetect"
+ AccessTokenTypeBearer = "Bearer"
)
// These are various hosts that host AAD Instance discovery endpoints.
@@ -138,6 +139,39 @@ const (
ADFS = "ADFS"
)
+// AuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+type AuthenticationScheme interface {
+ // Extra parameters that are added to the request to the /token endpoint.
+ TokenRequestParams() map[string]string
+ // Key ID of the public / private key pair used by the encryption algorithm, if any.
+ // Tokens obtained by authentication schemes that use this are bound to the KeyId, i.e.
+ // if a different kid is presented, the access token cannot be used.
+ KeyID() string
+ // Creates the access token that goes into an Authorization HTTP header.
+ FormatAccessToken(accessToken string) (string, error)
+ // Expected to match the token_type parameter returned by ESTS. Used to disambiguate
+ // between access tokens of different types (e.g. Bearer and PoP) when loading from cache etc.
+ AccessTokenType() string
+}
+
+// BearerAuthenticationScheme is the default authentication scheme, realizing AuthenticationScheme for "Bearer" tokens.
+type BearerAuthenticationScheme struct{}
+
+var bearerAuthnScheme BearerAuthenticationScheme
+
+func (ba *BearerAuthenticationScheme) TokenRequestParams() map[string]string {
+ return nil
+}
+func (ba *BearerAuthenticationScheme) KeyID() string {
+ return ""
+}
+func (ba *BearerAuthenticationScheme) FormatAccessToken(accessToken string) (string, error) {
+ return accessToken, nil
+}
+func (ba *BearerAuthenticationScheme) AccessTokenType() string {
+ return AccessTokenTypeBearer
+}
+
// AuthParams represents the parameters used for authorization for token acquisition.
type AuthParams struct {
AuthorityInfo Info
@@ -180,6 +214,8 @@ type AuthParams struct {
LoginHint string
// DomainHint is a directive that can be used to accelerate the user to their federated IdP sign-in page
DomainHint string
+ // AuthnScheme is an optional scheme for formatting access tokens
+ AuthnScheme AuthenticationScheme
}
// NewAuthParams creates an authorization parameters object.
@@ -188,6 +224,7 @@ func NewAuthParams(clientID string, authorityInfo Info) AuthParams {
ClientID: clientID,
AuthorityInfo: authorityInfo,
CorrelationID: uuid.New().String(),
+ AuthnScheme: &bearerAuthnScheme,
}
}
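To make the new interface concrete, here is a hypothetical scheme that satisfies the four methods above; a real proof-of-possession implementation (as used by Azure Arc) would derive these values from key material rather than hard-coding them:

package main

import "fmt"

// popScheme is an illustrative stand-in for a proof-of-possession scheme.
type popScheme struct {
    kid string
}

// Extra form values for the /token request; the names here are examples only.
func (p *popScheme) TokenRequestParams() map[string]string {
    return map[string]string{"token_type": "pop", "req_cnf": p.kid}
}

// Key ID the issued tokens are bound to.
func (p *popScheme) KeyID() string { return p.kid }

// Wraps the raw token into the value placed in the Authorization header.
func (p *popScheme) FormatAccessToken(accessToken string) (string, error) {
    return "PoP " + accessToken, nil
}

// Matches the token_type the STS returns for this scheme.
func (p *popScheme) AccessTokenType() string { return "pop" }

func main() {
    s := &popScheme{kid: "example-key-id"}
    header, _ := s.FormatAccessToken("raw-token")
    fmt.Println(header, s.AccessTokenType())
}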
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version/version.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version/version.go
index 2ac2d09e4..eb16b405c 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version/version.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version/version.go
@@ -5,4 +5,4 @@
package version
// Version is the version of this client package that is communicated to the server.
-const Version = "1.1.1"
+const Version = "1.2.0"
diff --git a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/public/public.go b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/public/public.go
index 88b217ded..2221b3d33 100644
--- a/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/public/public.go
+++ b/vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/public/public.go
@@ -47,6 +47,8 @@ import (
// For details see https://aka.ms/msal-net-authenticationresult
type AuthResult = base.AuthResult
+type AuthenticationScheme = authority.AuthenticationScheme
+
type Account = shared.Account
var errNoAccount = errors.New("no account was specified with public.WithAccount(), or the specified account is invalid")
@@ -211,6 +213,33 @@ func WithClaims(claims string) interface {
}
}
+// WithAuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+func WithAuthenticationScheme(authnScheme AuthenticationScheme) interface {
+ AcquireSilentOption
+ AcquireInteractiveOption
+ options.CallOption
+} {
+ return struct {
+ AcquireSilentOption
+ AcquireInteractiveOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenSilentOptions:
+ t.authnScheme = authnScheme
+ case *interactiveAuthOptions:
+ t.authnScheme = authnScheme
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
// WithTenantID specifies a tenant for a single authentication. It may be different than the tenant set in [New] by [WithAuthority].
// This option is valid for any token acquisition method.
func WithTenantID(tenantID string) interface {
@@ -260,6 +289,7 @@ func WithTenantID(tenantID string) interface {
type acquireTokenSilentOptions struct {
account Account
claims, tenantID string
+ authnScheme AuthenticationScheme
}
// AcquireSilentOption is implemented by options for AcquireTokenSilent
@@ -310,6 +340,7 @@ func (pca Client) AcquireTokenSilent(ctx context.Context, scopes []string, opts
RequestType: accesstokens.ATPublic,
IsAppCache: false,
TenantID: o.tenantID,
+ AuthnScheme: o.authnScheme,
}
return pca.base.AcquireTokenSilent(ctx, silentParameters)
@@ -482,6 +513,7 @@ func (pca Client) RemoveAccount(ctx context.Context, account Account) error {
type interactiveAuthOptions struct {
claims, domainHint, loginHint, redirectURI, tenantID string
openURL func(url string) error
+ authnScheme AuthenticationScheme
}
// AcquireInteractiveOption is implemented by options for AcquireTokenInteractive
@@ -628,6 +660,9 @@ func (pca Client) AcquireTokenInteractive(ctx context.Context, scopes []string,
authParams.DomainHint = o.domainHint
authParams.State = uuid.New().String()
authParams.Prompt = "select_account"
+ if o.authnScheme != nil {
+ authParams.AuthnScheme = o.authnScheme
+ }
res, err := pca.browserLogin(ctx, redirectURL, authParams, o.openURL)
if err != nil {
return AuthResult{}, err
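A usage sketch of the new option against the public client API; the client ID and scopes are placeholders, the scheme argument is any implementation of public.AuthenticationScheme (such as the hypothetical popScheme sketched earlier), and error handling is reduced to the essentials:

package example

import (
    "context"
    "fmt"

    "github.com/AzureAD/microsoft-authentication-library-for-go/apps/public"
)

// acquireWithScheme shows how the option threads a custom scheme into an
// interactive token request.
func acquireWithScheme(ctx context.Context, clientID string, scheme public.AuthenticationScheme) error {
    pca, err := public.New(clientID)
    if err != nil {
        return err
    }
    res, err := pca.AcquireTokenInteractive(ctx, []string{"User.Read"},
        public.WithAuthenticationScheme(scheme))
    if err != nil {
        return err
    }
    fmt.Println(res.AccessToken)
    return nil
}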
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/config/CHANGELOG.md b/vendor/github.com/aws/aws-sdk-go-v2/config/CHANGELOG.md
index 774040216..1e86a5a29 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/config/CHANGELOG.md
+++ b/vendor/github.com/aws/aws-sdk-go-v2/config/CHANGELOG.md
@@ -1,3 +1,11 @@
+# v1.18.39 (2023-09-05)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
+# v1.18.38 (2023-08-31)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
# v1.18.37 (2023-08-23)
* No change notes available for this release.
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/config/go_module_metadata.go b/vendor/github.com/aws/aws-sdk-go-v2/config/go_module_metadata.go
index c973bb552..22eff357c 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/config/go_module_metadata.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/config/go_module_metadata.go
@@ -3,4 +3,4 @@
package config
// goModuleVersion is the tagged release for this module
-const goModuleVersion = "1.18.37"
+const goModuleVersion = "1.18.39"
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/credentials/CHANGELOG.md b/vendor/github.com/aws/aws-sdk-go-v2/credentials/CHANGELOG.md
index ef2dd4b2d..f57276250 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/credentials/CHANGELOG.md
+++ b/vendor/github.com/aws/aws-sdk-go-v2/credentials/CHANGELOG.md
@@ -1,3 +1,11 @@
+# v1.13.37 (2023-09-05)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
+# v1.13.36 (2023-08-31)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
# v1.13.35 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/credentials/go_module_metadata.go b/vendor/github.com/aws/aws-sdk-go-v2/credentials/go_module_metadata.go
index eb75bb481..340b36494 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/credentials/go_module_metadata.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/credentials/go_module_metadata.go
@@ -3,4 +3,4 @@
package credentials
// goModuleVersion is the tagged release for this module
-const goModuleVersion = "1.13.35"
+const goModuleVersion = "1.13.37"
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/CHANGELOG.md b/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/CHANGELOG.md
index 4163d3b97..9a6fcf138 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/CHANGELOG.md
+++ b/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/CHANGELOG.md
@@ -1,3 +1,11 @@
+# v1.11.83 (2023-09-05)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
+# v1.11.82 (2023-08-31)
+
+* **Dependency Update**: Updated to the latest SDK module versions
+
# v1.11.81 (2023-08-23)
* **Dependency Update**: Updated to the latest SDK module versions
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/go_module_metadata.go b/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/go_module_metadata.go
index 8d1fed872..9406f67eb 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/go_module_metadata.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/feature/s3/manager/go_module_metadata.go
@@ -3,4 +3,4 @@
package manager
// goModuleVersion is the tagged release for this module
-const goModuleVersion = "1.11.81"
+const goModuleVersion = "1.11.83"
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/CHANGELOG.md b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/CHANGELOG.md
index c7ba9b268..abe977b8b 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/CHANGELOG.md
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/CHANGELOG.md
@@ -1,3 +1,7 @@
+# v1.13.6 (2023-08-31)
+
+* No change notes available for this release.
+
# v1.13.5 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/go_module_metadata.go b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/go_module_metadata.go
index 98050334e..8b835c904 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/go_module_metadata.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/go_module_metadata.go
@@ -3,4 +3,4 @@
package sso
// goModuleVersion is the tagged release for this module
-const goModuleVersion = "1.13.5"
+const goModuleVersion = "1.13.6"
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/internal/endpoints/endpoints.go b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/internal/endpoints/endpoints.go
index 1da26791c..18849bab6 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/sso/internal/endpoints/endpoints.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/sso/internal/endpoints/endpoints.go
@@ -267,6 +267,14 @@ var defaultPartitions = endpoints.Partitions{
Region: "eu-west-3",
},
},
+ endpoints.EndpointKey{
+ Region: "il-central-1",
+ }: endpoints.Endpoint{
+ Hostname: "portal.sso.il-central-1.amazonaws.com",
+ CredentialScope: endpoints.CredentialScope{
+ Region: "il-central-1",
+ },
+ },
endpoints.EndpointKey{
Region: "me-south-1",
}: endpoints.Endpoint{
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/CHANGELOG.md b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/CHANGELOG.md
index 608ac60ce..217821f37 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/CHANGELOG.md
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/CHANGELOG.md
@@ -1,3 +1,7 @@
+# v1.15.6 (2023-09-05)
+
+* No change notes available for this release.
+
# v1.15.5 (2023-08-21)
* **Dependency Update**: Updated to the latest SDK module versions
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/go_module_metadata.go b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/go_module_metadata.go
index 435045414..0375e758b 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/go_module_metadata.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/go_module_metadata.go
@@ -3,4 +3,4 @@
package ssooidc
// goModuleVersion is the tagged release for this module
-const goModuleVersion = "1.15.5"
+const goModuleVersion = "1.15.6"
diff --git a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/internal/endpoints/endpoints.go b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/internal/endpoints/endpoints.go
index 8df344e93..00410fa9d 100644
--- a/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/internal/endpoints/endpoints.go
+++ b/vendor/github.com/aws/aws-sdk-go-v2/service/ssooidc/internal/endpoints/endpoints.go
@@ -267,6 +267,14 @@ var defaultPartitions = endpoints.Partitions{
Region: "eu-west-3",
},
},
+ endpoints.EndpointKey{
+ Region: "il-central-1",
+ }: endpoints.Endpoint{
+ Hostname: "oidc.il-central-1.amazonaws.com",
+ CredentialScope: endpoints.CredentialScope{
+ Region: "il-central-1",
+ },
+ },
endpoints.EndpointKey{
Region: "me-south-1",
}: endpoints.Endpoint{
diff --git a/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/awsinternal.go b/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/awsinternal.go
new file mode 100644
index 000000000..140242dd1
--- /dev/null
+++ b/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/awsinternal.go
@@ -0,0 +1,4 @@
+// DO NOT EDIT
+package corehandlers
+
+const isAwsInternal = ""
\ No newline at end of file
diff --git a/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/user_agent.go b/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/user_agent.go
index ab69c7a6f..ac842c55d 100644
--- a/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/user_agent.go
+++ b/vendor/github.com/aws/aws-sdk-go/aws/corehandlers/user_agent.go
@@ -35,3 +35,13 @@ var AddHostExecEnvUserAgentHander = request.NamedHandler{
request.AddToUserAgent(r, execEnvUAKey+"/"+v)
},
}
+
+var AddAwsInternal = request.NamedHandler{
+ Name: "core.AddAwsInternal",
+ Fn: func(r *request.Request) {
+ if len(isAwsInternal) == 0 {
+ return
+ }
+ request.AddToUserAgent(r, isAwsInternal)
+ },
+}
diff --git a/vendor/github.com/aws/aws-sdk-go/aws/defaults/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/defaults/defaults.go
index 23bb639e0..e39903284 100644
--- a/vendor/github.com/aws/aws-sdk-go/aws/defaults/defaults.go
+++ b/vendor/github.com/aws/aws-sdk-go/aws/defaults/defaults.go
@@ -74,6 +74,7 @@ func Handlers() request.Handlers {
handlers.Validate.PushBackNamed(corehandlers.ValidateEndpointHandler)
handlers.Validate.AfterEachFn = request.HandlerListStopOnError
handlers.Build.PushBackNamed(corehandlers.SDKVersionUserAgentHandler)
+ handlers.Build.PushBackNamed(corehandlers.AddAwsInternal)
handlers.Build.PushBackNamed(corehandlers.AddHostExecEnvUserAgentHander)
handlers.Build.AfterEachFn = request.HandlerListStopOnError
handlers.Sign.PushBackNamed(corehandlers.BuildContentLengthHandler)
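Together, the new corehandlers/awsinternal.go file and these two hunks add a build-phase handler that appends an internal marker to the User-Agent whenever isAwsInternal is non-empty (it is the empty string in the public SDK, so the handler is a no-op for most users). Application code can hook the same extension point; the handler name and token below are examples, not SDK values:

package main

import (
    "github.com/aws/aws-sdk-go/aws/request"
    "github.com/aws/aws-sdk-go/aws/session"
)

func main() {
    sess := session.Must(session.NewSession())
    // A custom build-phase handler, analogous to AddAwsInternal: it appends an
    // extra token to the User-Agent of every request made through this session.
    sess.Handlers.Build.PushBackNamed(request.NamedHandler{
        Name: "example.AddCustomUserAgent",
        Fn: func(r *request.Request) {
            request.AddToUserAgent(r, "my-app/1.2.3")
        },
    })
    // Service clients created from sess inherit the extra handler.
}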
diff --git a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go
index 59582df71..adcbba207 100644
--- a/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go
+++ b/vendor/github.com/aws/aws-sdk-go/aws/endpoints/defaults.go
@@ -5711,6 +5711,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-central-1",
}: endpoint{},
@@ -6282,6 +6285,9 @@ var awsPartition = partition{
endpointKey{
Region: "ap-northeast-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-northeast-3",
+ }: endpoint{},
endpointKey{
Region: "ap-south-1",
}: endpoint{},
@@ -6348,6 +6354,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -6400,6 +6409,9 @@ var awsPartition = partition{
endpointKey{
Region: "ap-northeast-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-northeast-3",
+ }: endpoint{},
endpointKey{
Region: "ap-south-1",
}: endpoint{},
@@ -6466,6 +6478,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -7239,6 +7254,9 @@ var awsPartition = partition{
endpointKey{
Region: "eu-west-3",
}: endpoint{},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -11370,63 +11388,183 @@ var awsPartition = partition{
endpointKey{
Region: "af-south-1",
}: endpoint{},
+ endpointKey{
+ Region: "af-south-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.af-south-1.api.aws",
+ },
endpointKey{
Region: "ap-east-1",
}: endpoint{},
+ endpointKey{
+ Region: "ap-east-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-east-1.api.aws",
+ },
endpointKey{
Region: "ap-northeast-1",
}: endpoint{},
+ endpointKey{
+ Region: "ap-northeast-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-northeast-1.api.aws",
+ },
endpointKey{
Region: "ap-northeast-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-northeast-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-northeast-2.api.aws",
+ },
endpointKey{
Region: "ap-northeast-3",
}: endpoint{},
+ endpointKey{
+ Region: "ap-northeast-3",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-northeast-3.api.aws",
+ },
endpointKey{
Region: "ap-south-1",
}: endpoint{},
+ endpointKey{
+ Region: "ap-south-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-south-1.api.aws",
+ },
endpointKey{
Region: "ap-south-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-south-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-south-2.api.aws",
+ },
endpointKey{
Region: "ap-southeast-1",
}: endpoint{},
+ endpointKey{
+ Region: "ap-southeast-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-southeast-1.api.aws",
+ },
endpointKey{
Region: "ap-southeast-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-southeast-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-southeast-2.api.aws",
+ },
endpointKey{
Region: "ap-southeast-3",
}: endpoint{},
+ endpointKey{
+ Region: "ap-southeast-3",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-southeast-3.api.aws",
+ },
endpointKey{
Region: "ap-southeast-4",
}: endpoint{},
+ endpointKey{
+ Region: "ap-southeast-4",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ap-southeast-4.api.aws",
+ },
endpointKey{
Region: "ca-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "ca-central-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.ca-central-1.api.aws",
+ },
endpointKey{
Region: "eu-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "eu-central-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-central-1.api.aws",
+ },
endpointKey{
Region: "eu-central-2",
}: endpoint{},
+ endpointKey{
+ Region: "eu-central-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-central-2.api.aws",
+ },
endpointKey{
Region: "eu-north-1",
}: endpoint{},
+ endpointKey{
+ Region: "eu-north-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-north-1.api.aws",
+ },
endpointKey{
Region: "eu-south-1",
}: endpoint{},
+ endpointKey{
+ Region: "eu-south-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-south-1.api.aws",
+ },
endpointKey{
Region: "eu-south-2",
}: endpoint{},
+ endpointKey{
+ Region: "eu-south-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-south-2.api.aws",
+ },
endpointKey{
Region: "eu-west-1",
}: endpoint{},
+ endpointKey{
+ Region: "eu-west-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-west-1.api.aws",
+ },
endpointKey{
Region: "eu-west-2",
}: endpoint{},
+ endpointKey{
+ Region: "eu-west-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-west-2.api.aws",
+ },
endpointKey{
Region: "eu-west-3",
}: endpoint{},
+ endpointKey{
+ Region: "eu-west-3",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.eu-west-3.api.aws",
+ },
endpointKey{
Region: "fips",
}: endpoint{
@@ -11439,18 +11577,48 @@ var awsPartition = partition{
endpointKey{
Region: "il-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "il-central-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.il-central-1.api.aws",
+ },
endpointKey{
Region: "me-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "me-central-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.me-central-1.api.aws",
+ },
endpointKey{
Region: "me-south-1",
}: endpoint{},
+ endpointKey{
+ Region: "me-south-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.me-south-1.api.aws",
+ },
endpointKey{
Region: "sa-east-1",
}: endpoint{},
+ endpointKey{
+ Region: "sa-east-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.sa-east-1.api.aws",
+ },
endpointKey{
Region: "us-east-1",
}: endpoint{},
+ endpointKey{
+ Region: "us-east-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-east-1.api.aws",
+ },
endpointKey{
Region: "us-east-1",
Variant: fipsVariant,
@@ -11469,6 +11637,12 @@ var awsPartition = partition{
endpointKey{
Region: "us-east-2",
}: endpoint{},
+ endpointKey{
+ Region: "us-east-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-east-2.api.aws",
+ },
endpointKey{
Region: "us-east-2",
Variant: fipsVariant,
@@ -11487,6 +11661,12 @@ var awsPartition = partition{
endpointKey{
Region: "us-west-1",
}: endpoint{},
+ endpointKey{
+ Region: "us-west-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-west-1.api.aws",
+ },
endpointKey{
Region: "us-west-1",
Variant: fipsVariant,
@@ -11505,6 +11685,12 @@ var awsPartition = partition{
endpointKey{
Region: "us-west-2",
}: endpoint{},
+ endpointKey{
+ Region: "us-west-2",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-west-2.api.aws",
+ },
endpointKey{
Region: "us-west-2",
Variant: fipsVariant,
@@ -13802,6 +13988,9 @@ var awsPartition = partition{
endpointKey{
Region: "eu-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "eu-central-2",
+ }: endpoint{},
endpointKey{
Region: "eu-north-1",
}: endpoint{},
@@ -13817,6 +14006,9 @@ var awsPartition = partition{
endpointKey{
Region: "eu-west-3",
}: endpoint{},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -14257,7 +14449,7 @@ var awsPartition = partition{
Region: "ca-central-1",
Variant: fipsVariant,
}: endpoint{
- Hostname: "internetmonitor-fips.ca-central-1.api.aws",
+ Hostname: "internetmonitor-fips.ca-central-1.amazonaws.com",
},
endpointKey{
Region: "eu-central-1",
@@ -14328,7 +14520,7 @@ var awsPartition = partition{
Region: "us-east-1",
Variant: fipsVariant,
}: endpoint{
- Hostname: "internetmonitor-fips.us-east-1.api.aws",
+ Hostname: "internetmonitor-fips.us-east-1.amazonaws.com",
},
endpointKey{
Region: "us-east-2",
@@ -14339,7 +14531,7 @@ var awsPartition = partition{
Region: "us-east-2",
Variant: fipsVariant,
}: endpoint{
- Hostname: "internetmonitor-fips.us-east-2.api.aws",
+ Hostname: "internetmonitor-fips.us-east-2.amazonaws.com",
},
endpointKey{
Region: "us-west-1",
@@ -14350,7 +14542,7 @@ var awsPartition = partition{
Region: "us-west-1",
Variant: fipsVariant,
}: endpoint{
- Hostname: "internetmonitor-fips.us-west-1.api.aws",
+ Hostname: "internetmonitor-fips.us-west-1.amazonaws.com",
},
endpointKey{
Region: "us-west-2",
@@ -14361,7 +14553,7 @@ var awsPartition = partition{
Region: "us-west-2",
Variant: fipsVariant,
}: endpoint{
- Hostname: "internetmonitor-fips.us-west-2.api.aws",
+ Hostname: "internetmonitor-fips.us-west-2.amazonaws.com",
},
},
},
@@ -15490,6 +15682,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-central-1",
}: endpoint{},
@@ -17413,6 +17608,9 @@ var awsPartition = partition{
endpointKey{
Region: "ap-southeast-2",
}: endpoint{},
+ endpointKey{
+ Region: "ap-southeast-3",
+ }: endpoint{},
endpointKey{
Region: "ca-central-1",
}: endpoint{},
@@ -17470,6 +17668,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -18009,6 +18210,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -18611,6 +18815,9 @@ var awsPartition = partition{
endpointKey{
Region: "eu-central-1",
}: endpoint{},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "us-east-1",
}: endpoint{},
@@ -20169,6 +20376,14 @@ var awsPartition = partition{
Region: "eu-west-3",
},
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{
+ Hostname: "oidc.il-central-1.amazonaws.com",
+ CredentialScope: credentialScope{
+ Region: "il-central-1",
+ },
+ },
endpointKey{
Region: "me-south-1",
}: endpoint{
@@ -21259,6 +21474,14 @@ var awsPartition = partition{
Region: "eu-west-3",
},
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{
+ Hostname: "portal.sso.il-central-1.amazonaws.com",
+ CredentialScope: credentialScope{
+ Region: "il-central-1",
+ },
+ },
endpointKey{
Region: "me-south-1",
}: endpoint{
@@ -27757,6 +27980,9 @@ var awsPartition = partition{
endpointKey{
Region: "eu-west-3",
}: endpoint{},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-south-1",
}: endpoint{},
@@ -29311,6 +29537,9 @@ var awsPartition = partition{
},
Deprecated: boxedTrue,
},
+ endpointKey{
+ Region: "il-central-1",
+ }: endpoint{},
endpointKey{
Region: "me-central-1",
}: endpoint{},
@@ -32478,9 +32707,21 @@ var awscnPartition = partition{
endpointKey{
Region: "cn-north-1",
}: endpoint{},
+ endpointKey{
+ Region: "cn-north-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.cn-north-1.api.amazonwebservices.com.cn",
+ },
endpointKey{
Region: "cn-northwest-1",
}: endpoint{},
+ endpointKey{
+ Region: "cn-northwest-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.cn-northwest-1.api.amazonwebservices.com.cn",
+ },
},
},
"events": service{
@@ -35936,6 +36177,12 @@ var awsusgovPartition = partition{
endpointKey{
Region: "us-gov-east-1",
}: endpoint{},
+ endpointKey{
+ Region: "us-gov-east-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-gov-east-1.api.aws",
+ },
endpointKey{
Region: "us-gov-east-1",
Variant: fipsVariant,
@@ -35954,6 +36201,12 @@ var awsusgovPartition = partition{
endpointKey{
Region: "us-gov-west-1",
}: endpoint{},
+ endpointKey{
+ Region: "us-gov-west-1",
+ Variant: dualStackVariant,
+ }: endpoint{
+ Hostname: "aos.us-gov-west-1.api.aws",
+ },
endpointKey{
Region: "us-gov-west-1",
Variant: fipsVariant,
@@ -36190,6 +36443,28 @@ var awsusgovPartition = partition{
},
},
},
+ "geo": service{
+ Endpoints: serviceEndpoints{
+ endpointKey{
+ Region: "fips-us-gov-west-1",
+ }: endpoint{
+ Hostname: "geo-fips.us-gov-west-1.amazonaws.com",
+ CredentialScope: credentialScope{
+ Region: "us-gov-west-1",
+ },
+ Deprecated: boxedTrue,
+ },
+ endpointKey{
+ Region: "us-gov-west-1",
+ }: endpoint{},
+ endpointKey{
+ Region: "us-gov-west-1",
+ Variant: fipsVariant,
+ }: endpoint{
+ Hostname: "geo-fips.us-gov-west-1.amazonaws.com",
+ },
+ },
+ },
"glacier": service{
Endpoints: serviceEndpoints{
endpointKey{
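The endpoint tables above are data consumed by the SDK's endpoint resolver at request time. A resolution sketch for one of the newly added il-central-1 entries, assuming "oidc" is the service key used in the corresponding hunk:

package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/aws/endpoints"
)

func main() {
    // Resolves to oidc.il-central-1.amazonaws.com with signing region il-central-1,
    // per the entry added above (assuming the "oidc" service key).
    ep, err := endpoints.DefaultResolver().EndpointFor("oidc", "il-central-1")
    if err != nil {
        panic(err)
    }
    fmt.Println(ep.URL, ep.SigningRegion)
}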
diff --git a/vendor/github.com/aws/aws-sdk-go/aws/version.go b/vendor/github.com/aws/aws-sdk-go/aws/version.go
index 96b11c71c..03f58a911 100644
--- a/vendor/github.com/aws/aws-sdk-go/aws/version.go
+++ b/vendor/github.com/aws/aws-sdk-go/aws/version.go
@@ -5,4 +5,4 @@ package aws
const SDKName = "aws-sdk-go"
// SDKVersion is the version of this SDK
-const SDKVersion = "1.44.333"
+const SDKVersion = "1.45.4"
diff --git a/vendor/github.com/gogo/protobuf/jsonpb/jsonpb.go b/vendor/github.com/gogo/protobuf/jsonpb/jsonpb.go
new file mode 100644
index 000000000..e8134ec8b
--- /dev/null
+++ b/vendor/github.com/gogo/protobuf/jsonpb/jsonpb.go
@@ -0,0 +1,1435 @@
+// Go support for Protocol Buffers - Google's data interchange format
+//
+// Copyright 2015 The Go Authors. All rights reserved.
+// https://github.com/golang/protobuf
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/*
+Package jsonpb provides marshaling and unmarshaling between protocol buffers and JSON.
+It follows the specification at https://developers.google.com/protocol-buffers/docs/proto3#json.
+
+This package produces a different output than the standard "encoding/json" package,
+which does not operate correctly on protocol buffers.
+*/
+package jsonpb
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "reflect"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/gogo/protobuf/proto"
+ "github.com/gogo/protobuf/types"
+)
+
+const secondInNanos = int64(time.Second / time.Nanosecond)
+const maxSecondsInDuration = 315576000000
+
+// Marshaler is a configurable object for converting between
+// protocol buffer objects and a JSON representation for them.
+type Marshaler struct {
+ // Whether to render enum values as integers, as opposed to string values.
+ EnumsAsInts bool
+
+ // Whether to render fields with zero values.
+ EmitDefaults bool
+
+ // A string to indent each level by. The presence of this field will
+ // also cause a space to appear between the field separator and
+ // value, and for newlines to be appear between fields and array
+ // elements.
+ Indent string
+
+ // Whether to use the original (.proto) name for fields.
+ OrigName bool
+
+ // A custom URL resolver to use when marshaling Any messages to JSON.
+ // If unset, the default resolution strategy is to extract the
+ // fully-qualified type name from the type URL and pass that to
+ // proto.MessageType(string).
+ AnyResolver AnyResolver
+}
+
+// AnyResolver takes a type URL, present in an Any message, and resolves it into
+// an instance of the associated message.
+type AnyResolver interface {
+ Resolve(typeUrl string) (proto.Message, error)
+}
+
+func defaultResolveAny(typeUrl string) (proto.Message, error) {
+ // Only the part of typeUrl after the last slash is relevant.
+ mname := typeUrl
+ if slash := strings.LastIndex(mname, "/"); slash >= 0 {
+ mname = mname[slash+1:]
+ }
+ mt := proto.MessageType(mname)
+ if mt == nil {
+ return nil, fmt.Errorf("unknown message type %q", mname)
+ }
+ return reflect.New(mt.Elem()).Interface().(proto.Message), nil
+}
+
+// JSONPBMarshaler is implemented by protobuf messages that customize the
+// way they are marshaled to JSON. Messages that implement this should
+// also implement JSONPBUnmarshaler so that the custom format can be
+// parsed.
+//
+// The JSON marshaling must follow the proto to JSON specification:
+// https://developers.google.com/protocol-buffers/docs/proto3#json
+type JSONPBMarshaler interface {
+ MarshalJSONPB(*Marshaler) ([]byte, error)
+}
+
+// JSONPBUnmarshaler is implemented by protobuf messages that customize
+// the way they are unmarshaled from JSON. Messages that implement this
+// should also implement JSONPBMarshaler so that the custom format can be
+// produced.
+//
+// The JSON unmarshaling must follow the JSON to proto specification:
+// https://developers.google.com/protocol-buffers/docs/proto3#json
+type JSONPBUnmarshaler interface {
+ UnmarshalJSONPB(*Unmarshaler, []byte) error
+}
+
+// Marshal marshals a protocol buffer into JSON.
+func (m *Marshaler) Marshal(out io.Writer, pb proto.Message) error {
+ v := reflect.ValueOf(pb)
+ if pb == nil || (v.Kind() == reflect.Ptr && v.IsNil()) {
+ return errors.New("Marshal called with nil")
+ }
+ // Check for unset required fields first.
+ if err := checkRequiredFields(pb); err != nil {
+ return err
+ }
+ writer := &errWriter{writer: out}
+ return m.marshalObject(writer, pb, "", "")
+}
+
+// MarshalToString converts a protocol buffer object to JSON string.
+func (m *Marshaler) MarshalToString(pb proto.Message) (string, error) {
+ var buf bytes.Buffer
+ if err := m.Marshal(&buf, pb); err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+type int32Slice []int32
+
+var nonFinite = map[string]float64{
+ `"NaN"`: math.NaN(),
+ `"Infinity"`: math.Inf(1),
+ `"-Infinity"`: math.Inf(-1),
+}
+
+// For sorting extensions ids to ensure stable output.
+func (s int32Slice) Len() int { return len(s) }
+func (s int32Slice) Less(i, j int) bool { return s[i] < s[j] }
+func (s int32Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+
+type isWkt interface {
+ XXX_WellKnownType() string
+}
+
+var (
+ wktType = reflect.TypeOf((*isWkt)(nil)).Elem()
+ messageType = reflect.TypeOf((*proto.Message)(nil)).Elem()
+)
+
+// marshalObject writes a struct to the Writer.
+func (m *Marshaler) marshalObject(out *errWriter, v proto.Message, indent, typeURL string) error {
+ if jsm, ok := v.(JSONPBMarshaler); ok {
+ b, err := jsm.MarshalJSONPB(m)
+ if err != nil {
+ return err
+ }
+ if typeURL != "" {
+ // we are marshaling this object to an Any type
+ var js map[string]*json.RawMessage
+ if err = json.Unmarshal(b, &js); err != nil {
+ return fmt.Errorf("type %T produced invalid JSON: %v", v, err)
+ }
+ turl, err := json.Marshal(typeURL)
+ if err != nil {
+ return fmt.Errorf("failed to marshal type URL %q to JSON: %v", typeURL, err)
+ }
+ js["@type"] = (*json.RawMessage)(&turl)
+ if m.Indent != "" {
+ b, err = json.MarshalIndent(js, indent, m.Indent)
+ } else {
+ b, err = json.Marshal(js)
+ }
+ if err != nil {
+ return err
+ }
+ }
+
+ out.write(string(b))
+ return out.err
+ }
+
+ s := reflect.ValueOf(v).Elem()
+
+ // Handle well-known types.
+ if wkt, ok := v.(isWkt); ok {
+ switch wkt.XXX_WellKnownType() {
+ case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value",
+ "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue":
+ // "Wrappers use the same representation in JSON
+ // as the wrapped primitive type, ..."
+ sprop := proto.GetProperties(s.Type())
+ return m.marshalValue(out, sprop.Prop[0], s.Field(0), indent)
+ case "Any":
+ // Any is a bit more involved.
+ return m.marshalAny(out, v, indent)
+ case "Duration":
+ s, ns := s.Field(0).Int(), s.Field(1).Int()
+ if s < -maxSecondsInDuration || s > maxSecondsInDuration {
+ return fmt.Errorf("seconds out of range %v", s)
+ }
+ if ns <= -secondInNanos || ns >= secondInNanos {
+ return fmt.Errorf("ns out of range (%v, %v)", -secondInNanos, secondInNanos)
+ }
+ if (s > 0 && ns < 0) || (s < 0 && ns > 0) {
+ return errors.New("signs of seconds and nanos do not match")
+ }
+ // Generated output always contains 0, 3, 6, or 9 fractional digits,
+ // depending on required precision, followed by the suffix "s".
+ f := "%d.%09d"
+ if ns < 0 {
+ ns = -ns
+ if s == 0 {
+ f = "-%d.%09d"
+ }
+ }
+ x := fmt.Sprintf(f, s, ns)
+ x = strings.TrimSuffix(x, "000")
+ x = strings.TrimSuffix(x, "000")
+ x = strings.TrimSuffix(x, ".000")
+ out.write(`"`)
+ out.write(x)
+ out.write(`s"`)
+ return out.err
+ case "Struct", "ListValue":
+ // Let marshalValue handle the `Struct.fields` map or the `ListValue.values` slice.
+ // TODO: pass the correct Properties if needed.
+ return m.marshalValue(out, &proto.Properties{}, s.Field(0), indent)
+ case "Timestamp":
+ // "RFC 3339, where generated output will always be Z-normalized
+ // and uses 0, 3, 6 or 9 fractional digits."
+ s, ns := s.Field(0).Int(), s.Field(1).Int()
+ if ns < 0 || ns >= secondInNanos {
+ return fmt.Errorf("ns out of range [0, %v)", secondInNanos)
+ }
+ t := time.Unix(s, ns).UTC()
+ // time.RFC3339Nano isn't exactly right (we need to get 3/6/9 fractional digits).
+ x := t.Format("2006-01-02T15:04:05.000000000")
+ x = strings.TrimSuffix(x, "000")
+ x = strings.TrimSuffix(x, "000")
+ x = strings.TrimSuffix(x, ".000")
+ out.write(`"`)
+ out.write(x)
+ out.write(`Z"`)
+ return out.err
+ case "Value":
+ // Value has a single oneof.
+ kind := s.Field(0)
+ if kind.IsNil() {
+ // "absence of any variant indicates an error"
+ return errors.New("nil Value")
+ }
+ // oneof -> *T -> T -> T.F
+ x := kind.Elem().Elem().Field(0)
+ // TODO: pass the correct Properties if needed.
+ return m.marshalValue(out, &proto.Properties{}, x, indent)
+ }
+ }
+
+ out.write("{")
+ if m.Indent != "" {
+ out.write("\n")
+ }
+
+ firstField := true
+
+ if typeURL != "" {
+ if err := m.marshalTypeURL(out, indent, typeURL); err != nil {
+ return err
+ }
+ firstField = false
+ }
+
+ for i := 0; i < s.NumField(); i++ {
+ value := s.Field(i)
+ valueField := s.Type().Field(i)
+ if strings.HasPrefix(valueField.Name, "XXX_") {
+ continue
+ }
+
+ // This is not a protobuf field; skip it.
+ if valueField.Tag.Get("protobuf") == "" && valueField.Tag.Get("protobuf_oneof") == "" {
+ continue
+ }
+
+ // IsNil will panic on most value kinds.
+ switch value.Kind() {
+ case reflect.Chan, reflect.Func, reflect.Interface:
+ if value.IsNil() {
+ continue
+ }
+ }
+
+ if !m.EmitDefaults {
+ switch value.Kind() {
+ case reflect.Bool:
+ if !value.Bool() {
+ continue
+ }
+ case reflect.Int32, reflect.Int64:
+ if value.Int() == 0 {
+ continue
+ }
+ case reflect.Uint32, reflect.Uint64:
+ if value.Uint() == 0 {
+ continue
+ }
+ case reflect.Float32, reflect.Float64:
+ if value.Float() == 0 {
+ continue
+ }
+ case reflect.String:
+ if value.Len() == 0 {
+ continue
+ }
+ case reflect.Map, reflect.Ptr, reflect.Slice:
+ if value.IsNil() {
+ continue
+ }
+ }
+ }
+
+ // Oneof fields need special handling.
+ if valueField.Tag.Get("protobuf_oneof") != "" {
+ // value is an interface containing &T{real_value}.
+ sv := value.Elem().Elem() // interface -> *T -> T
+ value = sv.Field(0)
+ valueField = sv.Type().Field(0)
+ }
+ prop := jsonProperties(valueField, m.OrigName)
+ if !firstField {
+ m.writeSep(out)
+ }
+ // If the map value is a cast type, it may not implement proto.Message, therefore
+ // allow the struct tag to declare the underlying message type. Change the property
+ // of the child types, use CustomType as a passer. CastType currently property is
+ // not used in json encoding.
+ if value.Kind() == reflect.Map {
+ if tag := valueField.Tag.Get("protobuf"); tag != "" {
+ for _, v := range strings.Split(tag, ",") {
+ if !strings.HasPrefix(v, "castvaluetype=") {
+ continue
+ }
+ v = strings.TrimPrefix(v, "castvaluetype=")
+ prop.MapValProp.CustomType = v
+ break
+ }
+ }
+ }
+ if err := m.marshalField(out, prop, value, indent); err != nil {
+ return err
+ }
+ firstField = false
+ }
+
+ // Handle proto2 extensions.
+ if ep, ok := v.(proto.Message); ok {
+ extensions := proto.RegisteredExtensions(v)
+ // Sort extensions for stable output.
+ ids := make([]int32, 0, len(extensions))
+ for id, desc := range extensions {
+ if !proto.HasExtension(ep, desc) {
+ continue
+ }
+ ids = append(ids, id)
+ }
+ sort.Sort(int32Slice(ids))
+ for _, id := range ids {
+ desc := extensions[id]
+ if desc == nil {
+ // unknown extension
+ continue
+ }
+ ext, extErr := proto.GetExtension(ep, desc)
+ if extErr != nil {
+ return extErr
+ }
+ value := reflect.ValueOf(ext)
+ var prop proto.Properties
+ prop.Parse(desc.Tag)
+ prop.JSONName = fmt.Sprintf("[%s]", desc.Name)
+ if !firstField {
+ m.writeSep(out)
+ }
+ if err := m.marshalField(out, &prop, value, indent); err != nil {
+ return err
+ }
+ firstField = false
+ }
+
+ }
+
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ }
+ out.write("}")
+ return out.err
+}
+
+func (m *Marshaler) writeSep(out *errWriter) {
+ if m.Indent != "" {
+ out.write(",\n")
+ } else {
+ out.write(",")
+ }
+}
+
+func (m *Marshaler) marshalAny(out *errWriter, any proto.Message, indent string) error {
+ // "If the Any contains a value that has a special JSON mapping,
+ // it will be converted as follows: {"@type": xxx, "value": yyy}.
+ // Otherwise, the value will be converted into a JSON object,
+ // and the "@type" field will be inserted to indicate the actual data type."
+ v := reflect.ValueOf(any).Elem()
+ turl := v.Field(0).String()
+ val := v.Field(1).Bytes()
+
+ var msg proto.Message
+ var err error
+ if m.AnyResolver != nil {
+ msg, err = m.AnyResolver.Resolve(turl)
+ } else {
+ msg, err = defaultResolveAny(turl)
+ }
+ if err != nil {
+ return err
+ }
+
+ if err := proto.Unmarshal(val, msg); err != nil {
+ return err
+ }
+
+ if _, ok := msg.(isWkt); ok {
+ out.write("{")
+ if m.Indent != "" {
+ out.write("\n")
+ }
+ if err := m.marshalTypeURL(out, indent, turl); err != nil {
+ return err
+ }
+ m.writeSep(out)
+ if m.Indent != "" {
+ out.write(indent)
+ out.write(m.Indent)
+ out.write(`"value": `)
+ } else {
+ out.write(`"value":`)
+ }
+ if err := m.marshalObject(out, msg, indent+m.Indent, ""); err != nil {
+ return err
+ }
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ }
+ out.write("}")
+ return out.err
+ }
+
+ return m.marshalObject(out, msg, indent, turl)
+}
+
+func (m *Marshaler) marshalTypeURL(out *errWriter, indent, typeURL string) error {
+ if m.Indent != "" {
+ out.write(indent)
+ out.write(m.Indent)
+ }
+ out.write(`"@type":`)
+ if m.Indent != "" {
+ out.write(" ")
+ }
+ b, err := json.Marshal(typeURL)
+ if err != nil {
+ return err
+ }
+ out.write(string(b))
+ return out.err
+}
+
+// marshalField writes field description and value to the Writer.
+func (m *Marshaler) marshalField(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error {
+ if m.Indent != "" {
+ out.write(indent)
+ out.write(m.Indent)
+ }
+ out.write(`"`)
+ out.write(prop.JSONName)
+ out.write(`":`)
+ if m.Indent != "" {
+ out.write(" ")
+ }
+ if err := m.marshalValue(out, prop, v, indent); err != nil {
+ return err
+ }
+ return nil
+}
+
+// marshalValue writes the value to the Writer.
+func (m *Marshaler) marshalValue(out *errWriter, prop *proto.Properties, v reflect.Value, indent string) error {
+
+ v = reflect.Indirect(v)
+
+ // Handle nil pointer
+ if v.Kind() == reflect.Invalid {
+ out.write("null")
+ return out.err
+ }
+
+ // Handle repeated elements.
+ if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
+ out.write("[")
+ comma := ""
+ for i := 0; i < v.Len(); i++ {
+ sliceVal := v.Index(i)
+ out.write(comma)
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ out.write(m.Indent)
+ out.write(m.Indent)
+ }
+ if err := m.marshalValue(out, prop, sliceVal, indent+m.Indent); err != nil {
+ return err
+ }
+ comma = ","
+ }
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ out.write(m.Indent)
+ }
+ out.write("]")
+ return out.err
+ }
+
+ // Handle well-known types.
+ // Most are handled up in marshalObject (because 99% are messages).
+ if v.Type().Implements(wktType) {
+ wkt := v.Interface().(isWkt)
+ switch wkt.XXX_WellKnownType() {
+ case "NullValue":
+ out.write("null")
+ return out.err
+ }
+ }
+
+ if t, ok := v.Interface().(time.Time); ok {
+ ts, err := types.TimestampProto(t)
+ if err != nil {
+ return err
+ }
+ return m.marshalValue(out, prop, reflect.ValueOf(ts), indent)
+ }
+
+ if d, ok := v.Interface().(time.Duration); ok {
+ dur := types.DurationProto(d)
+ return m.marshalValue(out, prop, reflect.ValueOf(dur), indent)
+ }
+
+ // Handle enumerations.
+ if !m.EnumsAsInts && prop.Enum != "" {
+ // Unknown enum values are stringified by the proto library as their
+ // numeric value. Such values should _not_ be quoted or they will be interpreted
+ // as an enum string instead of their value.
+ enumStr := v.Interface().(fmt.Stringer).String()
+ var valStr string
+ if v.Kind() == reflect.Ptr {
+ valStr = strconv.Itoa(int(v.Elem().Int()))
+ } else {
+ valStr = strconv.Itoa(int(v.Int()))
+ }
+
+ if m, ok := v.Interface().(interface {
+ MarshalJSON() ([]byte, error)
+ }); ok {
+ data, err := m.MarshalJSON()
+ if err != nil {
+ return err
+ }
+ enumStr = string(data)
+ enumStr, err = strconv.Unquote(enumStr)
+ if err != nil {
+ return err
+ }
+ }
+
+ isKnownEnum := enumStr != valStr
+
+ if isKnownEnum {
+ out.write(`"`)
+ }
+ out.write(enumStr)
+ if isKnownEnum {
+ out.write(`"`)
+ }
+ return out.err
+ }
+
+ // Handle nested messages.
+ if v.Kind() == reflect.Struct {
+ i := v
+ if v.CanAddr() {
+ i = v.Addr()
+ } else {
+ i = reflect.New(v.Type())
+ i.Elem().Set(v)
+ }
+ iface := i.Interface()
+ if iface == nil {
+ out.write(`null`)
+ return out.err
+ }
+
+ if m, ok := v.Interface().(interface {
+ MarshalJSON() ([]byte, error)
+ }); ok {
+ data, err := m.MarshalJSON()
+ if err != nil {
+ return err
+ }
+ out.write(string(data))
+ return nil
+ }
+
+ pm, ok := iface.(proto.Message)
+ if !ok {
+ if prop.CustomType == "" {
+ return fmt.Errorf("%v does not implement proto.Message", v.Type())
+ }
+ t := proto.MessageType(prop.CustomType)
+ if t == nil || !i.Type().ConvertibleTo(t) {
+ return fmt.Errorf("%v declared custom type %s but it is not convertible to %v", v.Type(), prop.CustomType, t)
+ }
+ pm = i.Convert(t).Interface().(proto.Message)
+ }
+ return m.marshalObject(out, pm, indent+m.Indent, "")
+ }
+
+ // Handle maps.
+ // Since Go randomizes map iteration, we sort keys for stable output.
+ if v.Kind() == reflect.Map {
+ out.write(`{`)
+ keys := v.MapKeys()
+ sort.Sort(mapKeys(keys))
+ for i, k := range keys {
+ if i > 0 {
+ out.write(`,`)
+ }
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ out.write(m.Indent)
+ out.write(m.Indent)
+ }
+
+ // TODO handle map key prop properly
+ b, err := json.Marshal(k.Interface())
+ if err != nil {
+ return err
+ }
+ s := string(b)
+
+ // If the JSON is not a string value, encode it again to make it one.
+ if !strings.HasPrefix(s, `"`) {
+ b, err := json.Marshal(s)
+ if err != nil {
+ return err
+ }
+ s = string(b)
+ }
+
+ out.write(s)
+ out.write(`:`)
+ if m.Indent != "" {
+ out.write(` `)
+ }
+
+ vprop := prop
+ if prop != nil && prop.MapValProp != nil {
+ vprop = prop.MapValProp
+ }
+ if err := m.marshalValue(out, vprop, v.MapIndex(k), indent+m.Indent); err != nil {
+ return err
+ }
+ }
+ if m.Indent != "" {
+ out.write("\n")
+ out.write(indent)
+ out.write(m.Indent)
+ }
+ out.write(`}`)
+ return out.err
+ }
+
+ // Handle non-finite floats, e.g. NaN, Infinity and -Infinity.
+ if v.Kind() == reflect.Float32 || v.Kind() == reflect.Float64 {
+ f := v.Float()
+ var sval string
+ switch {
+ case math.IsInf(f, 1):
+ sval = `"Infinity"`
+ case math.IsInf(f, -1):
+ sval = `"-Infinity"`
+ case math.IsNaN(f):
+ sval = `"NaN"`
+ }
+ if sval != "" {
+ out.write(sval)
+ return out.err
+ }
+ }
+
+ // Default handling defers to the encoding/json library.
+ b, err := json.Marshal(v.Interface())
+ if err != nil {
+ return err
+ }
+ needToQuote := string(b[0]) != `"` && (v.Kind() == reflect.Int64 || v.Kind() == reflect.Uint64)
+ if needToQuote {
+ out.write(`"`)
+ }
+ out.write(string(b))
+ if needToQuote {
+ out.write(`"`)
+ }
+ return out.err
+}
+
+// Unmarshaler is a configurable object for converting from a JSON
+// representation to a protocol buffer object.
+type Unmarshaler struct {
+ // Whether to allow messages to contain unknown fields, as opposed to
+ // failing to unmarshal.
+ AllowUnknownFields bool
+
+ // A custom URL resolver to use when unmarshaling Any messages from JSON.
+ // If unset, the default resolution strategy is to extract the
+ // fully-qualified type name from the type URL and pass that to
+ // proto.MessageType(string).
+ AnyResolver AnyResolver
+}
+
+// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream.
+// This function is lenient and will decode any options permutations of the
+// related Marshaler.
+func (u *Unmarshaler) UnmarshalNext(dec *json.Decoder, pb proto.Message) error {
+ inputValue := json.RawMessage{}
+ if err := dec.Decode(&inputValue); err != nil {
+ return err
+ }
+ if err := u.unmarshalValue(reflect.ValueOf(pb).Elem(), inputValue, nil); err != nil {
+ return err
+ }
+ return checkRequiredFields(pb)
+}
+
+// Unmarshal unmarshals a JSON object stream into a protocol
+// buffer. This function is lenient and will decode any options
+// permutations of the related Marshaler.
+func (u *Unmarshaler) Unmarshal(r io.Reader, pb proto.Message) error {
+ dec := json.NewDecoder(r)
+ return u.UnmarshalNext(dec, pb)
+}
+
+// UnmarshalNext unmarshals the next protocol buffer from a JSON object stream.
+// This function is lenient and will decode any options permutations of the
+// related Marshaler.
+func UnmarshalNext(dec *json.Decoder, pb proto.Message) error {
+ return new(Unmarshaler).UnmarshalNext(dec, pb)
+}
+
+// Unmarshal unmarshals a JSON object stream into a protocol
+// buffer. This function is lenient and will decode any options
+// permutations of the related Marshaler.
+func Unmarshal(r io.Reader, pb proto.Message) error {
+ return new(Unmarshaler).Unmarshal(r, pb)
+}
+
+// UnmarshalString will populate the fields of a protocol buffer based
+// on a JSON string. This function is lenient and will decode any options
+// permutations of the related Marshaler.
+func UnmarshalString(str string, pb proto.Message) error {
+ return new(Unmarshaler).Unmarshal(strings.NewReader(str), pb)
+}
+
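A brief usage sketch of the Marshaler and the package-level UnmarshalString defined above, round-tripping a well-known Duration value; it relies on the sibling github.com/gogo/protobuf/types package that this file already imports:

package main

import (
    "fmt"
    "time"

    "github.com/gogo/protobuf/jsonpb"
    "github.com/gogo/protobuf/types"
)

func main() {
    m := &jsonpb.Marshaler{}
    d := types.DurationProto(1500 * time.Millisecond)

    s, err := m.MarshalToString(d) // `"1.500s"`, per the Duration rules above
    if err != nil {
        panic(err)
    }
    fmt.Println(s)

    var out types.Duration
    if err := jsonpb.UnmarshalString(s, &out); err != nil {
        panic(err)
    }
    fmt.Println(out.Seconds, out.Nanos) // 1 500000000
}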
+// unmarshalValue converts/copies a value into the target.
+// prop may be nil.
+func (u *Unmarshaler) unmarshalValue(target reflect.Value, inputValue json.RawMessage, prop *proto.Properties) error {
+ targetType := target.Type()
+
+ // Allocate memory for pointer fields.
+ if targetType.Kind() == reflect.Ptr {
+ // If input value is "null" and target is a pointer type, then the field should be treated as not set
+ // UNLESS the target is structpb.Value, in which case it should be set to structpb.NullValue.
+ _, isJSONPBUnmarshaler := target.Interface().(JSONPBUnmarshaler)
+ if string(inputValue) == "null" && targetType != reflect.TypeOf(&types.Value{}) && !isJSONPBUnmarshaler {
+ return nil
+ }
+ target.Set(reflect.New(targetType.Elem()))
+
+ return u.unmarshalValue(target.Elem(), inputValue, prop)
+ }
+
+ if jsu, ok := target.Addr().Interface().(JSONPBUnmarshaler); ok {
+ return jsu.UnmarshalJSONPB(u, []byte(inputValue))
+ }
+
+ // Handle well-known types that are not pointers.
+ if w, ok := target.Addr().Interface().(isWkt); ok {
+ switch w.XXX_WellKnownType() {
+ case "DoubleValue", "FloatValue", "Int64Value", "UInt64Value",
+ "Int32Value", "UInt32Value", "BoolValue", "StringValue", "BytesValue":
+ return u.unmarshalValue(target.Field(0), inputValue, prop)
+ case "Any":
+ // Use json.RawMessage pointer type instead of value to support pre-1.8 version.
+ // 1.8 changed RawMessage.MarshalJSON from pointer type to value type, see
+ // https://github.com/golang/go/issues/14493
+ var jsonFields map[string]*json.RawMessage
+ if err := json.Unmarshal(inputValue, &jsonFields); err != nil {
+ return err
+ }
+
+ val, ok := jsonFields["@type"]
+ if !ok || val == nil {
+ return errors.New("Any JSON doesn't have '@type'")
+ }
+
+ var turl string
+ if err := json.Unmarshal([]byte(*val), &turl); err != nil {
+ return fmt.Errorf("can't unmarshal Any's '@type': %q", *val)
+ }
+ target.Field(0).SetString(turl)
+
+ var m proto.Message
+ var err error
+ if u.AnyResolver != nil {
+ m, err = u.AnyResolver.Resolve(turl)
+ } else {
+ m, err = defaultResolveAny(turl)
+ }
+ if err != nil {
+ return err
+ }
+
+ if _, ok := m.(isWkt); ok {
+ val, ok := jsonFields["value"]
+ if !ok {
+ return errors.New("Any JSON doesn't have 'value'")
+ }
+
+ if err = u.unmarshalValue(reflect.ValueOf(m).Elem(), *val, nil); err != nil {
+ return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err)
+ }
+ } else {
+ delete(jsonFields, "@type")
+ nestedProto, uerr := json.Marshal(jsonFields)
+ if uerr != nil {
+ return fmt.Errorf("can't generate JSON for Any's nested proto to be unmarshaled: %v", uerr)
+ }
+
+ if err = u.unmarshalValue(reflect.ValueOf(m).Elem(), nestedProto, nil); err != nil {
+ return fmt.Errorf("can't unmarshal Any nested proto %T: %v", m, err)
+ }
+ }
+
+ b, err := proto.Marshal(m)
+ if err != nil {
+ return fmt.Errorf("can't marshal proto %T into Any.Value: %v", m, err)
+ }
+ target.Field(1).SetBytes(b)
+
+ return nil
+ case "Duration":
+ unq, err := unquote(string(inputValue))
+ if err != nil {
+ return err
+ }
+
+ d, err := time.ParseDuration(unq)
+ if err != nil {
+ return fmt.Errorf("bad Duration: %v", err)
+ }
+
+ ns := d.Nanoseconds()
+ s := ns / 1e9
+ ns %= 1e9
+ target.Field(0).SetInt(s)
+ target.Field(1).SetInt(ns)
+ return nil
+ case "Timestamp":
+ unq, err := unquote(string(inputValue))
+ if err != nil {
+ return err
+ }
+
+ t, err := time.Parse(time.RFC3339Nano, unq)
+ if err != nil {
+ return fmt.Errorf("bad Timestamp: %v", err)
+ }
+
+ target.Field(0).SetInt(t.Unix())
+ target.Field(1).SetInt(int64(t.Nanosecond()))
+ return nil
+ case "Struct":
+ var m map[string]json.RawMessage
+ if err := json.Unmarshal(inputValue, &m); err != nil {
+ return fmt.Errorf("bad StructValue: %v", err)
+ }
+ target.Field(0).Set(reflect.ValueOf(map[string]*types.Value{}))
+ for k, jv := range m {
+ pv := &types.Value{}
+ if err := u.unmarshalValue(reflect.ValueOf(pv).Elem(), jv, prop); err != nil {
+ return fmt.Errorf("bad value in StructValue for key %q: %v", k, err)
+ }
+ target.Field(0).SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(pv))
+ }
+ return nil
+ case "ListValue":
+ var s []json.RawMessage
+ if err := json.Unmarshal(inputValue, &s); err != nil {
+ return fmt.Errorf("bad ListValue: %v", err)
+ }
+
+ target.Field(0).Set(reflect.ValueOf(make([]*types.Value, len(s))))
+ for i, sv := range s {
+ if err := u.unmarshalValue(target.Field(0).Index(i), sv, prop); err != nil {
+ return err
+ }
+ }
+ return nil
+ case "Value":
+ ivStr := string(inputValue)
+ if ivStr == "null" {
+ target.Field(0).Set(reflect.ValueOf(&types.Value_NullValue{}))
+ } else if v, err := strconv.ParseFloat(ivStr, 0); err == nil {
+ target.Field(0).Set(reflect.ValueOf(&types.Value_NumberValue{NumberValue: v}))
+ } else if v, err := unquote(ivStr); err == nil {
+ target.Field(0).Set(reflect.ValueOf(&types.Value_StringValue{StringValue: v}))
+ } else if v, err := strconv.ParseBool(ivStr); err == nil {
+ target.Field(0).Set(reflect.ValueOf(&types.Value_BoolValue{BoolValue: v}))
+ } else if err := json.Unmarshal(inputValue, &[]json.RawMessage{}); err == nil {
+ lv := &types.ListValue{}
+ target.Field(0).Set(reflect.ValueOf(&types.Value_ListValue{ListValue: lv}))
+ return u.unmarshalValue(reflect.ValueOf(lv).Elem(), inputValue, prop)
+ } else if err := json.Unmarshal(inputValue, &map[string]json.RawMessage{}); err == nil {
+ sv := &types.Struct{}
+ target.Field(0).Set(reflect.ValueOf(&types.Value_StructValue{StructValue: sv}))
+ return u.unmarshalValue(reflect.ValueOf(sv).Elem(), inputValue, prop)
+ } else {
+ return fmt.Errorf("unrecognized type for Value %q", ivStr)
+ }
+ return nil
+ }
+ }
+
+ if t, ok := target.Addr().Interface().(*time.Time); ok {
+ ts := &types.Timestamp{}
+ if err := u.unmarshalValue(reflect.ValueOf(ts).Elem(), inputValue, prop); err != nil {
+ return err
+ }
+ tt, err := types.TimestampFromProto(ts)
+ if err != nil {
+ return err
+ }
+ *t = tt
+ return nil
+ }
+
+ if d, ok := target.Addr().Interface().(*time.Duration); ok {
+ dur := &types.Duration{}
+ if err := u.unmarshalValue(reflect.ValueOf(dur).Elem(), inputValue, prop); err != nil {
+ return err
+ }
+ dd, err := types.DurationFromProto(dur)
+ if err != nil {
+ return err
+ }
+ *d = dd
+ return nil
+ }
+
+ // Handle enums, which have an underlying type of int32,
+ // and may appear as strings.
+ // The case of an enum appearing as a number is handled
+ // at the bottom of this function.
+ if inputValue[0] == '"' && prop != nil && prop.Enum != "" {
+ vmap := proto.EnumValueMap(prop.Enum)
+ // Don't need to do unquoting; valid enum names
+ // are from a limited character set.
+ s := inputValue[1 : len(inputValue)-1]
+ n, ok := vmap[string(s)]
+ if !ok {
+ return fmt.Errorf("unknown value %q for enum %s", s, prop.Enum)
+ }
+ if target.Kind() == reflect.Ptr { // proto2
+ target.Set(reflect.New(targetType.Elem()))
+ target = target.Elem()
+ }
+ if targetType.Kind() != reflect.Int32 {
+ return fmt.Errorf("invalid target %q for enum %s", targetType.Kind(), prop.Enum)
+ }
+ target.SetInt(int64(n))
+ return nil
+ }
+
+ if prop != nil && len(prop.CustomType) > 0 && target.CanAddr() {
+ if m, ok := target.Addr().Interface().(interface {
+ UnmarshalJSON([]byte) error
+ }); ok {
+ return json.Unmarshal(inputValue, m)
+ }
+ }
+
+ // Handle nested messages.
+ if targetType.Kind() == reflect.Struct {
+ var jsonFields map[string]json.RawMessage
+ if err := json.Unmarshal(inputValue, &jsonFields); err != nil {
+ return err
+ }
+
+ consumeField := func(prop *proto.Properties) (json.RawMessage, bool) {
+ // Be liberal in what names we accept; both orig_name and camelName are okay.
+ fieldNames := acceptedJSONFieldNames(prop)
+
+ vOrig, okOrig := jsonFields[fieldNames.orig]
+ vCamel, okCamel := jsonFields[fieldNames.camel]
+ if !okOrig && !okCamel {
+ return nil, false
+ }
+ // If, for some reason, both are present in the data, favour the camelName.
+ var raw json.RawMessage
+ if okOrig {
+ raw = vOrig
+ delete(jsonFields, fieldNames.orig)
+ }
+ if okCamel {
+ raw = vCamel
+ delete(jsonFields, fieldNames.camel)
+ }
+ return raw, true
+ }
+
+ sprops := proto.GetProperties(targetType)
+ for i := 0; i < target.NumField(); i++ {
+ ft := target.Type().Field(i)
+ if strings.HasPrefix(ft.Name, "XXX_") {
+ continue
+ }
+ valueForField, ok := consumeField(sprops.Prop[i])
+ if !ok {
+ continue
+ }
+
+ if err := u.unmarshalValue(target.Field(i), valueForField, sprops.Prop[i]); err != nil {
+ return err
+ }
+ }
+ // Check for any oneof fields.
+ if len(jsonFields) > 0 {
+ for _, oop := range sprops.OneofTypes {
+ raw, ok := consumeField(oop.Prop)
+ if !ok {
+ continue
+ }
+ nv := reflect.New(oop.Type.Elem())
+ target.Field(oop.Field).Set(nv)
+ if err := u.unmarshalValue(nv.Elem().Field(0), raw, oop.Prop); err != nil {
+ return err
+ }
+ }
+ }
+ // Handle proto2 extensions.
+ if len(jsonFields) > 0 {
+ if ep, ok := target.Addr().Interface().(proto.Message); ok {
+ for _, ext := range proto.RegisteredExtensions(ep) {
+ name := fmt.Sprintf("[%s]", ext.Name)
+ raw, ok := jsonFields[name]
+ if !ok {
+ continue
+ }
+ delete(jsonFields, name)
+ nv := reflect.New(reflect.TypeOf(ext.ExtensionType).Elem())
+ if err := u.unmarshalValue(nv.Elem(), raw, nil); err != nil {
+ return err
+ }
+ if err := proto.SetExtension(ep, ext, nv.Interface()); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ if !u.AllowUnknownFields && len(jsonFields) > 0 {
+ // Pick any field to be the scapegoat.
+ var f string
+ for fname := range jsonFields {
+ f = fname
+ break
+ }
+ return fmt.Errorf("unknown field %q in %v", f, targetType)
+ }
+ return nil
+ }
+
+ // Handle arrays
+ if targetType.Kind() == reflect.Slice {
+ if targetType.Elem().Kind() == reflect.Uint8 {
+ outRef := reflect.New(targetType)
+ outVal := outRef.Interface()
+ // CustomType with underlying type []byte
+ if _, ok := outVal.(interface {
+ UnmarshalJSON([]byte) error
+ }); ok {
+ if err := json.Unmarshal(inputValue, outVal); err != nil {
+ return err
+ }
+ target.Set(outRef.Elem())
+ return nil
+ }
+ // Special case for encoded bytes. Pre-go1.5 doesn't support unmarshalling
+ // strings into aliased []byte types.
+ // https://github.com/golang/go/commit/4302fd0409da5e4f1d71471a6770dacdc3301197
+ // https://github.com/golang/go/commit/c60707b14d6be26bf4213114d13070bff00d0b0a
+ var out []byte
+ if err := json.Unmarshal(inputValue, &out); err != nil {
+ return err
+ }
+ target.SetBytes(out)
+ return nil
+ }
+
+ var slc []json.RawMessage
+ if err := json.Unmarshal(inputValue, &slc); err != nil {
+ return err
+ }
+ if slc != nil {
+ l := len(slc)
+ target.Set(reflect.MakeSlice(targetType, l, l))
+ for i := 0; i < l; i++ {
+ if err := u.unmarshalValue(target.Index(i), slc[i], prop); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+
+ // Handle maps (whose keys are always strings)
+ if targetType.Kind() == reflect.Map {
+ var mp map[string]json.RawMessage
+ if err := json.Unmarshal(inputValue, &mp); err != nil {
+ return err
+ }
+ if mp != nil {
+ target.Set(reflect.MakeMap(targetType))
+ for ks, raw := range mp {
+ // Unmarshal map key. The core json library already decoded the key into a
+ // string, so we handle that specially. Other types were quoted post-serialization.
+ var k reflect.Value
+ if targetType.Key().Kind() == reflect.String {
+ k = reflect.ValueOf(ks)
+ } else {
+ k = reflect.New(targetType.Key()).Elem()
+ var kprop *proto.Properties
+ if prop != nil && prop.MapKeyProp != nil {
+ kprop = prop.MapKeyProp
+ }
+ if err := u.unmarshalValue(k, json.RawMessage(ks), kprop); err != nil {
+ return err
+ }
+ }
+
+ if !k.Type().AssignableTo(targetType.Key()) {
+ k = k.Convert(targetType.Key())
+ }
+
+ // Unmarshal map value.
+ v := reflect.New(targetType.Elem()).Elem()
+ var vprop *proto.Properties
+ if prop != nil && prop.MapValProp != nil {
+ vprop = prop.MapValProp
+ }
+ if err := u.unmarshalValue(v, raw, vprop); err != nil {
+ return err
+ }
+ target.SetMapIndex(k, v)
+ }
+ }
+ return nil
+ }
+
+ // Non-finite numbers can be encoded as strings.
+ isFloat := targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64
+ if isFloat {
+ if num, ok := nonFinite[string(inputValue)]; ok {
+ target.SetFloat(num)
+ return nil
+ }
+ }
+
+ // Integers and floats can be encoded as strings. In this case we drop
+ // the quotes and proceed as normal.
+ isNum := targetType.Kind() == reflect.Int64 || targetType.Kind() == reflect.Uint64 ||
+ targetType.Kind() == reflect.Int32 || targetType.Kind() == reflect.Uint32 ||
+ targetType.Kind() == reflect.Float32 || targetType.Kind() == reflect.Float64
+ if isNum && strings.HasPrefix(string(inputValue), `"`) {
+ inputValue = inputValue[1 : len(inputValue)-1]
+ }
+
+ // Use the encoding/json for parsing other value types.
+ return json.Unmarshal(inputValue, target.Addr().Interface())
+}
+
+func unquote(s string) (string, error) {
+ var ret string
+ err := json.Unmarshal([]byte(s), &ret)
+ return ret, err
+}
+
+// jsonProperties returns parsed proto.Properties for the field and corrects JSONName attribute.
+func jsonProperties(f reflect.StructField, origName bool) *proto.Properties {
+ var prop proto.Properties
+ prop.Init(f.Type, f.Name, f.Tag.Get("protobuf"), &f)
+ if origName || prop.JSONName == "" {
+ prop.JSONName = prop.OrigName
+ }
+ return &prop
+}
+
+type fieldNames struct {
+ orig, camel string
+}
+
+func acceptedJSONFieldNames(prop *proto.Properties) fieldNames {
+ opts := fieldNames{orig: prop.OrigName, camel: prop.OrigName}
+ if prop.JSONName != "" {
+ opts.camel = prop.JSONName
+ }
+ return opts
+}
+
+// Writer wrapper inspired by https://blog.golang.org/errors-are-values
+type errWriter struct {
+ writer io.Writer
+ err error
+}
+
+func (w *errWriter) write(str string) {
+ if w.err != nil {
+ return
+ }
+ _, w.err = w.writer.Write([]byte(str))
+}
+
+// Map fields may have key types of non-float scalars, strings and enums.
+// The easiest way to sort them in some deterministic order is to use fmt.
+// If this turns out to be inefficient we can always consider other options,
+// such as doing a Schwartzian transform.
+//
+// Numeric keys are sorted in numeric order per
+// https://developers.google.com/protocol-buffers/docs/proto#maps.
+type mapKeys []reflect.Value
+
+func (s mapKeys) Len() int { return len(s) }
+func (s mapKeys) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
+func (s mapKeys) Less(i, j int) bool {
+ if k := s[i].Kind(); k == s[j].Kind() {
+ switch k {
+ case reflect.String:
+ return s[i].String() < s[j].String()
+ case reflect.Int32, reflect.Int64:
+ return s[i].Int() < s[j].Int()
+ case reflect.Uint32, reflect.Uint64:
+ return s[i].Uint() < s[j].Uint()
+ }
+ }
+ return fmt.Sprint(s[i].Interface()) < fmt.Sprint(s[j].Interface())
+}
+
+// checkRequiredFields returns an error if any required field in the given proto message is not set.
+// This function is used by both Marshal and Unmarshal. While required fields only exist in a
+// proto2 message, a proto3 message can contain proto2 message(s).
+func checkRequiredFields(pb proto.Message) error {
+ // Most well-known type messages do not contain required fields. The "Any" type may contain
+ // a message that has required fields.
+ //
+ // When an Any message is being marshaled, the code will invoke proto.Unmarshal on the Any.Value
+ // field in order to transform that into JSON, and that should have returned an error if a
+ // required field is not set in the embedded message.
+ //
+ // When an Any message is being unmarshaled, the code will have invoked proto.Marshal on the
+ // embedded message to store the serialized message in Any.Value field, and that should have
+ // returned an error if a required field is not set.
+ if _, ok := pb.(isWkt); ok {
+ return nil
+ }
+
+ v := reflect.ValueOf(pb)
+ // Skip message if it is not a struct pointer.
+ if v.Kind() != reflect.Ptr {
+ return nil
+ }
+ v = v.Elem()
+ if v.Kind() != reflect.Struct {
+ return nil
+ }
+
+ for i := 0; i < v.NumField(); i++ {
+ field := v.Field(i)
+ sfield := v.Type().Field(i)
+
+ if sfield.PkgPath != "" {
+ // blank PkgPath means the field is exported; skip if not exported
+ continue
+ }
+
+ if strings.HasPrefix(sfield.Name, "XXX_") {
+ continue
+ }
+
+ // Oneof field is an interface implemented by wrapper structs containing the actual oneof
+ // field, i.e. an interface containing &T{real_value}.
+ if sfield.Tag.Get("protobuf_oneof") != "" {
+ if field.Kind() != reflect.Interface {
+ continue
+ }
+ v := field.Elem()
+ if v.Kind() != reflect.Ptr || v.IsNil() {
+ continue
+ }
+ v = v.Elem()
+ if v.Kind() != reflect.Struct || v.NumField() < 1 {
+ continue
+ }
+ field = v.Field(0)
+ sfield = v.Type().Field(0)
+ }
+
+ protoTag := sfield.Tag.Get("protobuf")
+ if protoTag == "" {
+ continue
+ }
+ var prop proto.Properties
+ prop.Init(sfield.Type, sfield.Name, protoTag, &sfield)
+
+ switch field.Kind() {
+ case reflect.Map:
+ if field.IsNil() {
+ continue
+ }
+ // Check each map value.
+ keys := field.MapKeys()
+ for _, k := range keys {
+ v := field.MapIndex(k)
+ if err := checkRequiredFieldsInValue(v); err != nil {
+ return err
+ }
+ }
+ case reflect.Slice:
+ // Handle non-repeated type, e.g. bytes.
+ if !prop.Repeated {
+ if prop.Required && field.IsNil() {
+ return fmt.Errorf("required field %q is not set", prop.Name)
+ }
+ continue
+ }
+
+ // Handle repeated type.
+ if field.IsNil() {
+ continue
+ }
+ // Check each slice item.
+ for i := 0; i < field.Len(); i++ {
+ v := field.Index(i)
+ if err := checkRequiredFieldsInValue(v); err != nil {
+ return err
+ }
+ }
+ case reflect.Ptr:
+ if field.IsNil() {
+ if prop.Required {
+ return fmt.Errorf("required field %q is not set", prop.Name)
+ }
+ continue
+ }
+ if err := checkRequiredFieldsInValue(field); err != nil {
+ return err
+ }
+ }
+ }
+
+ // Handle proto2 extensions.
+ for _, ext := range proto.RegisteredExtensions(pb) {
+ if !proto.HasExtension(pb, ext) {
+ continue
+ }
+ ep, err := proto.GetExtension(pb, ext)
+ if err != nil {
+ return err
+ }
+ err = checkRequiredFieldsInValue(reflect.ValueOf(ep))
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func checkRequiredFieldsInValue(v reflect.Value) error {
+ if v.Type().Implements(messageType) {
+ return checkRequiredFields(v.Interface().(proto.Message))
+ }
+ return nil
+}
diff --git a/vendor/github.com/google/s2a-go/README.md b/vendor/github.com/google/s2a-go/README.md
index d566950f3..fe0f5c1da 100644
--- a/vendor/github.com/google/s2a-go/README.md
+++ b/vendor/github.com/google/s2a-go/README.md
@@ -10,8 +10,5 @@ Session Agent during the TLS handshake, and to encrypt traffic to the peer
after the TLS handshake is complete.
This repository contains the source code for the Secure Session Agent's Go
-client libraries, which allow gRPC-Go applications to use the Secure Session
-Agent. This repository supports the Bazel and Golang build systems.
-
-All code in this repository is experimental and subject to change. We do not
-guarantee API stability at this time.
+client libraries, which allow gRPC and HTTP Go applications to use the Secure Session
+Agent.
diff --git a/vendor/github.com/google/s2a-go/internal/handshaker/service/service.go b/vendor/github.com/google/s2a-go/internal/handshaker/service/service.go
index 49573af88..ed4496537 100644
--- a/vendor/github.com/google/s2a-go/internal/handshaker/service/service.go
+++ b/vendor/github.com/google/s2a-go/internal/handshaker/service/service.go
@@ -21,50 +21,27 @@ package service
import (
"context"
- "net"
- "os"
- "strings"
"sync"
- "time"
- "google.golang.org/appengine"
- "google.golang.org/appengine/socket"
grpc "google.golang.org/grpc"
- "google.golang.org/grpc/grpclog"
+ "google.golang.org/grpc/credentials"
+ "google.golang.org/grpc/credentials/insecure"
)
-// An environment variable, if true, opportunistically use AppEngine-specific dialer to call S2A.
-const enableAppEngineDialerEnv = "S2A_ENABLE_APP_ENGINE_DIALER"
-
var (
- // appEngineDialerHook is an AppEngine-specific dial option that is set
- // during init time. If nil, then the application is not running on Google
- // AppEngine.
- appEngineDialerHook func(context.Context) grpc.DialOption
// mu guards hsConnMap and hsDialer.
mu sync.Mutex
// hsConnMap represents a mapping from an S2A handshaker service address
// to a corresponding connection to an S2A handshaker service instance.
hsConnMap = make(map[string]*grpc.ClientConn)
// hsDialer will be reassigned in tests.
- hsDialer = grpc.Dial
+ hsDialer = grpc.DialContext
)
-func init() {
- if !appengine.IsAppEngine() && !appengine.IsDevAppServer() {
- return
- }
- appEngineDialerHook = func(ctx context.Context) grpc.DialOption {
- return grpc.WithDialer(func(addr string, timeout time.Duration) (net.Conn, error) {
- return socket.DialTimeout(ctx, "tcp", addr, timeout)
- })
- }
-}
-
// Dial dials the S2A handshaker service. If a connection has already been
// established, this function returns it. Otherwise, a new connection is
// created.
-func Dial(handshakerServiceAddress string) (*grpc.ClientConn, error) {
+func Dial(ctx context.Context, handshakerServiceAddress string, transportCreds credentials.TransportCredentials) (*grpc.ClientConn, error) {
mu.Lock()
defer mu.Unlock()
@@ -72,17 +49,14 @@ func Dial(handshakerServiceAddress string) (*grpc.ClientConn, error) {
if !ok {
// Create a new connection to the S2A handshaker service. Note that
// this connection stays open until the application is closed.
- grpcOpts := []grpc.DialOption{
- grpc.WithInsecure(),
- }
- if enableAppEngineDialer() && appEngineDialerHook != nil {
- if grpclog.V(1) {
- grpclog.Info("Using AppEngine-specific dialer to talk to S2A.")
- }
- grpcOpts = append(grpcOpts, appEngineDialerHook(context.Background()))
+ var grpcOpts []grpc.DialOption
+ if transportCreds != nil {
+ grpcOpts = append(grpcOpts, grpc.WithTransportCredentials(transportCreds))
+ } else {
+ grpcOpts = append(grpcOpts, grpc.WithTransportCredentials(insecure.NewCredentials()))
}
var err error
- hsConn, err = hsDialer(handshakerServiceAddress, grpcOpts...)
+ hsConn, err = hsDialer(ctx, handshakerServiceAddress, grpcOpts...)
if err != nil {
return nil, err
}
@@ -90,10 +64,3 @@ func Dial(handshakerServiceAddress string) (*grpc.ClientConn, error) {
}
return hsConn, nil
}
-
-func enableAppEngineDialer() bool {
- if strings.ToLower(os.Getenv(enableAppEngineDialerEnv)) == "true" {
- return true
- }
- return false
-}
diff --git a/vendor/github.com/google/s2a-go/internal/record/ticketsender.go b/vendor/github.com/google/s2a-go/internal/record/ticketsender.go
index 33fa3c55d..e51199ab3 100644
--- a/vendor/github.com/google/s2a-go/internal/record/ticketsender.go
+++ b/vendor/github.com/google/s2a-go/internal/record/ticketsender.go
@@ -83,13 +83,15 @@ func (t *ticketSender) sendTicketsToS2A(sessionTickets [][]byte, callComplete ch
t.ensureProcessSessionTickets.Done()
}
}()
- hsConn, err := service.Dial(t.hsAddr)
+ ctx, cancel := context.WithTimeout(context.Background(), sessionTimeout)
+ defer cancel()
+ // The transportCreds only needs to be set when talking to S2Av2, and only
+ // if mTLS is required.
+ hsConn, err := service.Dial(ctx, t.hsAddr, nil)
if err != nil {
return err
}
client := s2apb.NewS2AServiceClient(hsConn)
- ctx, cancel := context.WithTimeout(context.Background(), sessionTimeout)
- defer cancel()
session, err := client.SetUpSession(ctx)
if err != nil {
return err
diff --git a/vendor/github.com/google/s2a-go/internal/v2/s2av2.go b/vendor/github.com/google/s2a-go/internal/v2/s2av2.go
index 26fac02dc..85a8379d8 100644
--- a/vendor/github.com/google/s2a-go/internal/v2/s2av2.go
+++ b/vendor/github.com/google/s2a-go/internal/v2/s2av2.go
@@ -52,11 +52,12 @@ const (
const s2aTimeoutEnv = "S2A_TIMEOUT"
type s2av2TransportCreds struct {
- info *credentials.ProtocolInfo
- isClient bool
- serverName string
- s2av2Address string
- tokenManager *tokenmanager.AccessTokenManager
+ info *credentials.ProtocolInfo
+ isClient bool
+ serverName string
+ s2av2Address string
+ transportCreds credentials.TransportCredentials
+ tokenManager *tokenmanager.AccessTokenManager
// localIdentity should only be used by the client.
localIdentity *commonpbv1.Identity
// localIdentities should only be used by the server.
@@ -69,7 +70,7 @@ type s2av2TransportCreds struct {
// NewClientCreds returns a client-side transport credentials object that uses
// the S2Av2 to establish a secure connection with a server.
-func NewClientCreds(s2av2Address string, localIdentity *commonpbv1.Identity, verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode, fallbackClientHandshakeFunc fallback.ClientHandshake, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error), serverAuthorizationPolicy []byte) (credentials.TransportCredentials, error) {
+func NewClientCreds(s2av2Address string, transportCreds credentials.TransportCredentials, localIdentity *commonpbv1.Identity, verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode, fallbackClientHandshakeFunc fallback.ClientHandshake, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error), serverAuthorizationPolicy []byte) (credentials.TransportCredentials, error) {
// Create an AccessTokenManager instance to use to authenticate to S2Av2.
accessTokenManager, err := tokenmanager.NewSingleTokenAccessTokenManager()
@@ -80,6 +81,7 @@ func NewClientCreds(s2av2Address string, localIdentity *commonpbv1.Identity, ver
isClient: true,
serverName: "",
s2av2Address: s2av2Address,
+ transportCreds: transportCreds,
localIdentity: localIdentity,
verificationMode: verificationMode,
fallbackClientHandshake: fallbackClientHandshakeFunc,
@@ -99,7 +101,7 @@ func NewClientCreds(s2av2Address string, localIdentity *commonpbv1.Identity, ver
// NewServerCreds returns a server-side transport credentials object that uses
// the S2Av2 to establish a secure connection with a client.
-func NewServerCreds(s2av2Address string, localIdentities []*commonpbv1.Identity, verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error)) (credentials.TransportCredentials, error) {
+func NewServerCreds(s2av2Address string, transportCreds credentials.TransportCredentials, localIdentities []*commonpbv1.Identity, verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error)) (credentials.TransportCredentials, error) {
// Create an AccessTokenManager instance to use to authenticate to S2Av2.
accessTokenManager, err := tokenmanager.NewSingleTokenAccessTokenManager()
creds := &s2av2TransportCreds{
@@ -108,6 +110,7 @@ func NewServerCreds(s2av2Address string, localIdentities []*commonpbv1.Identity,
},
isClient: false,
s2av2Address: s2av2Address,
+ transportCreds: transportCreds,
localIdentities: localIdentities,
verificationMode: verificationMode,
getS2AStream: getS2AStream,
@@ -136,7 +139,7 @@ func (c *s2av2TransportCreds) ClientHandshake(ctx context.Context, serverAuthori
var err error
retry.Run(timeoutCtx,
func() error {
- s2AStream, err = createStream(timeoutCtx, c.s2av2Address, c.getS2AStream)
+ s2AStream, err = createStream(timeoutCtx, c.s2av2Address, c.transportCreds, c.getS2AStream)
return err
})
if err != nil {
@@ -210,7 +213,7 @@ func (c *s2av2TransportCreds) ServerHandshake(rawConn net.Conn) (net.Conn, crede
var err error
retry.Run(ctx,
func() error {
- s2AStream, err = createStream(ctx, c.s2av2Address, c.getS2AStream)
+ s2AStream, err = createStream(ctx, c.s2av2Address, c.transportCreds, c.getS2AStream)
return err
})
if err != nil {
@@ -311,11 +314,12 @@ func (c *s2av2TransportCreds) Clone() credentials.TransportCredentials {
func NewClientTLSConfig(
ctx context.Context,
s2av2Address string,
+ transportCreds credentials.TransportCredentials,
tokenManager tokenmanager.AccessTokenManager,
verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode,
serverName string,
serverAuthorizationPolicy []byte) (*tls.Config, error) {
- s2AStream, err := createStream(ctx, s2av2Address, nil)
+ s2AStream, err := createStream(ctx, s2av2Address, transportCreds, nil)
if err != nil {
grpclog.Infof("Failed to connect to S2Av2: %v", err)
return nil, err
@@ -358,12 +362,12 @@ func (x s2AGrpcStream) CloseSend() error {
return x.stream.CloseSend()
}
-func createStream(ctx context.Context, s2av2Address string, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error)) (stream.S2AStream, error) {
+func createStream(ctx context.Context, s2av2Address string, transportCreds credentials.TransportCredentials, getS2AStream func(ctx context.Context, s2av2Address string) (stream.S2AStream, error)) (stream.S2AStream, error) {
if getS2AStream != nil {
return getS2AStream(ctx, s2av2Address)
}
// TODO(rmehta19): Consider whether to close the connection to S2Av2.
- conn, err := service.Dial(s2av2Address)
+ conn, err := service.Dial(ctx, s2av2Address, transportCreds)
if err != nil {
return nil, err
}
diff --git a/vendor/github.com/google/s2a-go/retry/retry.go b/vendor/github.com/google/s2a-go/retry/retry.go
index 224915f4d..f7e0a2377 100644
--- a/vendor/github.com/google/s2a-go/retry/retry.go
+++ b/vendor/github.com/google/s2a-go/retry/retry.go
@@ -120,9 +120,9 @@ func Run(ctx context.Context, f func() error) {
}
break
}
- if sleepErr := Sleep(ctx, bo); sleepErr != nil {
+ if errSleep := Sleep(ctx, bo); errSleep != nil {
if grpclog.V(1) {
- grpclog.Infof("exit retry loop due to sleep error: %v", sleepErr)
+ grpclog.Infof("exit retry loop due to sleep error: %v", errSleep)
}
break
}
diff --git a/vendor/github.com/google/s2a-go/s2a.go b/vendor/github.com/google/s2a-go/s2a.go
index d684c2c73..5ecb06f93 100644
--- a/vendor/github.com/google/s2a-go/s2a.go
+++ b/vendor/github.com/google/s2a-go/s2a.go
@@ -112,7 +112,7 @@ func NewClientCreds(opts *ClientOptions) (credentials.TransportCredentials, erro
if opts.FallbackOpts != nil && opts.FallbackOpts.FallbackClientHandshakeFunc != nil {
fallbackFunc = opts.FallbackOpts.FallbackClientHandshakeFunc
}
- return v2.NewClientCreds(opts.S2AAddress, localIdentity, verificationMode, fallbackFunc, opts.getS2AStream, opts.serverAuthorizationPolicy)
+ return v2.NewClientCreds(opts.S2AAddress, opts.TransportCreds, localIdentity, verificationMode, fallbackFunc, opts.getS2AStream, opts.serverAuthorizationPolicy)
}
// NewServerCreds returns a server-side transport credentials object that uses
@@ -147,7 +147,7 @@ func NewServerCreds(opts *ServerOptions) (credentials.TransportCredentials, erro
}, nil
}
verificationMode := getVerificationMode(opts.VerificationMode)
- return v2.NewServerCreds(opts.S2AAddress, localIdentities, verificationMode, opts.getS2AStream)
+ return v2.NewServerCreds(opts.S2AAddress, opts.TransportCreds, localIdentities, verificationMode, opts.getS2AStream)
}
// ClientHandshake initiates a client-side TLS handshake using the S2A.
@@ -156,17 +156,17 @@ func (c *s2aTransportCreds) ClientHandshake(ctx context.Context, serverAuthority
return nil, nil, errors.New("client handshake called using server transport credentials")
}
+ var cancel context.CancelFunc
+ ctx, cancel = context.WithCancel(ctx)
+ defer cancel()
+
// Connect to the S2A.
- hsConn, err := service.Dial(c.s2aAddr)
+ hsConn, err := service.Dial(ctx, c.s2aAddr, nil)
if err != nil {
grpclog.Infof("Failed to connect to S2A: %v", err)
return nil, nil, err
}
- var cancel context.CancelFunc
- ctx, cancel = context.WithCancel(ctx)
- defer cancel()
-
opts := &handshaker.ClientHandshakerOptions{
MinTLSVersion: c.minTLSVersion,
MaxTLSVersion: c.maxTLSVersion,
@@ -204,16 +204,16 @@ func (c *s2aTransportCreds) ServerHandshake(rawConn net.Conn) (net.Conn, credent
return nil, nil, errors.New("server handshake called using client transport credentials")
}
+ ctx, cancel := context.WithTimeout(context.Background(), defaultTimeout)
+ defer cancel()
+
// Connect to the S2A.
- hsConn, err := service.Dial(c.s2aAddr)
+ hsConn, err := service.Dial(ctx, c.s2aAddr, nil)
if err != nil {
grpclog.Infof("Failed to connect to S2A: %v", err)
return nil, nil, err
}
- ctx, cancel := context.WithTimeout(context.Background(), defaultTimeout)
- defer cancel()
-
opts := &handshaker.ServerHandshakerOptions{
MinTLSVersion: c.minTLSVersion,
MaxTLSVersion: c.maxTLSVersion,
@@ -313,6 +313,7 @@ func NewTLSClientConfigFactory(opts *ClientOptions) (TLSClientConfigFactory, err
grpclog.Infof("Access token manager not initialized: %v", err)
return &s2aTLSClientConfigFactory{
s2av2Address: opts.S2AAddress,
+ transportCreds: opts.TransportCreds,
tokenManager: nil,
verificationMode: getVerificationMode(opts.VerificationMode),
serverAuthorizationPolicy: opts.serverAuthorizationPolicy,
@@ -320,6 +321,7 @@ func NewTLSClientConfigFactory(opts *ClientOptions) (TLSClientConfigFactory, err
}
return &s2aTLSClientConfigFactory{
s2av2Address: opts.S2AAddress,
+ transportCreds: opts.TransportCreds,
tokenManager: tokenManager,
verificationMode: getVerificationMode(opts.VerificationMode),
serverAuthorizationPolicy: opts.serverAuthorizationPolicy,
@@ -328,6 +330,7 @@ func NewTLSClientConfigFactory(opts *ClientOptions) (TLSClientConfigFactory, err
type s2aTLSClientConfigFactory struct {
s2av2Address string
+ transportCreds credentials.TransportCredentials
tokenManager tokenmanager.AccessTokenManager
verificationMode s2av2pb.ValidatePeerCertificateChainReq_VerificationMode
serverAuthorizationPolicy []byte
@@ -339,7 +342,7 @@ func (f *s2aTLSClientConfigFactory) Build(
if opts != nil && opts.ServerName != "" {
serverName = opts.ServerName
}
- return v2.NewClientTLSConfig(ctx, f.s2av2Address, f.tokenManager, f.verificationMode, serverName, f.serverAuthorizationPolicy)
+ return v2.NewClientTLSConfig(ctx, f.s2av2Address, f.transportCreds, f.tokenManager, f.verificationMode, serverName, f.serverAuthorizationPolicy)
}
func getVerificationMode(verificationMode VerificationModeType) s2av2pb.ValidatePeerCertificateChainReq_VerificationMode {
diff --git a/vendor/github.com/google/s2a-go/s2a_options.go b/vendor/github.com/google/s2a-go/s2a_options.go
index 94feafb9c..fcdbc1621 100644
--- a/vendor/github.com/google/s2a-go/s2a_options.go
+++ b/vendor/github.com/google/s2a-go/s2a_options.go
@@ -26,6 +26,7 @@ import (
"github.com/google/s2a-go/fallback"
"github.com/google/s2a-go/stream"
+ "google.golang.org/grpc/credentials"
s2apb "github.com/google/s2a-go/internal/proto/common_go_proto"
)
@@ -92,6 +93,9 @@ type ClientOptions struct {
LocalIdentity Identity
// S2AAddress is the address of the S2A.
S2AAddress string
+ // Optional transport credentials.
+ // If set, this will be used for the gRPC connection to the S2A server.
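+ // A minimal illustrative sketch (hypothetical names): an mTLS connection to
+ // the S2A could be configured with something like
+ //
+ //	opts.TransportCreds = credentials.NewTLS(mtlsConfig)
+ //
+ // where mtlsConfig is a *tls.Config carrying the client certificate and root CA.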
+ TransportCreds credentials.TransportCredentials
// EnsureProcessSessionTickets waits for all session tickets to be sent to
// S2A before a process completes.
//
@@ -173,6 +177,9 @@ type ServerOptions struct {
LocalIdentities []Identity
// S2AAddress is the address of the S2A.
S2AAddress string
+ // Optional transport credentials.
+ // If set, this will be used for the gRPC connection to the S2A server.
+ TransportCreds credentials.TransportCredentials
// If true, enables the use of legacy S2Av1.
EnableLegacyMode bool
// VerificationMode specifies the mode that S2A must use to verify the
diff --git a/vendor/github.com/google/s2a-go/testdata/mds_client_cert.pem b/vendor/github.com/google/s2a-go/testdata/mds_client_cert.pem
new file mode 100644
index 000000000..60c4cf069
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/mds_client_cert.pem
@@ -0,0 +1,19 @@
+-----BEGIN CERTIFICATE-----
+MIIDCDCCAfACFFlYsYCFit01ZpYmfjxpo7/6wMEbMA0GCSqGSIb3DQEBCwUAMEgx
+CzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEPMA0GA1UECgwGR29vZ2xlMRswGQYD
+VQQDDBJ0ZXN0LXMyYS1tdGxzLXJvb3QwHhcNMjMwODIyMTY0NTE4WhcNNDMwODIy
+MTY0NTE4WjA5MQswCQYDVQQGEwJVUzELMAkGA1UECAwCQ0ExHTAbBgNVBAMMFHRl
+c3QtczJhLW10bHMtY2xpZW50MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAqrQQMyxNtmdCB+uY3szgRsfPrKC+TV9Fusnd8PfaCVuGTGcSBKM018nV2TDn
+3IYFQ1HgLpGwGwOFDBb3y0o9i2/l2VJySriX1GSNX6nDmVasQlO1wuOLCP7/LRmO
+7b6Kise5W0IFhYaptKyWnekn2pS0tAjimqpfn2w0U6FDGtQUqg/trQQmGtTSJHjb
+A+OFd0EFC18KGP8Q+jOMaMkJRmpeEiAPyHPDoMhqQNT26RApv9j2Uzo4SuXzHH6T
+cAdm1+zG+EXY/UZKX9oDkSbwIJvN+gCmNyORLalJ12gsGYOCjMd8K0mlXBqrmmbO
+VHVbUm9062lhE7x59AA8DK4DoQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCPOvtL
+dq2hxFHlIy0YUK8jp/DtwJZPwzx1id5FtWwd0CxBS1StIgmkHMxtkJGz1iyQLplI
+je+Msd4sTsb5zZi/8kGKehi8Wj4lghp4oP30cpob41OvM68M9RC/wSOVk9igSww+
+l3zof6wKRIswsi5VHrL16ruIVVoDlyFbKr8yk+cp9OPOV8hNNN7ewY9xC8OgnTt8
+YtdaLe6uTplKBLW+j3GtshigRhyfkGJyPFYL4LAeDJCHlC1qmBnkyP0ijMp6vneM
+E8TLavnMTMcpihWTWpyKeRkO6HDRsP4AofQAp7VAiAdSOplga+w2qgrVICV+m8MK
+BTq2PBvc59T6OFLq
+-----END CERTIFICATE-----
diff --git a/vendor/github.com/google/s2a-go/testdata/mds_client_key.pem b/vendor/github.com/google/s2a-go/testdata/mds_client_key.pem
new file mode 100644
index 000000000..9d112d1e9
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/mds_client_key.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCqtBAzLE22Z0IH
+65jezOBGx8+soL5NX0W6yd3w99oJW4ZMZxIEozTXydXZMOfchgVDUeAukbAbA4UM
+FvfLSj2Lb+XZUnJKuJfUZI1fqcOZVqxCU7XC44sI/v8tGY7tvoqKx7lbQgWFhqm0
+rJad6SfalLS0COKaql+fbDRToUMa1BSqD+2tBCYa1NIkeNsD44V3QQULXwoY/xD6
+M4xoyQlGal4SIA/Ic8OgyGpA1PbpECm/2PZTOjhK5fMcfpNwB2bX7Mb4Rdj9Rkpf
+2gORJvAgm836AKY3I5EtqUnXaCwZg4KMx3wrSaVcGquaZs5UdVtSb3TraWETvHn0
+ADwMrgOhAgMBAAECggEAUccupZ1ZY4OHTi0PkNk8rpwFwTFGyeFVEf2ofkr24RnA
+NnUAXEllxOUUNlcoFOz9s3kTeavg3qgqgpa0QmdAIb9LMXg+ec6CKkW7trMpGho8
+LxBUWNfSoU4sKEqAvyPT0lWJVo9D/up6/avbAi6TIbOw+Djzel4ZrlHTpabxc3WT
+EilXzn4q54b3MzxCQeQjcnzTieW4Q5semG2kLiXFToHIY2di01P/O8awUjgrD+uW
+/Cb6H49MnHm9VPkqea1iwZeMQd6Gh5FrC7RezsBjdB1JBcfsv6PFt2ySInjB8SF+
+XR5Gr3Cc5sh9s0LfprZ9Dq0rlSWmwasPMI1COK6SswKBgQDczgeWd3erQ1JX9LEI
+wollawqC9y7uJhEsw1hrPqA3uqZYiLUc7Nmi4laZ12mcGoXNDS3R3XmD58qGmGaU
+lxEVTb8KDVWBgw450VoBKzSMQnCP6zn4nZxTYxeqMKjDGf6TRB6TZc843qsG3eRC
+k91yxrCQ/0HV6PT48C+lieDzLwKBgQDF6aNKiyrswr457undBnM1H8q/Y6xC5ZlK
+UtiQdhuyBnicvz0U8WPxBY/8gha0OXWuSnBqq/z77iFVNv/zT6p9K7kM7nBGd8cB
+8KO6FNbyaHWFrhCI5zNzRTH4oha0hfvUOoti09vqavCtWD4L+D/63ba1wNLKPO9o
+4gWbCnUCLwKBgQC/vus372csgrnvR761LLrEJ8BpGt7WUJh5luoht7DKtHvgRleB
+Vu1oVcV+s2Iy/ZVUDC3OIdZ0hcWKPK5YOxfKuEk+IXYvke+4peTTPwHTC59UW6Fs
+FPK8N0FFuhvT0a8RlAY5WiAp8rPysp6WcnHMSl7qi8BQUozp4Sp/RsziYQKBgBXv
+r4mzoy5a53rEYGd/L4XT4EUWZyGDEVqLlDVu4eL5lKTLDZokp08vrqXuRVX0iHap
+CYzJQ2EpI8iuL/BoBB2bmwcz5n3pCMXORld5t9lmeqA2it6hwbIlGUTVsm6P6zm6
+w3hQwy9YaxTLkxUAjxbfPEEo/jQsTNzzMGve3NlBAoGAbgJExpDyMDnaD2Vi5eyr
+63b54BsqeLHqxJmADifyRCj7G1SJMm3zMKkNNOS0vsXgoiId973STFf1XQiojiv8
+Slbxyv5rczcY0n3LOuQYcM5OzsjzpNFZsT2dDnMfNRUF3rx3Geu/FuJ9scF1b00r
+fVMrcL3jSf/W1Xh4TgtyoU8=
+-----END PRIVATE KEY-----
diff --git a/vendor/github.com/google/s2a-go/testdata/mds_root_cert.pem b/vendor/github.com/google/s2a-go/testdata/mds_root_cert.pem
new file mode 100644
index 000000000..44e436f6e
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/mds_root_cert.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDcTCCAlmgAwIBAgIUDUkgI+2FZtuUHyUUi0ZBH7JvN00wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQKDAZHb29nbGUx
+GzAZBgNVBAMMEnRlc3QtczJhLW10bHMtcm9vdDAeFw0yMzA4MjEyMTI5MTVaFw00
+MzA4MjEyMTI5MTVaMEgxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEPMA0GA1UE
+CgwGR29vZ2xlMRswGQYDVQQDDBJ0ZXN0LXMyYS1tdGxzLXJvb3QwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCbFEQfpvla27bATedrN4BAWsI9GSwSnJLW
+QWzXcnAk6cKxQBAhnaKHRxHY8ttLhNTtxQeub894CLzJvHE/0xDhuMzjtCCCZ7i2
+r08tKZ1KcEzPJCPNlxlzAXPA45XU3LRlbGvju/PBPhm6n1hCEKTNI/KETJ5DEaYg
+Cf2LcXVsl/zW20MwDZ+e2w/9a2a6n6DdpW1ekOR550hXAUOIxvmXRBeYeGLFvp1n
+rQgZBhRaxP03UB+PQD2oMi/4mfsS96uGCXdzzX8qV46O8m132HUbnA/wagIwboEe
+d7Bx237dERDyHw5GFnll7orgA0FOtoEufXdeQxWVvTjO0+PVPgsvAgMBAAGjUzBR
+MB0GA1UdDgQWBBRyMtg/yutV8hw8vOq0i8x0eBQi7DAfBgNVHSMEGDAWgBRyMtg/
+yutV8hw8vOq0i8x0eBQi7DAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUA
+A4IBAQArN/gdqWMxd5Rvq2eJMTp6I4RepJOT7Go4sMsRsy1caJqqcoS2EvREDZMN
+XNEBcyQBB5kYd6TCcZGoLnEtWYXQ4jjEiXG1g7/+rWxyqw0ZYuP7FWzuHg3Uor/x
+fApbEKwptP5ywVc+33h4qreGcqXkVCCn+sAcstGgrqubdGZW2T5gazUMyammOOuN
+9IWL1PbvXmgEKD+80NUIrk09zanYyrElGdU/zw/kUbZ3Jf6WUBtJGhTzRQ1qZeKa
+VnpCbLoG3vObEB8mxDUAlIzwAtfvw4U32BVIZA8xrocz6OOoAnSW1bTlo3EOIo/G
+MTV7jmY9TBPtfhRuO/cG650+F+cw
+-----END CERTIFICATE-----
diff --git a/vendor/github.com/google/s2a-go/testdata/mds_server_cert.pem b/vendor/github.com/google/s2a-go/testdata/mds_server_cert.pem
new file mode 100644
index 000000000..68c606134
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/mds_server_cert.pem
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDbjCCAlagAwIBAgIUbexZ5sZl86Al9dsI2PkOgtqKnkgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMQ8wDQYDVQQKDAZHb29nbGUx
+GzAZBgNVBAMMEnRlc3QtczJhLW10bHMtcm9vdDAeFw0yMzA4MjIwMDMyMDRaFw00
+MzA4MjIwMDMyMDRaMDkxCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTEdMBsGA1UE
+AwwUdGVzdC1zMmEtbXRscy1zZXJ2ZXIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQCMEzybsGPqfh92GLwy43mt8kQDF3ztr8y06RwU1hVnY7QqYK4obpvh
+HkJVnTz9gwNBF3n5nUalqRzactlf2PCydN9oSYNCO8svVmo7vw1CleKAKFAiV5Qn
+H76QlqD15oJreh7nSM8R4qj5KukIHvt0cN0gD6CJQzIURDtsKJwkW3yQjYyT/FAK
+GYtFrB6buDn3Eg3Hsw6z7uj7CzLBsSl7BIGrQILbpbI9nFNT3rUTUhXZKY/3UtJA
+Ob66AjTmMbD16RGYZR4JsPx6CstheifJ6YSI79r5KgD37zX0jMXFWimvb2SmZmFe
+LoohtC8K7uTyjm/dROx6nHXdDt5TQYXHAgMBAAGjXzBdMBsGA1UdEQQUMBKHEAAA
+AAAAAAAAAAAAAAAAAAAwHQYDVR0OBBYEFI3i2+tIk6YYn0MIxC0q93jk1VsUMB8G
+A1UdIwQYMBaAFHIy2D/K61XyHDy86rSLzHR4FCLsMA0GCSqGSIb3DQEBCwUAA4IB
+AQAUhk+s/lrIAULBbU7E22C8f93AzTxE1mhyHGNlfPPJP3t1Dl+h4X4WkFpkz5gT
+EcNXB//Vvoq99HbEK5/92sxsIPexKdJBdcggeHXIgLDkOrEZEb0Nnh9eaAuU2QDn
+JW44hMB+aF6mEaJvOHE6DRkQw3hwFYFisFKKHtlQ3TyOhw5CHGzSExPZusdSFNIe
+2E7V/0QzGPJEFnEFUNe9N8nTH2P385Paoi+5+Iizlp/nztVXfzv0Cj/i+qGgtDUs
+HB+gBU2wxMw8eYyuNzACH70wqGR1Parj8/JoyYhx0S4+Gjzy3JH3CcAMaxyfH/dI
+4Wcvfz/isxgmH1UqIt3oc6ad
+-----END CERTIFICATE-----
diff --git a/vendor/github.com/google/s2a-go/testdata/mds_server_key.pem b/vendor/github.com/google/s2a-go/testdata/mds_server_key.pem
new file mode 100644
index 000000000..b14ad0f72
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/mds_server_key.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCMEzybsGPqfh92
+GLwy43mt8kQDF3ztr8y06RwU1hVnY7QqYK4obpvhHkJVnTz9gwNBF3n5nUalqRza
+ctlf2PCydN9oSYNCO8svVmo7vw1CleKAKFAiV5QnH76QlqD15oJreh7nSM8R4qj5
+KukIHvt0cN0gD6CJQzIURDtsKJwkW3yQjYyT/FAKGYtFrB6buDn3Eg3Hsw6z7uj7
+CzLBsSl7BIGrQILbpbI9nFNT3rUTUhXZKY/3UtJAOb66AjTmMbD16RGYZR4JsPx6
+CstheifJ6YSI79r5KgD37zX0jMXFWimvb2SmZmFeLoohtC8K7uTyjm/dROx6nHXd
+Dt5TQYXHAgMBAAECggEAIB5zGdIG/yh/Z1GBqfuOFaxFGx5iJ5BVlLAVH9P9IXFz
+yPnVRXEjbinFlSMSbqEBeIX9EpcVMXxHIPIP1RIGEy2IYr3kiqXyT771ahDDZh6/
+Spqz0UQatSPqyvW3H9uE0Uc12dvQm23JSCUmPRX5m7gbhDQBIChXzdzdcU4Yi59V
+4xmJUvbsAcLw5CBM6kwV+1NGVH9+3mUdhrr9M6B6+sVB/xnaqMGEDfQGiwL8U7EY
+QOuc46KXu3Pd/qCdVLn60IrdjSzDJKeC5UZZ+ejNAo+DfbtOovBj3qu3OCUg4XVy
+0CDBJ1sTdLvUfF4Gb+crjPsd+qBbXcjVfqdadwhsoQKBgQDBF1Pys/NitW8okJwp
+2fiDIASP3TiI+MthWHGyuoZGPvmXQ3H6iuLSm8c/iYI2WPTf53Xff1VcFm1GmQms
+GCsYM8Ax94zCeO6Ei1sYYxwcBloEZfOeV37MPA4pjJF4Lt+n5nveNxP+lrsjksJz
+wToSEgWPDT1b/xcdt4/5j9J85wKBgQC5tiLx+33mwH4DoaFRmSl0+VuSNYFw6DTQ
+SQ+kWqWGH4NENc9wf4Dj2VUZQhpXNhXVSxj+aP2d/ck1NrTJAWqYEXCDtFQOGSa2
+cGPRr+Fhy5NIEaEvR7IXcMBZzx3koYmWVBHricyrXs5FvHrT3N14mGDUG8n24U3f
+R799bau0IQKBgQC97UM+lHCPJCWNggiJRgSifcje9VtZp1btjoBvq/bNe74nYkjn
+htsrC91Fiu1Qpdlfr50K1IXSyaB886VG6JLjAGxI+dUzqJ38M9LLvxj0G+9JKjsi
+AbAQFfZcOg8QZxLJZPVsE0MQhZTXndC06VhEVAOxvPUg214Sde8hK61/+wKBgCRw
+O10VhnePT2pw/VEgZ0T/ZFtEylgYB7zSiRIrgwzVBBGPKVueePC8BPmGwdpYz2Hh
+cU8B1Ll6QU+Co2hJMdwSl+wPpup5PuJPHRbYlrV0lzpt0x2OyL/WrLcyb2Ab3f40
+EqwPhqwdVwXR3JvTW1U9OMqFhVQ+kuP7lPQMX8NhAoGBAJOgZ7Tokipc4Mi68Olw
+SCaOPvjjy4sW2rTRuKyjc1wTAzy7SJ3vXHfGkkN99nTLJFwAyJhWUpnRdwAXGi+x
+gyOa95ImsEfRSwEjbluWfF8/P0IU8GR+ZTqT4NnNCOsi8T/xst4Szd1ECJNnnZDe
+1ChfPP1AH+/75MJCvu6wQBQv
+-----END PRIVATE KEY-----
diff --git a/vendor/github.com/google/s2a-go/testdata/self_signed_cert.pem b/vendor/github.com/google/s2a-go/testdata/self_signed_cert.pem
new file mode 100644
index 000000000..ad1bad598
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/self_signed_cert.pem
@@ -0,0 +1,19 @@
+-----BEGIN CERTIFICATE-----
+MIIDITCCAgkCFBS8mLoytMpMWBwpAtnRaq3eIKnsMA0GCSqGSIb3DQEBCwUAME0x
+CzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTENMAsGA1UECgwEVGVzdDEiMCAGA1UE
+AwwZdGVzdC1zMmEtbXRscy1zZWxmLXNpZ25lZDAeFw0yMzA4MjIyMTE2MDFaFw00
+MzA4MjIyMTE2MDFaME0xCzAJBgNVBAYTAlVTMQswCQYDVQQIDAJDQTENMAsGA1UE
+CgwEVGVzdDEiMCAGA1UEAwwZdGVzdC1zMmEtbXRscy1zZWxmLXNpZ25lZDCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKFFPsYasKZeCFLEXl3RpE/ZOXFe
+2lhutIalSpZvCmso+mQGoZ4cHK7At+kDjBi5CrnXkYcw7quQAhHgU0frhWdj7tsW
+HUUtq7T8eaGWKBnVD9fl+MjtAl1BmhXwV9qRBbj4EesSKGDSGpKf66dOtzw83JbB
+cU7XlPAH1c1zo2GXC1himcZ+SVGHVrOjn4NmeFs8g94/Dke8dWkHwv5YTMVugFK4
+5KxKgSOKkr4ka7PCBzgxCnW4wYSZNRHcxrqkiArO2HAQq0ACr7u+fVDYH//9mP2Z
+ADo/zch7O5yhkiNbjXJIRrptDWEuVYMRloYDhT773h7bV/Q0Wo0NQGtasJ8CAwEA
+ATANBgkqhkiG9w0BAQsFAAOCAQEAPjbH0TMyegF/MDvglkc0sXr6DqlmTxDCZZmG
+lYPZ5Xy062+rxIHghMARbvO4BxepiG37KsP2agvOldm4TtU8nQ8LyswmSIFm4BQ+
+XQWwdsWyYyd8l0d5sXAdaN6AXwy50fvqCepmEqyreMY6dtLzlwo9gVCBFB7QuAPt
+Nc14phpEUZt/KPNuY6cUlB7bz3tmnFbwxUrWj1p0KBEYsr7+KEVZxR+z0wtlU7S9
+ZBrmUvx0fq5Ef7JWtHW0w4ofg1op742sdYl+53C26GZ76ts4MmqVz2/94DScgRaU
+gT0GLVuuCZXRDVeTXqTb4mditRCfzFPe9cCegYhGhSqBs8yh5A==
+-----END CERTIFICATE-----
diff --git a/vendor/github.com/google/s2a-go/testdata/self_signed_key.pem b/vendor/github.com/google/s2a-go/testdata/self_signed_key.pem
new file mode 100644
index 000000000..bcf08e4f1
--- /dev/null
+++ b/vendor/github.com/google/s2a-go/testdata/self_signed_key.pem
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQChRT7GGrCmXghS
+xF5d0aRP2TlxXtpYbrSGpUqWbwprKPpkBqGeHByuwLfpA4wYuQq515GHMO6rkAIR
+4FNH64VnY+7bFh1FLau0/HmhligZ1Q/X5fjI7QJdQZoV8FfakQW4+BHrEihg0hqS
+n+unTrc8PNyWwXFO15TwB9XNc6NhlwtYYpnGfklRh1azo5+DZnhbPIPePw5HvHVp
+B8L+WEzFboBSuOSsSoEjipK+JGuzwgc4MQp1uMGEmTUR3Ma6pIgKzthwEKtAAq+7
+vn1Q2B///Zj9mQA6P83IezucoZIjW41ySEa6bQ1hLlWDEZaGA4U++94e21f0NFqN
+DUBrWrCfAgMBAAECggEAR8e8YwyqJ8KezcgdgIC5M9kp2i4v3UCZFX0or8CI0J2S
+pUbWVLuKgLXCpfIwPyjNf15Vpei/spkMcsx4BQDthdFTFSzIpmvni0z9DlD5VFYj
+ESOJElV7wepbHPy2/c+izmuL/ic81aturGiFyRgeMq+cN3WuaztFTXkPTrzzsZGF
+p/Mx3gqm7Hoc3d2xlv+8L5GjCtEJPlQgZJV+s3ennBjOAd8CC7d9qJetE3Er46pn
+r5jedV3bQRZYBzmooYNHjbAs26++wYac/jTE0/U6nKS17eWq4BQZUtlMXUw5N81B
+7LKn7C03rj2KCn+Nf5uin9ALmoy888LXCDdvL/NZkQKBgQDduv1Heu+tOZuNYUdQ
+Hswmd8sVNAAWGZxdxixHMv58zrgbLFXSX6K89X2l5Sj9XON8TH46MuSFdjSwwWw5
+fBrhVEhA5srcqpvVWIBE05yqPpt0s1NQktMWJKELWlG8jOhVKwM5OYDpdxtwehpz
+1g70XJz+nF/LTV8RdTK+OWDDpQKBgQC6MhdbGHUz/56dY3gZpE5TXnN2hkNbZCgk
+emr6z85VHhQflZbedhCzB9PUnZnCKWOGQHQdxRTtRfd46LVboZqCdYO1ZNQv6toP
+ysS7dTpZZFy7CpQaW0Y6/jS65jW6xIDKR1W40vgltZ3sfpG37JaowpzWdw2WuOnw
+Bg0rcJAf8wKBgQCqE+p/z97UwuF8eufWnyj9QNo382E1koOMspv4KTdnyLETtthF
+vDH6O1wbykG8xmmASLRyM+NyNA+KnXNETNvZh2q8zctBpGRQK8iIAsGjHM7ln0AD
+B/x+ea5GJQuZU4RK/+lDFca6TjBwAFkWDVX/PqL18kDQkxKfM4SuwRhmOQKBgDGh
+eoJIsa0LnP787Z2AI3Srf4F/ZmLs/ppCm1OBotEjdF+64v0nYWonUvqgi8SqfaHi
+elEZIGvis4ViGj1zhRjzNAlc+AZRxpBhDzGcnNIJI4Kj3jhsTfsZmXqcNIQ1LtM8
+Uogyi/yZPaA1WKg7Aym2vlGYaGHdplXZdxc2KOSrAoGABRkD9l2OVcwK7RyNgFxo
+mjxx0tfUdDBhHIi2igih1FiHpeP9E+4/kE/K7PnU9DoDrL1jW1MTpXaYV4seOylk
+k9z/9QfcRa9ePD2N4FqbHWSYp5n3aLoIcGq/9jyjTwayZbbIhWO+vNuHE9wIvecZ
+8x3gNkxJRb4NaLIoNzAhCoo=
+-----END PRIVATE KEY-----
diff --git a/vendor/github.com/json-iterator/go/.codecov.yml b/vendor/github.com/json-iterator/go/.codecov.yml
new file mode 100644
index 000000000..955dc0be5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.codecov.yml
@@ -0,0 +1,3 @@
+ignore:
+ - "output_tests/.*"
+
diff --git a/vendor/github.com/json-iterator/go/.gitignore b/vendor/github.com/json-iterator/go/.gitignore
new file mode 100644
index 000000000..15556530a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.gitignore
@@ -0,0 +1,4 @@
+/vendor
+/bug_test.go
+/coverage.txt
+/.idea
diff --git a/vendor/github.com/json-iterator/go/.travis.yml b/vendor/github.com/json-iterator/go/.travis.yml
new file mode 100644
index 000000000..449e67cd0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.8.x
+ - 1.x
+
+before_install:
+ - go get -t -v ./...
+
+script:
+ - ./test.sh
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/vendor/github.com/json-iterator/go/Gopkg.lock b/vendor/github.com/json-iterator/go/Gopkg.lock
new file mode 100644
index 000000000..c8a9fbb38
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.lock
@@ -0,0 +1,21 @@
+# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+
+
+[[projects]]
+ name = "github.com/modern-go/concurrent"
+ packages = ["."]
+ revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
+ version = "1.0.0"
+
+[[projects]]
+ name = "github.com/modern-go/reflect2"
+ packages = ["."]
+ revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
+ version = "1.0.1"
+
+[solve-meta]
+ analyzer-name = "dep"
+ analyzer-version = 1
+ inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
+ solver-name = "gps-cdcl"
+ solver-version = 1
diff --git a/vendor/github.com/json-iterator/go/Gopkg.toml b/vendor/github.com/json-iterator/go/Gopkg.toml
new file mode 100644
index 000000000..313a0f887
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/Gopkg.toml
@@ -0,0 +1,26 @@
+# Gopkg.toml example
+#
+# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
+# for detailed Gopkg.toml documentation.
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+# name = "github.com/user/project"
+# version = "1.0.0"
+#
+# [[constraint]]
+# name = "github.com/user/project2"
+# branch = "dev"
+# source = "github.com/myfork/project2"
+#
+# [[override]]
+# name = "github.com/x/y"
+# version = "2.4.0"
+
+ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
+
+[[constraint]]
+ name = "github.com/modern-go/reflect2"
+ version = "1.0.1"
diff --git a/vendor/github.com/json-iterator/go/LICENSE b/vendor/github.com/json-iterator/go/LICENSE
new file mode 100644
index 000000000..2cf4f5ab2
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2016 json-iterator
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/json-iterator/go/README.md b/vendor/github.com/json-iterator/go/README.md
new file mode 100644
index 000000000..c589addf9
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/README.md
@@ -0,0 +1,85 @@
+[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
+[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go)
+[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
+[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
+[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
+[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
+[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
+
+A high-performance, 100% compatible drop-in replacement for "encoding/json"
+
+# Benchmark
+
+![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
+
+Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
+
+Raw Result (easyjson requires static code generation)
+
+| | ns/op | allocation bytes | allocation times |
+| --------------- | ----------- | ---------------- | ---------------- |
+| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
+| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
+| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
+| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
+| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
+| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
+
+Always benchmark with your own workload.
+The result depends heavily on the data input.
+
+# Usage
+
+100% compatibility with standard lib
+
+Replace
+
+```go
+import "encoding/json"
+json.Marshal(&data)
+```
+
+with
+
+```go
+import jsoniter "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Marshal(&data)
+```
+
+Replace
+
+```go
+import "encoding/json"
+json.Unmarshal(input, &data)
+```
+
+with
+
+```go
+import jsoniter "github.com/json-iterator/go"
+
+var json = jsoniter.ConfigCompatibleWithStandardLibrary
+json.Unmarshal(input, &data)
+```
+
+[More documentation](http://jsoniter.com/migrate-from-go-std.html)
+
+# How to get
+
+```
+go get github.com/json-iterator/go
+```
+
+# Contributions Welcome!
+
+Contributors
+
+- [thockin](https://github.com/thockin)
+- [mattn](https://github.com/mattn)
+- [cch123](https://github.com/cch123)
+- [Oleg Shaldybin](https://github.com/olegshaldybin)
+- [Jason Toffaletti](https://github.com/toffaletti)
+
+Report an issue or open a pull request, email taowen@gmail.com, or join the chat: [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
diff --git a/vendor/github.com/json-iterator/go/adapter.go b/vendor/github.com/json-iterator/go/adapter.go
new file mode 100644
index 000000000..92d2cc4a3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/adapter.go
@@ -0,0 +1,150 @@
+package jsoniter
+
+import (
+ "bytes"
+ "io"
+)
+
+// RawMessage is provided so that encoding/json's json.RawMessage can be replaced with jsoniter
+type RawMessage []byte
+
+// Unmarshal adapts to the encoding/json Unmarshal API
+//
+// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
+// Refer to https://godoc.org/encoding/json#Unmarshal for more information
+func Unmarshal(data []byte, v interface{}) error {
+ return ConfigDefault.Unmarshal(data, v)
+}
+
+// UnmarshalFromString is a convenience method that reads from a string instead of a []byte
+func UnmarshalFromString(str string, v interface{}) error {
+ return ConfigDefault.UnmarshalFromString(str, v)
+}
+
+// Get quick method to get value from deeply nested JSON structure
+func Get(data []byte, path ...interface{}) Any {
+ return ConfigDefault.Get(data, path...)
+}
+
+// Marshal adapts to the encoding/json Marshal API
+//
+// Marshal returns the JSON encoding of v, adapting to the encoding/json Marshal API.
+// Refer to https://godoc.org/encoding/json#Marshal for more information
+func Marshal(v interface{}) ([]byte, error) {
+ return ConfigDefault.Marshal(v)
+}
+
+// MarshalIndent is the same as json.MarshalIndent. Prefix is not supported.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ return ConfigDefault.MarshalIndent(v, prefix, indent)
+}
+
+// MarshalToString is a convenience method that returns a string instead of a []byte
+func MarshalToString(v interface{}) (string, error) {
+ return ConfigDefault.MarshalToString(v)
+}
+
+// NewDecoder adapts to the encoding/json NewDecoder API.
+//
+// NewDecoder returns a new decoder that reads from r.
+//
+// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
+// Refer to https://godoc.org/encoding/json#NewDecoder for more information
+func NewDecoder(reader io.Reader) *Decoder {
+ return ConfigDefault.NewDecoder(reader)
+}
+
+// Decoder reads and decodes JSON values from an input stream.
+// Decoder provides the same API as the encoding/json Decoder (Token() and UseNumber() are in progress)
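+//
+// Illustrative usage (r is any io.Reader producing a stream of JSON values):
+//
+//	dec := jsoniter.NewDecoder(r)
+//	for dec.More() {
+//		var v map[string]interface{}
+//		if err := dec.Decode(&v); err != nil {
+//			// handle the error
+//		}
+//	}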
+type Decoder struct {
+ iter *Iterator
+}
+
+// Decode reads the next JSON-encoded value from the input and stores it in obj
+func (adapter *Decoder) Decode(obj interface{}) error {
+ if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
+ if !adapter.iter.loadMore() {
+ return io.EOF
+ }
+ }
+ adapter.iter.ReadVal(obj)
+ err := adapter.iter.Error
+ if err == io.EOF {
+ return nil
+ }
+ return adapter.iter.Error
+}
+
+// More reports whether there is another element in the current array or object being parsed
+func (adapter *Decoder) More() bool {
+ iter := adapter.iter
+ if iter.Error != nil {
+ return false
+ }
+ c := iter.nextToken()
+ if c == 0 {
+ return false
+ }
+ iter.unreadByte()
+ return c != ']' && c != '}'
+}
+
+// Buffered returns a reader over the data remaining in the Decoder's buffer
+func (adapter *Decoder) Buffered() io.Reader {
+ remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
+ return bytes.NewReader(remaining)
+}
+
+// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
+// Number instead of as a float64.
+func (adapter *Decoder) UseNumber() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.UseNumber = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// DisallowUnknownFields causes the Decoder to return an error when the destination
+// is a struct and the input contains object keys which do not match any
+// non-ignored, exported fields in the destination.
+func (adapter *Decoder) DisallowUnknownFields() {
+ cfg := adapter.iter.cfg.configBeforeFrozen
+ cfg.DisallowUnknownFields = true
+ adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
+}
+
+// NewEncoder is the same as json.NewEncoder
+func NewEncoder(writer io.Writer) *Encoder {
+ return ConfigDefault.NewEncoder(writer)
+}
+
+// Encoder is the same as json.Encoder
+type Encoder struct {
+ stream *Stream
+}
+
+// Encode writes the JSON encoding of val to the underlying io.Writer, followed by a newline
+func (adapter *Encoder) Encode(val interface{}) error {
+ adapter.stream.WriteVal(val)
+ adapter.stream.WriteRaw("\n")
+ adapter.stream.Flush()
+ return adapter.stream.Error
+}
+
+// SetIndent sets the indentation. Prefix is not supported
+func (adapter *Encoder) SetIndent(prefix, indent string) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.IndentionStep = len(indent)
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// SetEscapeHTML controls HTML escaping (enabled by default); set it to false to disable
+func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
+ config := adapter.stream.cfg.configBeforeFrozen
+ config.EscapeHTML = escapeHTML
+ adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
+}
+
+// Valid reports whether data is a valid JSON encoding.
+func Valid(data []byte) bool {
+ return ConfigDefault.Valid(data)
+}
diff --git a/vendor/github.com/json-iterator/go/any.go b/vendor/github.com/json-iterator/go/any.go
new file mode 100644
index 000000000..f6b8aeab0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any.go
@@ -0,0 +1,325 @@
+package jsoniter
+
+import (
+ "errors"
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "strconv"
+ "unsafe"
+)
+
+// Any is a generic object representation.
+// The lazy JSON implementation holds the raw []byte and parses it lazily.
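+//
+// A minimal illustrative example:
+//
+//	any := jsoniter.Get([]byte(`[1, 2, 3]`))
+//	if any.ValueType() == jsoniter.ArrayValue {
+//		any.Size() // 3
+//	}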
+type Any interface {
+ LastError() error
+ ValueType() ValueType
+ MustBeValid() Any
+ ToBool() bool
+ ToInt() int
+ ToInt32() int32
+ ToInt64() int64
+ ToUint() uint
+ ToUint32() uint32
+ ToUint64() uint64
+ ToFloat32() float32
+ ToFloat64() float64
+ ToString() string
+ ToVal(val interface{})
+ Get(path ...interface{}) Any
+ Size() int
+ Keys() []string
+ GetInterface() interface{}
+ WriteTo(stream *Stream)
+}
+
+type baseAny struct{}
+
+func (any *baseAny) Get(path ...interface{}) Any {
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *baseAny) Size() int {
+ return 0
+}
+
+func (any *baseAny) Keys() []string {
+ return []string{}
+}
+
+func (any *baseAny) ToVal(obj interface{}) {
+ panic("not implemented")
+}
+
+// WrapInt32 turns an int32 into the Any interface
+func WrapInt32(val int32) Any {
+ return &int32Any{baseAny{}, val}
+}
+
+// WrapInt64 turns an int64 into the Any interface
+func WrapInt64(val int64) Any {
+ return &int64Any{baseAny{}, val}
+}
+
+// WrapUint32 turns a uint32 into the Any interface
+func WrapUint32(val uint32) Any {
+ return &uint32Any{baseAny{}, val}
+}
+
+// WrapUint64 turns a uint64 into the Any interface
+func WrapUint64(val uint64) Any {
+ return &uint64Any{baseAny{}, val}
+}
+
+// WrapFloat64 turns a float64 into the Any interface
+func WrapFloat64(val float64) Any {
+ return &floatAny{baseAny{}, val}
+}
+
+// WrapString turns a string into the Any interface
+func WrapString(val string) Any {
+ return &stringAny{baseAny{}, val}
+}
+
+// Wrap turns a Go object into the Any interface
+func Wrap(val interface{}) Any {
+ if val == nil {
+ return &nilAny{}
+ }
+ asAny, isAny := val.(Any)
+ if isAny {
+ return asAny
+ }
+ typ := reflect2.TypeOf(val)
+ switch typ.Kind() {
+ case reflect.Slice:
+ return wrapArray(val)
+ case reflect.Struct:
+ return wrapStruct(val)
+ case reflect.Map:
+ return wrapMap(val)
+ case reflect.String:
+ return WrapString(val.(string))
+ case reflect.Int:
+ if strconv.IntSize == 32 {
+ return WrapInt32(int32(val.(int)))
+ }
+ return WrapInt64(int64(val.(int)))
+ case reflect.Int8:
+ return WrapInt32(int32(val.(int8)))
+ case reflect.Int16:
+ return WrapInt32(int32(val.(int16)))
+ case reflect.Int32:
+ return WrapInt32(val.(int32))
+ case reflect.Int64:
+ return WrapInt64(val.(int64))
+ case reflect.Uint:
+ if strconv.IntSize == 32 {
+ return WrapUint32(uint32(val.(uint)))
+ }
+ return WrapUint64(uint64(val.(uint)))
+ case reflect.Uintptr:
+ if ptrSize == 32 {
+ return WrapUint32(uint32(val.(uintptr)))
+ }
+ return WrapUint64(uint64(val.(uintptr)))
+ case reflect.Uint8:
+ return WrapUint32(uint32(val.(uint8)))
+ case reflect.Uint16:
+ return WrapUint32(uint32(val.(uint16)))
+ case reflect.Uint32:
+ return WrapUint32(uint32(val.(uint32)))
+ case reflect.Uint64:
+ return WrapUint64(val.(uint64))
+ case reflect.Float32:
+ return WrapFloat64(float64(val.(float32)))
+ case reflect.Float64:
+ return WrapFloat64(val.(float64))
+ case reflect.Bool:
+ if val.(bool) == true {
+ return &trueAny{}
+ }
+ return &falseAny{}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
+}
+
+// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
+func (iter *Iterator) ReadAny() Any {
+ return iter.readAny()
+}
+
+func (iter *Iterator) readAny() Any {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.unreadByte()
+ return &stringAny{baseAny{}, iter.ReadString()}
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return &nilAny{}
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ return &trueAny{}
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ return &falseAny{}
+ case '{':
+ return iter.readObjectAny()
+ case '[':
+ return iter.readArrayAny()
+ case '-':
+ return iter.readNumberAny(false)
+ case 0:
+ return &invalidAny{baseAny{}, errors.New("input is empty")}
+ default:
+ return iter.readNumberAny(true)
+ }
+}
+
+func (iter *Iterator) readNumberAny(positive bool) Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipNumber()
+ lazyBuf := iter.stopCapture()
+ return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readObjectAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipObject()
+ lazyBuf := iter.stopCapture()
+ return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func (iter *Iterator) readArrayAny() Any {
+ iter.startCapture(iter.head - 1)
+ iter.skipArray()
+ lazyBuf := iter.stopCapture()
+ return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
+}
+
+func locateObjectField(iter *Iterator, target string) []byte {
+ var found []byte
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ if field == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ return true
+ })
+ return found
+}
+
+func locateArrayElement(iter *Iterator, target int) []byte {
+ var found []byte
+ n := 0
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ if n == target {
+ found = iter.SkipAndReturnBytes()
+ return false
+ }
+ iter.Skip()
+ n++
+ return true
+ })
+ return found
+}
+
+func locatePath(iter *Iterator, path []interface{}) Any {
+ for i, pathKeyObj := range path {
+ switch pathKey := pathKeyObj.(type) {
+ case string:
+ valueBytes := locateObjectField(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int:
+ valueBytes := locateArrayElement(iter, pathKey)
+ if valueBytes == nil {
+ return newInvalidAny(path[i:])
+ }
+ iter.ResetBytes(valueBytes)
+ case int32:
+ if '*' == pathKey {
+ return iter.readAny().Get(path[i:]...)
+ }
+ return newInvalidAny(path[i:])
+ default:
+ return newInvalidAny(path[i:])
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return &invalidAny{baseAny{}, iter.Error}
+ }
+ return iter.readAny()
+}
+
+var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
+
+func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == anyType {
+ return &directAnyCodec{}
+ }
+ if typ.Implements(anyType) {
+ return &anyCodec{
+ valType: typ,
+ }
+ }
+ return nil
+}
+
+type anyCodec struct {
+ valType reflect2.Type
+}
+
+func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ panic("not implemented")
+}
+
+func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ any.WriteTo(stream)
+}
+
+func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := codec.valType.UnsafeIndirect(ptr)
+ any := obj.(Any)
+ return any.Size() == 0
+}
+
+type directAnyCodec struct {
+}
+
+func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *(*Any)(ptr) = iter.readAny()
+}
+
+func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ any := *(*Any)(ptr)
+ if any == nil {
+ stream.WriteNil()
+ return
+ }
+ any.WriteTo(stream)
+}
+
+func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ any := *(*Any)(ptr)
+ return any.Size() == 0
+}
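The `Any` interface above is json-iterator's lazy document API: object and array values are kept as raw `[]byte`, only the sub-tree reached by a `Get` path is parsed, `'*'` (a rune, hence the `int32` cases) fans out over every element, and the `Wrap*` helpers lift ordinary Go values into the same interface. A minimal usage sketch built only on the exported names above (the sample JSON is illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users":[{"name":"ana"},{"name":"bob"}]}`)

	// Only the sub-tree reached by the path is decoded; the rest stays as raw bytes.
	first := jsoniter.ConfigDefault.Get(data, "users", 0, "name")
	fmt.Println(first.ToString(), first.LastError()) // ana <nil>

	// '*' is an int32 rune, so it hits the wildcard branch and maps over all elements.
	names := jsoniter.ConfigDefault.Get(data, "users", '*', "name")
	fmt.Println(names.Size()) // 2

	// Wrap turns a plain Go value into the same Any interface.
	fmt.Println(jsoniter.Wrap([]int{7, 8, 9}).Get(2).ToInt()) // 9
}
```

Note that lookups return an `Any` even on failure; errors surface later through `LastError` or `MustBeValid` rather than as a second return value.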
diff --git a/vendor/github.com/json-iterator/go/any_array.go b/vendor/github.com/json-iterator/go/any_array.go
new file mode 100644
index 000000000..0449e9aa4
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_array.go
@@ -0,0 +1,278 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type arrayLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *arrayLazyAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *arrayLazyAny) ToBool() bool {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.ReadArray()
+}
+
+func (any *arrayLazyAny) ToInt() int {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt32() int32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToInt64() int64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint() uint {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint32() uint32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToUint64() uint64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat32() float32 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToFloat64() float64 {
+ if any.ToBool() {
+ return 1
+ }
+ return 0
+}
+
+func (any *arrayLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *arrayLazyAny) ToVal(val interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(val)
+}
+
+func (any *arrayLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateArrayElement(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ arr := make([]Any, 0)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ found := iter.readAny().Get(path[1:]...)
+ if found.ValueType() != InvalidValue {
+ arr = append(arr, found)
+ }
+ return true
+ })
+ return wrapArray(arr)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ size++
+ iter.Skip()
+ return true
+ })
+ return size
+}
+
+func (any *arrayLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *arrayLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type arrayAny struct {
+ baseAny
+ val reflect.Value
+}
+
+func wrapArray(val interface{}) *arrayAny {
+ return &arrayAny{baseAny{}, reflect.ValueOf(val)}
+}
+
+func (any *arrayAny) ValueType() ValueType {
+ return ArrayValue
+}
+
+func (any *arrayAny) MustBeValid() Any {
+ return any
+}
+
+func (any *arrayAny) LastError() error {
+ return nil
+}
+
+func (any *arrayAny) ToBool() bool {
+ return any.val.Len() != 0
+}
+
+func (any *arrayAny) ToInt() int {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt32() int32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToInt64() int64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint() uint {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint32() uint32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToUint64() uint64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat32() float32 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToFloat64() float64 {
+ if any.val.Len() == 0 {
+ return 0
+ }
+ return 1
+}
+
+func (any *arrayAny) ToString() string {
+ str, _ := MarshalToString(any.val.Interface())
+ return str
+}
+
+func (any *arrayAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int:
+ if firstPath < 0 || firstPath >= any.val.Len() {
+ return newInvalidAny(path)
+ }
+ return Wrap(any.val.Index(firstPath).Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := make([]Any, 0)
+ for i := 0; i < any.val.Len(); i++ {
+ mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll = append(mappedAll, mapped)
+ }
+ }
+ return wrapArray(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *arrayAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *arrayAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *arrayAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/any_bool.go b/vendor/github.com/json-iterator/go/any_bool.go
new file mode 100644
index 000000000..9452324af
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_bool.go
@@ -0,0 +1,137 @@
+package jsoniter
+
+type trueAny struct {
+ baseAny
+}
+
+func (any *trueAny) LastError() error {
+ return nil
+}
+
+func (any *trueAny) ToBool() bool {
+ return true
+}
+
+func (any *trueAny) ToInt() int {
+ return 1
+}
+
+func (any *trueAny) ToInt32() int32 {
+ return 1
+}
+
+func (any *trueAny) ToInt64() int64 {
+ return 1
+}
+
+func (any *trueAny) ToUint() uint {
+ return 1
+}
+
+func (any *trueAny) ToUint32() uint32 {
+ return 1
+}
+
+func (any *trueAny) ToUint64() uint64 {
+ return 1
+}
+
+func (any *trueAny) ToFloat32() float32 {
+ return 1
+}
+
+func (any *trueAny) ToFloat64() float64 {
+ return 1
+}
+
+func (any *trueAny) ToString() string {
+ return "true"
+}
+
+func (any *trueAny) WriteTo(stream *Stream) {
+ stream.WriteTrue()
+}
+
+func (any *trueAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *trueAny) GetInterface() interface{} {
+ return true
+}
+
+func (any *trueAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *trueAny) MustBeValid() Any {
+ return any
+}
+
+type falseAny struct {
+ baseAny
+}
+
+func (any *falseAny) LastError() error {
+ return nil
+}
+
+func (any *falseAny) ToBool() bool {
+ return false
+}
+
+func (any *falseAny) ToInt() int {
+ return 0
+}
+
+func (any *falseAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *falseAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *falseAny) ToUint() uint {
+ return 0
+}
+
+func (any *falseAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *falseAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *falseAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *falseAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *falseAny) ToString() string {
+ return "false"
+}
+
+func (any *falseAny) WriteTo(stream *Stream) {
+ stream.WriteFalse()
+}
+
+func (any *falseAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *falseAny) GetInterface() interface{} {
+ return false
+}
+
+func (any *falseAny) ValueType() ValueType {
+ return BoolValue
+}
+
+func (any *falseAny) MustBeValid() Any {
+ return any
+}
diff --git a/vendor/github.com/json-iterator/go/any_float.go b/vendor/github.com/json-iterator/go/any_float.go
new file mode 100644
index 000000000..35fdb0949
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_float.go
@@ -0,0 +1,83 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type floatAny struct {
+ baseAny
+ val float64
+}
+
+func (any *floatAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *floatAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *floatAny) MustBeValid() Any {
+ return any
+}
+
+func (any *floatAny) LastError() error {
+ return nil
+}
+
+func (any *floatAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *floatAny) ToInt() int {
+ return int(any.val)
+}
+
+func (any *floatAny) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *floatAny) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *floatAny) ToUint() uint {
+ if any.val > 0 {
+ return uint(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint32() uint32 {
+ if any.val > 0 {
+ return uint32(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToUint64() uint64 {
+ if any.val > 0 {
+ return uint64(any.val)
+ }
+ return 0
+}
+
+func (any *floatAny) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *floatAny) ToFloat64() float64 {
+ return any.val
+}
+
+func (any *floatAny) ToString() string {
+ return strconv.FormatFloat(any.val, 'E', -1, 64)
+}
+
+func (any *floatAny) WriteTo(stream *Stream) {
+ stream.WriteFloat64(any.val)
+}
+
+func (any *floatAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int32.go b/vendor/github.com/json-iterator/go/any_int32.go
new file mode 100644
index 000000000..1b56f3991
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int32Any struct {
+ baseAny
+ val int32
+}
+
+func (any *int32Any) LastError() error {
+ return nil
+}
+
+func (any *int32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int32Any) ToInt32() int32 {
+ return any.val
+}
+
+func (any *int32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *int32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int32Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *int32Any) WriteTo(stream *Stream) {
+ stream.WriteInt32(any.val)
+}
+
+func (any *int32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_int64.go b/vendor/github.com/json-iterator/go/any_int64.go
new file mode 100644
index 000000000..c440d72b6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_int64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type int64Any struct {
+ baseAny
+ val int64
+}
+
+func (any *int64Any) LastError() error {
+ return nil
+}
+
+func (any *int64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *int64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *int64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *int64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *int64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *int64Any) ToInt64() int64 {
+ return any.val
+}
+
+func (any *int64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *int64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *int64Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *int64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *int64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *int64Any) ToString() string {
+ return strconv.FormatInt(any.val, 10)
+}
+
+func (any *int64Any) WriteTo(stream *Stream) {
+ stream.WriteInt64(any.val)
+}
+
+func (any *int64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *int64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_invalid.go b/vendor/github.com/json-iterator/go/any_invalid.go
new file mode 100644
index 000000000..1d859eac3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_invalid.go
@@ -0,0 +1,82 @@
+package jsoniter
+
+import "fmt"
+
+type invalidAny struct {
+ baseAny
+ err error
+}
+
+func newInvalidAny(path []interface{}) *invalidAny {
+ return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
+}
+
+func (any *invalidAny) LastError() error {
+ return any.err
+}
+
+func (any *invalidAny) ValueType() ValueType {
+ return InvalidValue
+}
+
+func (any *invalidAny) MustBeValid() Any {
+ panic(any.err)
+}
+
+func (any *invalidAny) ToBool() bool {
+ return false
+}
+
+func (any *invalidAny) ToInt() int {
+ return 0
+}
+
+func (any *invalidAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *invalidAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *invalidAny) ToUint() uint {
+ return 0
+}
+
+func (any *invalidAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *invalidAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *invalidAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *invalidAny) ToString() string {
+ return ""
+}
+
+func (any *invalidAny) WriteTo(stream *Stream) {
+}
+
+func (any *invalidAny) Get(path ...interface{}) Any {
+ if any.err == nil {
+ return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
+}
+
+func (any *invalidAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *invalidAny) GetInterface() interface{} {
+ return nil
+}
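`invalidAny` is how lookup failures propagate without panics: every conversion degrades to a zero value, the missing path is kept in the stored error, and only `MustBeValid` escalates it. A small sketch, assuming an illustrative payload:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	missing := jsoniter.ConfigDefault.Get([]byte(`{"a":1}`), "b")

	// Conversions on an invalid Any return zero values instead of panicking.
	fmt.Println(missing.ValueType() == jsoniter.InvalidValue) // true
	fmt.Println(missing.ToInt(), missing.ToString() == "")    // 0 true
	fmt.Println(missing.LastError() != nil)                   // true

	// missing.MustBeValid() would panic with the stored "not found" error.
}
```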
diff --git a/vendor/github.com/json-iterator/go/any_nil.go b/vendor/github.com/json-iterator/go/any_nil.go
new file mode 100644
index 000000000..d04cb54c1
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_nil.go
@@ -0,0 +1,69 @@
+package jsoniter
+
+type nilAny struct {
+ baseAny
+}
+
+func (any *nilAny) LastError() error {
+ return nil
+}
+
+func (any *nilAny) ValueType() ValueType {
+ return NilValue
+}
+
+func (any *nilAny) MustBeValid() Any {
+ return any
+}
+
+func (any *nilAny) ToBool() bool {
+ return false
+}
+
+func (any *nilAny) ToInt() int {
+ return 0
+}
+
+func (any *nilAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *nilAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *nilAny) ToUint() uint {
+ return 0
+}
+
+func (any *nilAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *nilAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *nilAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *nilAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *nilAny) ToString() string {
+ return ""
+}
+
+func (any *nilAny) WriteTo(stream *Stream) {
+ stream.WriteNil()
+}
+
+func (any *nilAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *nilAny) GetInterface() interface{} {
+ return nil
+}
diff --git a/vendor/github.com/json-iterator/go/any_number.go b/vendor/github.com/json-iterator/go/any_number.go
new file mode 100644
index 000000000..9d1e901a6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_number.go
@@ -0,0 +1,123 @@
+package jsoniter
+
+import (
+ "io"
+ "unsafe"
+)
+
+type numberLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *numberLazyAny) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *numberLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *numberLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *numberLazyAny) ToBool() bool {
+ return any.ToFloat64() != 0
+}
+
+func (any *numberLazyAny) ToInt() int {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt32() int32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToInt64() int64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadInt64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint() uint {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint32() uint32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToUint64() uint64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadUint64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat32() float32 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat32()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToFloat64() float64 {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ val := iter.ReadFloat64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ any.err = iter.Error
+ }
+ return val
+}
+
+func (any *numberLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *numberLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *numberLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
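`numberLazyAny` defers parsing: the captured digits stay in `buf`, each `ToXxx` call borrows a pooled iterator to decode them, and any decode error is recorded for `LastError`. For example (illustrative input):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	n := jsoniter.ConfigDefault.Get([]byte(`123`))

	// ToString exposes the captured raw bytes; numeric conversions parse on demand.
	fmt.Println(n.ValueType() == jsoniter.NumberValue)   // true
	fmt.Println(n.ToString())                            // 123
	fmt.Println(n.ToInt(), n.ToFloat64(), n.LastError()) // 123 123 <nil>
}
```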
diff --git a/vendor/github.com/json-iterator/go/any_object.go b/vendor/github.com/json-iterator/go/any_object.go
new file mode 100644
index 000000000..c44ef5c98
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_object.go
@@ -0,0 +1,374 @@
+package jsoniter
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+type objectLazyAny struct {
+ baseAny
+ cfg *frozenConfig
+ buf []byte
+ err error
+}
+
+func (any *objectLazyAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectLazyAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectLazyAny) LastError() error {
+ return any.err
+}
+
+func (any *objectLazyAny) ToBool() bool {
+ return true
+}
+
+func (any *objectLazyAny) ToInt() int {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectLazyAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectLazyAny) ToString() string {
+ return *(*string)(unsafe.Pointer(&any.buf))
+}
+
+func (any *objectLazyAny) ToVal(obj interface{}) {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadVal(obj)
+}
+
+func (any *objectLazyAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ valueBytes := locateObjectField(iter, firstPath)
+ if valueBytes == nil {
+ return newInvalidAny(path)
+ }
+ iter.ResetBytes(valueBytes)
+ return locatePath(iter, path[1:])
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ mapped := locatePath(iter, path[1:])
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[field] = mapped
+ }
+ return true
+ })
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectLazyAny) Keys() []string {
+ keys := []string{}
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadMapCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ keys = append(keys, field)
+ return true
+ })
+ return keys
+}
+
+func (any *objectLazyAny) Size() int {
+ size := 0
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ size++
+ return true
+ })
+ return size
+}
+
+func (any *objectLazyAny) WriteTo(stream *Stream) {
+ stream.Write(any.buf)
+}
+
+func (any *objectLazyAny) GetInterface() interface{} {
+ iter := any.cfg.BorrowIterator(any.buf)
+ defer any.cfg.ReturnIterator(iter)
+ return iter.Read()
+}
+
+type objectAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapStruct(val interface{}) *objectAny {
+ return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *objectAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *objectAny) MustBeValid() Any {
+ return any
+}
+
+func (any *objectAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *objectAny) LastError() error {
+ return any.err
+}
+
+func (any *objectAny) ToBool() bool {
+ return any.val.NumField() != 0
+}
+
+func (any *objectAny) ToInt() int {
+ return 0
+}
+
+func (any *objectAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *objectAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *objectAny) ToUint() uint {
+ return 0
+}
+
+func (any *objectAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *objectAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *objectAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *objectAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *objectAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *objectAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case string:
+ field := any.val.FieldByName(firstPath)
+ if !field.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(field.Interface())
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for i := 0; i < any.val.NumField(); i++ {
+ field := any.val.Field(i)
+ if field.CanInterface() {
+ mapped := Wrap(field.Interface()).Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[any.val.Type().Field(i).Name] = mapped
+ }
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ return newInvalidAny(path)
+ }
+}
+
+func (any *objectAny) Keys() []string {
+ keys := make([]string, 0, any.val.NumField())
+ for i := 0; i < any.val.NumField(); i++ {
+ keys = append(keys, any.val.Type().Field(i).Name)
+ }
+ return keys
+}
+
+func (any *objectAny) Size() int {
+ return any.val.NumField()
+}
+
+func (any *objectAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *objectAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
+
+type mapAny struct {
+ baseAny
+ err error
+ val reflect.Value
+}
+
+func wrapMap(val interface{}) *mapAny {
+ return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
+}
+
+func (any *mapAny) ValueType() ValueType {
+ return ObjectValue
+}
+
+func (any *mapAny) MustBeValid() Any {
+ return any
+}
+
+func (any *mapAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *mapAny) LastError() error {
+ return any.err
+}
+
+func (any *mapAny) ToBool() bool {
+ return true
+}
+
+func (any *mapAny) ToInt() int {
+ return 0
+}
+
+func (any *mapAny) ToInt32() int32 {
+ return 0
+}
+
+func (any *mapAny) ToInt64() int64 {
+ return 0
+}
+
+func (any *mapAny) ToUint() uint {
+ return 0
+}
+
+func (any *mapAny) ToUint32() uint32 {
+ return 0
+}
+
+func (any *mapAny) ToUint64() uint64 {
+ return 0
+}
+
+func (any *mapAny) ToFloat32() float32 {
+ return 0
+}
+
+func (any *mapAny) ToFloat64() float64 {
+ return 0
+}
+
+func (any *mapAny) ToString() string {
+ str, err := MarshalToString(any.val.Interface())
+ any.err = err
+ return str
+}
+
+func (any *mapAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ switch firstPath := path[0].(type) {
+ case int32:
+ if '*' == firstPath {
+ mappedAll := map[string]Any{}
+ for _, key := range any.val.MapKeys() {
+ keyAsStr := key.String()
+ element := Wrap(any.val.MapIndex(key).Interface())
+ mapped := element.Get(path[1:]...)
+ if mapped.ValueType() != InvalidValue {
+ mappedAll[keyAsStr] = mapped
+ }
+ }
+ return wrapMap(mappedAll)
+ }
+ return newInvalidAny(path)
+ default:
+ value := any.val.MapIndex(reflect.ValueOf(firstPath))
+ if !value.IsValid() {
+ return newInvalidAny(path)
+ }
+ return Wrap(value.Interface())
+ }
+}
+
+func (any *mapAny) Keys() []string {
+ keys := make([]string, 0, any.val.Len())
+ for _, key := range any.val.MapKeys() {
+ keys = append(keys, key.String())
+ }
+ return keys
+}
+
+func (any *mapAny) Size() int {
+ return any.val.Len()
+}
+
+func (any *mapAny) WriteTo(stream *Stream) {
+ stream.WriteVal(any.val)
+}
+
+func (any *mapAny) GetInterface() interface{} {
+ return any.val.Interface()
+}
diff --git a/vendor/github.com/json-iterator/go/any_str.go b/vendor/github.com/json-iterator/go/any_str.go
new file mode 100644
index 000000000..1f12f6612
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_str.go
@@ -0,0 +1,166 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strconv"
+)
+
+type stringAny struct {
+ baseAny
+ val string
+}
+
+func (any *stringAny) Get(path ...interface{}) Any {
+ if len(path) == 0 {
+ return any
+ }
+ return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
+}
+
+func (any *stringAny) Parse() *Iterator {
+ return nil
+}
+
+func (any *stringAny) ValueType() ValueType {
+ return StringValue
+}
+
+func (any *stringAny) MustBeValid() Any {
+ return any
+}
+
+func (any *stringAny) LastError() error {
+ return nil
+}
+
+func (any *stringAny) ToBool() bool {
+ str := any.ToString()
+ if str == "0" {
+ return false
+ }
+ for _, c := range str {
+ switch c {
+ case ' ', '\n', '\r', '\t':
+ default:
+ return true
+ }
+ }
+ return false
+}
+
+func (any *stringAny) ToInt() int {
+ return int(any.ToInt64())
+
+}
+
+func (any *stringAny) ToInt32() int32 {
+ return int32(any.ToInt64())
+}
+
+func (any *stringAny) ToInt64() int64 {
+ if any.val == "" {
+ return 0
+ }
+
+ flag := 1
+ startPos := 0
+ if any.val[0] == '+' || any.val[0] == '-' {
+ startPos = 1
+ }
+
+ if any.val[0] == '-' {
+ flag = -1
+ }
+
+ endPos := startPos
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
+ return int64(flag) * parsed
+}
+
+func (any *stringAny) ToUint() uint {
+ return uint(any.ToUint64())
+}
+
+func (any *stringAny) ToUint32() uint32 {
+ return uint32(any.ToUint64())
+}
+
+func (any *stringAny) ToUint64() uint64 {
+ if any.val == "" {
+ return 0
+ }
+
+ startPos := 0
+
+ if any.val[0] == '-' {
+ return 0
+ }
+ if any.val[0] == '+' {
+ startPos = 1
+ }
+
+ endPos := startPos
+ for i := startPos; i < len(any.val); i++ {
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ break
+ }
+ }
+ parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
+ return parsed
+}
+
+func (any *stringAny) ToFloat32() float32 {
+ return float32(any.ToFloat64())
+}
+
+func (any *stringAny) ToFloat64() float64 {
+ if len(any.val) == 0 {
+ return 0
+ }
+
+ // first char invalid
+ if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
+ return 0
+ }
+
+ // extract valid num expression from string
+ // eg 123true => 123, -12.12xxa => -12.12
+ endPos := 1
+ for i := 1; i < len(any.val); i++ {
+ if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
+ endPos = i + 1
+ continue
+ }
+
+ // end position is the first char which is not digit
+ if any.val[i] >= '0' && any.val[i] <= '9' {
+ endPos = i + 1
+ } else {
+ endPos = i
+ break
+ }
+ }
+ parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
+ return parsed
+}
+
+func (any *stringAny) ToString() string {
+ return any.val
+}
+
+func (any *stringAny) WriteTo(stream *Stream) {
+ stream.WriteString(any.val)
+}
+
+func (any *stringAny) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_uint32.go b/vendor/github.com/json-iterator/go/any_uint32.go
new file mode 100644
index 000000000..656bbd33d
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint32.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint32Any struct {
+ baseAny
+ val uint32
+}
+
+func (any *uint32Any) LastError() error {
+ return nil
+}
+
+func (any *uint32Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint32Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint32Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint32Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint32Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint32Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint32Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint32Any) ToUint32() uint32 {
+ return any.val
+}
+
+func (any *uint32Any) ToUint64() uint64 {
+ return uint64(any.val)
+}
+
+func (any *uint32Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint32Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint32Any) ToString() string {
+ return strconv.FormatInt(int64(any.val), 10)
+}
+
+func (any *uint32Any) WriteTo(stream *Stream) {
+ stream.WriteUint32(any.val)
+}
+
+func (any *uint32Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint32Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/any_uint64.go b/vendor/github.com/json-iterator/go/any_uint64.go
new file mode 100644
index 000000000..7df2fce33
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/any_uint64.go
@@ -0,0 +1,74 @@
+package jsoniter
+
+import (
+ "strconv"
+)
+
+type uint64Any struct {
+ baseAny
+ val uint64
+}
+
+func (any *uint64Any) LastError() error {
+ return nil
+}
+
+func (any *uint64Any) ValueType() ValueType {
+ return NumberValue
+}
+
+func (any *uint64Any) MustBeValid() Any {
+ return any
+}
+
+func (any *uint64Any) ToBool() bool {
+ return any.val != 0
+}
+
+func (any *uint64Any) ToInt() int {
+ return int(any.val)
+}
+
+func (any *uint64Any) ToInt32() int32 {
+ return int32(any.val)
+}
+
+func (any *uint64Any) ToInt64() int64 {
+ return int64(any.val)
+}
+
+func (any *uint64Any) ToUint() uint {
+ return uint(any.val)
+}
+
+func (any *uint64Any) ToUint32() uint32 {
+ return uint32(any.val)
+}
+
+func (any *uint64Any) ToUint64() uint64 {
+ return any.val
+}
+
+func (any *uint64Any) ToFloat32() float32 {
+ return float32(any.val)
+}
+
+func (any *uint64Any) ToFloat64() float64 {
+ return float64(any.val)
+}
+
+func (any *uint64Any) ToString() string {
+ return strconv.FormatUint(any.val, 10)
+}
+
+func (any *uint64Any) WriteTo(stream *Stream) {
+ stream.WriteUint64(any.val)
+}
+
+func (any *uint64Any) Parse() *Iterator {
+ return nil
+}
+
+func (any *uint64Any) GetInterface() interface{} {
+ return any.val
+}
diff --git a/vendor/github.com/json-iterator/go/build.sh b/vendor/github.com/json-iterator/go/build.sh
new file mode 100644
index 000000000..b45ef6883
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/build.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+set -x
+
+if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
+ mkdir -p /tmp/build-golang/src/github.com/json-iterator
+ ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
+fi
+export GOPATH=/tmp/build-golang
+go get -u github.com/golang/dep/cmd/dep
+cd /tmp/build-golang/src/github.com/json-iterator/go
+exec $GOPATH/bin/dep ensure -update
diff --git a/vendor/github.com/json-iterator/go/config.go b/vendor/github.com/json-iterator/go/config.go
new file mode 100644
index 000000000..2adcdc3b7
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/config.go
@@ -0,0 +1,375 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "reflect"
+ "sync"
+ "unsafe"
+
+ "github.com/modern-go/concurrent"
+ "github.com/modern-go/reflect2"
+)
+
+// Config customize how the API should behave.
+// The API is created from Config by Froze.
+type Config struct {
+ IndentionStep int
+ MarshalFloatWith6Digits bool
+ EscapeHTML bool
+ SortMapKeys bool
+ UseNumber bool
+ DisallowUnknownFields bool
+ TagKey string
+ OnlyTaggedField bool
+ ValidateJsonRawMessage bool
+ ObjectFieldMustBeSimpleString bool
+ CaseSensitive bool
+}
+
+// API the public interface of this package.
+// Primary Marshal and Unmarshal.
+type API interface {
+ IteratorPool
+ StreamPool
+ MarshalToString(v interface{}) (string, error)
+ Marshal(v interface{}) ([]byte, error)
+ MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
+ UnmarshalFromString(str string, v interface{}) error
+ Unmarshal(data []byte, v interface{}) error
+ Get(data []byte, path ...interface{}) Any
+ NewEncoder(writer io.Writer) *Encoder
+ NewDecoder(reader io.Reader) *Decoder
+ Valid(data []byte) bool
+ RegisterExtension(extension Extension)
+ DecoderOf(typ reflect2.Type) ValDecoder
+ EncoderOf(typ reflect2.Type) ValEncoder
+}
+
+// ConfigDefault the default API
+var ConfigDefault = Config{
+ EscapeHTML: true,
+}.Froze()
+
+// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
+var ConfigCompatibleWithStandardLibrary = Config{
+ EscapeHTML: true,
+ SortMapKeys: true,
+ ValidateJsonRawMessage: true,
+}.Froze()
+
+// ConfigFastest marshals float with only 6 digits precision
+var ConfigFastest = Config{
+ EscapeHTML: false,
+ MarshalFloatWith6Digits: true, // will lose precession
+ ObjectFieldMustBeSimpleString: true, // do not unescape object field
+}.Froze()
+
+type frozenConfig struct {
+ configBeforeFrozen Config
+ sortMapKeys bool
+ indentionStep int
+ objectFieldMustBeSimpleString bool
+ onlyTaggedField bool
+ disallowUnknownFields bool
+ decoderCache *concurrent.Map
+ encoderCache *concurrent.Map
+ encoderExtension Extension
+ decoderExtension Extension
+ extraExtensions []Extension
+ streamPool *sync.Pool
+ iteratorPool *sync.Pool
+ caseSensitive bool
+}
+
+func (cfg *frozenConfig) initCache() {
+ cfg.decoderCache = concurrent.NewMap()
+ cfg.encoderCache = concurrent.NewMap()
+}
+
+func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
+ cfg.decoderCache.Store(cacheKey, decoder)
+}
+
+func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
+ cfg.encoderCache.Store(cacheKey, encoder)
+}
+
+func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
+ decoder, found := cfg.decoderCache.Load(cacheKey)
+ if found {
+ return decoder.(ValDecoder)
+ }
+ return nil
+}
+
+func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
+ encoder, found := cfg.encoderCache.Load(cacheKey)
+ if found {
+ return encoder.(ValEncoder)
+ }
+ return nil
+}
+
+var cfgCache = concurrent.NewMap()
+
+func getFrozenConfigFromCache(cfg Config) *frozenConfig {
+ obj, found := cfgCache.Load(cfg)
+ if found {
+ return obj.(*frozenConfig)
+ }
+ return nil
+}
+
+func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
+ cfgCache.Store(cfg, frozenConfig)
+}
+
+// Froze forge API from config
+func (cfg Config) Froze() API {
+ api := &frozenConfig{
+ sortMapKeys: cfg.SortMapKeys,
+ indentionStep: cfg.IndentionStep,
+ objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
+ onlyTaggedField: cfg.OnlyTaggedField,
+ disallowUnknownFields: cfg.DisallowUnknownFields,
+ caseSensitive: cfg.CaseSensitive,
+ }
+ api.streamPool = &sync.Pool{
+ New: func() interface{} {
+ return NewStream(api, nil, 512)
+ },
+ }
+ api.iteratorPool = &sync.Pool{
+ New: func() interface{} {
+ return NewIterator(api)
+ },
+ }
+ api.initCache()
+ encoderExtension := EncoderExtension{}
+ decoderExtension := DecoderExtension{}
+ if cfg.MarshalFloatWith6Digits {
+ api.marshalFloatWith6Digits(encoderExtension)
+ }
+ if cfg.EscapeHTML {
+ api.escapeHTML(encoderExtension)
+ }
+ if cfg.UseNumber {
+ api.useNumber(decoderExtension)
+ }
+ if cfg.ValidateJsonRawMessage {
+ api.validateJsonRawMessage(encoderExtension)
+ }
+ api.encoderExtension = encoderExtension
+ api.decoderExtension = decoderExtension
+ api.configBeforeFrozen = cfg
+ return api
+}
+
+func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
+ api := getFrozenConfigFromCache(cfg)
+ if api != nil {
+ return api
+ }
+ api = cfg.Froze().(*frozenConfig)
+ for _, extension := range extraExtensions {
+ api.RegisterExtension(extension)
+ }
+ addFrozenConfigToCache(cfg, api)
+ return api
+}
+
+func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
+ encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
+ rawMessage := *(*json.RawMessage)(ptr)
+ iter := cfg.BorrowIterator([]byte(rawMessage))
+ defer cfg.ReturnIterator(iter)
+ iter.Read()
+ if iter.Error != nil && iter.Error != io.EOF {
+ stream.WriteRaw("null")
+ } else {
+ stream.WriteRaw(string(rawMessage))
+ }
+ }, func(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+ }}
+ extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
+ extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
+}
+
+func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
+ extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
+ exitingValue := *((*interface{})(ptr))
+ if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
+ iter.ReadVal(exitingValue)
+ return
+ }
+ if iter.WhatIsNext() == NumberValue {
+ *((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
+ } else {
+ *((*interface{})(ptr)) = iter.Read()
+ }
+ }}
+}
+func (cfg *frozenConfig) getTagKey() string {
+ tagKey := cfg.configBeforeFrozen.TagKey
+ if tagKey == "" {
+ return "json"
+ }
+ return tagKey
+}
+
+func (cfg *frozenConfig) RegisterExtension(extension Extension) {
+ cfg.extraExtensions = append(cfg.extraExtensions, extension)
+ copied := cfg.configBeforeFrozen
+ cfg.configBeforeFrozen = copied
+}
+
+type lossyFloat32Encoder struct {
+}
+
+func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32Lossy(*((*float32)(ptr)))
+}
+
+func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type lossyFloat64Encoder struct {
+}
+
+func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64Lossy(*((*float64)(ptr)))
+}
+
+func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+// EnableLossyFloatMarshalling keeps 10**(-6) precision
+// for float variables for better performance.
+func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
+ // for better performance
+ extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
+ extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
+}
+
+type htmlEscapedStringEncoder struct {
+}
+
+func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteStringWithHTMLEscaped(str)
+}
+
+func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
+ encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
+}
+
+func (cfg *frozenConfig) cleanDecoders() {
+ typeDecoders = map[string]ValDecoder{}
+ fieldDecoders = map[string]ValDecoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) cleanEncoders() {
+ typeEncoders = map[string]ValEncoder{}
+ fieldEncoders = map[string]ValEncoder{}
+ *cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
+}
+
+func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return "", stream.Error
+ }
+ return string(stream.Buffer()), nil
+}
+
+func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
+ stream := cfg.BorrowStream(nil)
+ defer cfg.ReturnStream(stream)
+ stream.WriteVal(v)
+ if stream.Error != nil {
+ return nil, stream.Error
+ }
+ result := stream.Buffer()
+ copied := make([]byte, len(result))
+ copy(copied, result)
+ return copied, nil
+}
+
+func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ if prefix != "" {
+ panic("prefix is not supported")
+ }
+ for _, r := range indent {
+ if r != ' ' {
+ panic("indent can only be space")
+ }
+ }
+ newCfg := cfg.configBeforeFrozen
+ newCfg.IndentionStep = len(indent)
+ return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
+}
+
+func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
+ data := []byte(str)
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ return locatePath(iter, path)
+}
+
+func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.ReadVal(v)
+ c := iter.nextToken()
+ if c == 0 {
+ if iter.Error == io.EOF {
+ return nil
+ }
+ return iter.Error
+ }
+ iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
+ return iter.Error
+}
+
+func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
+ stream := NewStream(cfg, writer, 512)
+ return &Encoder{stream}
+}
+
+func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
+ iter := Parse(cfg, reader, 512)
+ return &Decoder{iter}
+}
+
+func (cfg *frozenConfig) Valid(data []byte) bool {
+ iter := cfg.BorrowIterator(data)
+ defer cfg.ReturnIterator(iter)
+ iter.Skip()
+ return iter.Error == nil
+}
diff --git a/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
new file mode 100644
index 000000000..3095662b0
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md
@@ -0,0 +1,7 @@
+| json type \ dest type | bool | int | uint | float |string|
+| --- | --- | --- | --- |--|--|
+| number | positive => true <br/> negative => true <br/> zero => false| 23.2 => 23 <br/> -32.1 => -32| 12.1 => 12 <br/> -12.1 => 0|as normal|same as origin|
+| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32| 13.2 => 13 <br/> -1.1 => 0 |12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 =>110 |same as origin|
+| bool | true => true <br/> false => false| true => 1 <br/> false => 0 | true => 1 <br/> false => 0 |true => 1 <br/> false => 0|true => "true" <br/> false => "false"|
+| object | true | 0 | 0 |0|originnal json|
+| array | empty array => false <br/> nonempty array => true| [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 |[] => 0 <br/> [1,2] => 1|original json|
\ No newline at end of file
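The table above documents the fuzzy conversions implemented by the `ToXxx` methods (for example `stringAny.ToInt64` and `arrayLazyAny.ToBool`). A few of its rows exercised directly through the exported helpers (inputs are illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// string row: the leading numeric prefix is parsed, the rest is ignored.
	fmt.Println(jsoniter.WrapString("123.23xxxw").ToInt()) // 123
	fmt.Println(jsoniter.WrapString("-12.3").ToFloat64())  // -12.3
	fmt.Println(jsoniter.WrapString("0").ToBool())         // false

	// array row: emptiness decides the bool/number conversions.
	fmt.Println(jsoniter.ConfigDefault.Get([]byte(`[]`)).ToBool())   // false
	fmt.Println(jsoniter.ConfigDefault.Get([]byte(`[1,2]`)).ToInt()) // 1
}
```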
diff --git a/vendor/github.com/json-iterator/go/iter.go b/vendor/github.com/json-iterator/go/iter.go
new file mode 100644
index 000000000..29b31cf78
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter.go
@@ -0,0 +1,349 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+)
+
+// ValueType the type for JSON element
+type ValueType int
+
+const (
+ // InvalidValue invalid JSON element
+ InvalidValue ValueType = iota
+ // StringValue JSON element "string"
+ StringValue
+ // NumberValue JSON element 100 or 0.10
+ NumberValue
+ // NilValue JSON element null
+ NilValue
+ // BoolValue JSON element true or false
+ BoolValue
+ // ArrayValue JSON element []
+ ArrayValue
+ // ObjectValue JSON element {}
+ ObjectValue
+)
+
+var hexDigits []byte
+var valueTypes []ValueType
+
+func init() {
+ hexDigits = make([]byte, 256)
+ for i := 0; i < len(hexDigits); i++ {
+ hexDigits[i] = 255
+ }
+ for i := '0'; i <= '9'; i++ {
+ hexDigits[i] = byte(i - '0')
+ }
+ for i := 'a'; i <= 'f'; i++ {
+ hexDigits[i] = byte((i - 'a') + 10)
+ }
+ for i := 'A'; i <= 'F'; i++ {
+ hexDigits[i] = byte((i - 'A') + 10)
+ }
+ valueTypes = make([]ValueType, 256)
+ for i := 0; i < len(valueTypes); i++ {
+ valueTypes[i] = InvalidValue
+ }
+ valueTypes['"'] = StringValue
+ valueTypes['-'] = NumberValue
+ valueTypes['0'] = NumberValue
+ valueTypes['1'] = NumberValue
+ valueTypes['2'] = NumberValue
+ valueTypes['3'] = NumberValue
+ valueTypes['4'] = NumberValue
+ valueTypes['5'] = NumberValue
+ valueTypes['6'] = NumberValue
+ valueTypes['7'] = NumberValue
+ valueTypes['8'] = NumberValue
+ valueTypes['9'] = NumberValue
+ valueTypes['t'] = BoolValue
+ valueTypes['f'] = BoolValue
+ valueTypes['n'] = NilValue
+ valueTypes['['] = ArrayValue
+ valueTypes['{'] = ObjectValue
+}
+
+// Iterator is a io.Reader like object, with JSON specific read functions.
+// Error is not returned as return value, but stored as Error member on this iterator instance.
+type Iterator struct {
+ cfg *frozenConfig
+ reader io.Reader
+ buf []byte
+ head int
+ tail int
+ depth int
+ captureStartedAt int
+ captured []byte
+ Error error
+ Attachment interface{} // open for customized decoder
+}
+
+// NewIterator creates an empty Iterator instance
+func NewIterator(cfg API) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: nil,
+ head: 0,
+ tail: 0,
+ depth: 0,
+ }
+}
+
+// Parse creates an Iterator instance from io.Reader
+func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: reader,
+ buf: make([]byte, bufSize),
+ head: 0,
+ tail: 0,
+ depth: 0,
+ }
+}
+
+// ParseBytes creates an Iterator instance from byte array
+func ParseBytes(cfg API, input []byte) *Iterator {
+ return &Iterator{
+ cfg: cfg.(*frozenConfig),
+ reader: nil,
+ buf: input,
+ head: 0,
+ tail: len(input),
+ depth: 0,
+ }
+}
+
+// ParseString creates an Iterator instance from string
+func ParseString(cfg API, input string) *Iterator {
+ return ParseBytes(cfg, []byte(input))
+}
+
+// Pool returns a pool can provide more iterator with same configuration
+func (iter *Iterator) Pool() IteratorPool {
+ return iter.cfg
+}
+
+// Reset reuse iterator instance by specifying another reader
+func (iter *Iterator) Reset(reader io.Reader) *Iterator {
+ iter.reader = reader
+ iter.head = 0
+ iter.tail = 0
+ iter.depth = 0
+ return iter
+}
+
+// ResetBytes reuse iterator instance by specifying another byte array as input
+func (iter *Iterator) ResetBytes(input []byte) *Iterator {
+ iter.reader = nil
+ iter.buf = input
+ iter.head = 0
+ iter.tail = len(input)
+ iter.depth = 0
+ return iter
+}
+
+// WhatIsNext gets ValueType of relatively next json element
+func (iter *Iterator) WhatIsNext() ValueType {
+ valueType := valueTypes[iter.nextToken()]
+ iter.unreadByte()
+ return valueType
+}
+
+func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i
+ return false
+ }
+ return true
+}
+
+func (iter *Iterator) isObjectEnd() bool {
+ c := iter.nextToken()
+ if c == ',' {
+ return false
+ }
+ if c == '}' {
+ return true
+ }
+ iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
+ return true
+}
+
+func (iter *Iterator) nextToken() byte {
+ // a variation of skip whitespaces, returning the next non-whitespace token
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\t', '\r':
+ continue
+ }
+ iter.head = i + 1
+ return c
+ }
+ if !iter.loadMore() {
+ return 0
+ }
+ }
+}
+
+// ReportError record a error in iterator instance with current position.
+func (iter *Iterator) ReportError(operation string, msg string) {
+ if iter.Error != nil {
+ if iter.Error != io.EOF {
+ return
+ }
+ }
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ peekEnd := iter.head + 10
+ if peekEnd > iter.tail {
+ peekEnd = iter.tail
+ }
+ parsing := string(iter.buf[peekStart:peekEnd])
+ contextStart := iter.head - 50
+ if contextStart < 0 {
+ contextStart = 0
+ }
+ contextEnd := iter.head + 50
+ if contextEnd > iter.tail {
+ contextEnd = iter.tail
+ }
+ context := string(iter.buf[contextStart:contextEnd])
+ iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
+ operation, msg, iter.head-peekStart, parsing, context)
+}
+
+// CurrentBuffer gets current buffer as string for debugging purpose
+func (iter *Iterator) CurrentBuffer() string {
+ peekStart := iter.head - 10
+ if peekStart < 0 {
+ peekStart = 0
+ }
+ return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
+ string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
+}
+
+func (iter *Iterator) readByte() (ret byte) {
+ if iter.head == iter.tail {
+ if iter.loadMore() {
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+ }
+ return 0
+ }
+ ret = iter.buf[iter.head]
+ iter.head++
+ return ret
+}
+
+func (iter *Iterator) loadMore() bool {
+ if iter.reader == nil {
+ if iter.Error == nil {
+ iter.head = iter.tail
+ iter.Error = io.EOF
+ }
+ return false
+ }
+ if iter.captured != nil {
+ iter.captured = append(iter.captured,
+ iter.buf[iter.captureStartedAt:iter.tail]...)
+ iter.captureStartedAt = 0
+ }
+ for {
+ n, err := iter.reader.Read(iter.buf)
+ if n == 0 {
+ if err != nil {
+ if iter.Error == nil {
+ iter.Error = err
+ }
+ return false
+ }
+ } else {
+ iter.head = 0
+ iter.tail = n
+ return true
+ }
+ }
+}
+
+func (iter *Iterator) unreadByte() {
+ if iter.Error != nil {
+ return
+ }
+ iter.head--
+ return
+}
+
+// Read read the next JSON element as generic interface{}.
+func (iter *Iterator) Read() interface{} {
+ valueType := iter.WhatIsNext()
+ switch valueType {
+ case StringValue:
+ return iter.ReadString()
+ case NumberValue:
+ if iter.cfg.configBeforeFrozen.UseNumber {
+ return json.Number(iter.readNumberAsString())
+ }
+ return iter.ReadFloat64()
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ return nil
+ case BoolValue:
+ return iter.ReadBool()
+ case ArrayValue:
+ arr := []interface{}{}
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ arr = append(arr, elem)
+ return true
+ })
+ return arr
+ case ObjectValue:
+ obj := map[string]interface{}{}
+ iter.ReadMapCB(func(Iter *Iterator, field string) bool {
+ var elem interface{}
+ iter.ReadVal(&elem)
+ obj[field] = elem
+ return true
+ })
+ return obj
+ default:
+ iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
+ return nil
+ }
+}
+
+// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
+const maxDepth = 10000
+
+func (iter *Iterator) incrementDepth() (success bool) {
+ iter.depth++
+ if iter.depth <= maxDepth {
+ return true
+ }
+ iter.ReportError("incrementDepth", "exceeded max depth")
+ return false
+}
+
+func (iter *Iterator) decrementDepth() (success bool) {
+ iter.depth--
+ if iter.depth >= 0 {
+ return true
+ }
+ iter.ReportError("decrementDepth", "unexpected negative nesting")
+ return false
+}
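`Iterator` reads JSON incrementally: `WhatIsNext` peeks at the next token's `ValueType`, `Read` consumes it as a generic value, errors accumulate on `iter.Error`, and the depth counters cap nesting at `maxDepth`. A short sketch driving it by hand through the constructors above (input is illustrative):

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"n": 1, "tags": ["a", "b"]}`)

	// WhatIsNext only peeks; Read consumes the element as a generic value.
	fmt.Println(iter.WhatIsNext() == jsoniter.ObjectValue) // true
	v := iter.Read().(map[string]interface{})
	fmt.Println(v["n"], v["tags"]) // 1 [a b]

	// Parse errors are accumulated here rather than returned per call.
	fmt.Println(iter.Error) // <nil>
}
```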
diff --git a/vendor/github.com/json-iterator/go/iter_array.go b/vendor/github.com/json-iterator/go/iter_array.go
new file mode 100644
index 000000000..204fe0e09
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_array.go
@@ -0,0 +1,64 @@
+package jsoniter
+
+// ReadArray read array element, tells if the array has more element to read.
+func (iter *Iterator) ReadArray() (ret bool) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false // null
+ case '[':
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ return true
+ }
+ return false
+ case ']':
+ return false
+ case ',':
+ return true
+ default:
+ iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
+ return
+ }
+}
+
+// ReadArrayCB read array with callback
+func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
+ c := iter.nextToken()
+ if c == '[' {
+ if !iter.incrementDepth() {
+ return false
+ }
+ c = iter.nextToken()
+ if c != ']' {
+ iter.unreadByte()
+ if !callback(iter) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ if !callback(iter) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != ']' {
+ iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
+ iter.decrementDepth()
+ return false
+ }
+ return iter.decrementDepth()
+ }
+ return iter.decrementDepth()
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
+ return false
+}
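
Both array-reading styles above, as a short hedged sketch; ParseString and ConfigDefault are again assumed from the rest of this vendored package:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Explicit loop: ReadArray reports whether another element follows.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1, 2, 3]`)
	sum := 0
	for iter.ReadArray() {
		sum += iter.ReadInt()
	}

	// Callback form: ReadArrayCB drives the loop and tracks nesting depth.
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `[4, 5, 6]`)
	iter.ReadArrayCB(func(it *jsoniter.Iterator) bool {
		sum += it.ReadInt()
		return true // keep iterating
	})
	fmt.Println(sum) // 21
}
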
diff --git a/vendor/github.com/json-iterator/go/iter_float.go b/vendor/github.com/json-iterator/go/iter_float.go
new file mode 100644
index 000000000..8a3d8b6fb
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_float.go
@@ -0,0 +1,342 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "io"
+ "math/big"
+ "strconv"
+ "strings"
+ "unsafe"
+)
+
+var floatDigits []int8
+
+const invalidCharForNumber = int8(-1)
+const endOfNumber = int8(-2)
+const dotInNumber = int8(-3)
+
+func init() {
+ floatDigits = make([]int8, 256)
+ for i := 0; i < len(floatDigits); i++ {
+ floatDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ floatDigits[i] = i - int8('0')
+ }
+ floatDigits[','] = endOfNumber
+ floatDigits[']'] = endOfNumber
+ floatDigits['}'] = endOfNumber
+ floatDigits[' '] = endOfNumber
+ floatDigits['\t'] = endOfNumber
+ floatDigits['\n'] = endOfNumber
+ floatDigits['.'] = dotInNumber
+}
+
+// ReadBigFloat reads a big.Float
+func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ prec := 64
+ if len(str) > prec {
+ prec = len(str)
+ }
+ val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
+ if err != nil {
+ iter.Error = err
+ return nil
+ }
+ return val
+}
+
+// ReadBigInt reads a big.Int
+func (iter *Iterator) ReadBigInt() (ret *big.Int) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return nil
+ }
+ ret = big.NewInt(0)
+ var success bool
+ ret, success = ret.SetString(str, 10)
+ if !success {
+ iter.ReportError("ReadBigInt", "invalid big int")
+ return nil
+ }
+ return ret
+}
+
+// ReadFloat32 reads a float32
+func (iter *Iterator) ReadFloat32() (ret float32) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat32()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat32()
+}
+
+func (iter *Iterator) readPositiveFloat32() (ret float32) {
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c := iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat32", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat32", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat32", "leading zero is invalid")
+ return
+ }
+ }
+ value := uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat32SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float32(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat32SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float32(float64(value) / float64(pow10[decimalPlaces]))
+ }
+ // too many decimal places
+ return iter.readFloat32SlowPath()
+ case invalidCharForNumber, dotInNumber:
+ return iter.readFloat32SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat32SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ }
+ return iter.readFloat32SlowPath()
+}
+
+func (iter *Iterator) readNumberAsString() (ret string) {
+ strBuf := [16]byte{}
+ str := strBuf[0:0]
+load_loop:
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ str = append(str, c)
+ continue
+ default:
+ iter.head = i
+ break load_loop
+ }
+ }
+ if !iter.loadMore() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ if len(str) == 0 {
+ iter.ReportError("readNumberAsString", "invalid number")
+ }
+ return *(*string)(unsafe.Pointer(&str))
+}
+
+func (iter *Iterator) readFloat32SlowPath() (ret float32) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat32SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 32)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return float32(val)
+}
+
+// ReadFloat64 reads a float64
+func (iter *Iterator) ReadFloat64() (ret float64) {
+ c := iter.nextToken()
+ if c == '-' {
+ return -iter.readPositiveFloat64()
+ }
+ iter.unreadByte()
+ return iter.readPositiveFloat64()
+}
+
+func (iter *Iterator) readPositiveFloat64() (ret float64) {
+ i := iter.head
+ // first char
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c := iter.buf[i]
+ i++
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.ReportError("readFloat64", "empty number")
+ return
+ case dotInNumber:
+ iter.ReportError("readFloat64", "leading dot is invalid")
+ return
+ case 0:
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ c = iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.ReportError("readFloat64", "leading zero is invalid")
+ return
+ }
+ }
+ value := uint64(ind)
+ // chars before dot
+non_decimal_loop:
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case invalidCharForNumber:
+ return iter.readFloat64SlowPath()
+ case endOfNumber:
+ iter.head = i
+ return float64(value)
+ case dotInNumber:
+ break non_decimal_loop
+ }
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
+ }
+ // chars after dot
+ if c == '.' {
+ i++
+ decimalPlaces := 0
+ if i == iter.tail {
+ return iter.readFloat64SlowPath()
+ }
+ for ; i < iter.tail; i++ {
+ c = iter.buf[i]
+ ind := floatDigits[c]
+ switch ind {
+ case endOfNumber:
+ if decimalPlaces > 0 && decimalPlaces < len(pow10) {
+ iter.head = i
+ return float64(value) / float64(pow10[decimalPlaces])
+ }
+ // too many decimal places
+ return iter.readFloat64SlowPath()
+ case invalidCharForNumber, dotInNumber:
+ return iter.readFloat64SlowPath()
+ }
+ decimalPlaces++
+ if value > uint64SafeToMultiple10 {
+ return iter.readFloat64SlowPath()
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ if value > maxFloat64 {
+ return iter.readFloat64SlowPath()
+ }
+ }
+ }
+ return iter.readFloat64SlowPath()
+}
+
+func (iter *Iterator) readFloat64SlowPath() (ret float64) {
+ str := iter.readNumberAsString()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ errMsg := validateFloat(str)
+ if errMsg != "" {
+ iter.ReportError("readFloat64SlowPath", errMsg)
+ return
+ }
+ val, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ iter.Error = err
+ return
+ }
+ return val
+}
+
+func validateFloat(str string) string {
+ // strconv.ParseFloat is not validating `1.` or `1.e1`
+ if len(str) == 0 {
+ return "empty number"
+ }
+ if str[0] == '-' {
+ return "-- is not valid"
+ }
+ dotPos := strings.IndexByte(str, '.')
+ if dotPos != -1 {
+ if dotPos == len(str)-1 {
+ return "dot can not be last character"
+ }
+ switch str[dotPos+1] {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ return "missing digit after dot"
+ }
+ }
+ return ""
+}
+
+// ReadNumber reads a json.Number
+func (iter *Iterator) ReadNumber() (ret json.Number) {
+ return json.Number(iter.readNumberAsString())
+}
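
A sketch of the number readers above: ReadFloat64 takes the in-buffer fast path when it can, while ReadNumber and ReadBigFloat keep the full textual precision. ParseString and ConfigDefault are assumed from other files of this package:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Fast path: digits are parsed in place; long or unusual inputs fall back
	// to strconv.ParseFloat via the slow path.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `3.14159`)
	fmt.Println(iter.ReadFloat64())

	// When exactness matters, keep the textual form or use big types.
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `123456789.123456789123456789`)
	fmt.Println(iter.ReadNumber()) // json.Number, digits preserved

	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `123456789.123456789123456789`)
	fmt.Println(iter.ReadBigFloat().Text('f', 20))
}
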
diff --git a/vendor/github.com/json-iterator/go/iter_int.go b/vendor/github.com/json-iterator/go/iter_int.go
new file mode 100644
index 000000000..d786a89fe
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_int.go
@@ -0,0 +1,346 @@
+package jsoniter
+
+import (
+ "math"
+ "strconv"
+)
+
+var intDigits []int8
+
+const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
+const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
+const maxFloat64 = 1<<53 - 1
+
+func init() {
+ intDigits = make([]int8, 256)
+ for i := 0; i < len(intDigits); i++ {
+ intDigits[i] = invalidCharForNumber
+ }
+ for i := int8('0'); i <= int8('9'); i++ {
+ intDigits[i] = i - int8('0')
+ }
+}
+
+// ReadUint reads a uint
+func (iter *Iterator) ReadUint() uint {
+ if strconv.IntSize == 32 {
+ return uint(iter.ReadUint32())
+ }
+ return uint(iter.ReadUint64())
+}
+
+// ReadInt reads an int
+func (iter *Iterator) ReadInt() int {
+ if strconv.IntSize == 32 {
+ return int(iter.ReadInt32())
+ }
+ return int(iter.ReadInt64())
+}
+
+// ReadInt8 reads an int8
+func (iter *Iterator) ReadInt8() (ret int8) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt8+1 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int8(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt8 {
+ iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int8(val)
+}
+
+// ReadUint8 reads a uint8
+func (iter *Iterator) ReadUint8() (ret uint8) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint8 {
+ iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint8(val)
+}
+
+// ReadInt16 reads an int16
+func (iter *Iterator) ReadInt16() (ret int16) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt16+1 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int16(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt16 {
+ iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int16(val)
+}
+
+// ReadUint16 reads a uint16
+func (iter *Iterator) ReadUint16() (ret uint16) {
+ val := iter.readUint32(iter.nextToken())
+ if val > math.MaxUint16 {
+ iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return uint16(val)
+}
+
+// ReadInt32 reads an int32
+func (iter *Iterator) ReadInt32() (ret int32) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint32(iter.readByte())
+ if val > math.MaxInt32+1 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return -int32(val)
+ }
+ val := iter.readUint32(c)
+ if val > math.MaxInt32 {
+ iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
+ return
+ }
+ return int32(val)
+}
+
+// ReadUint32 reads a uint32
+func (iter *Iterator) ReadUint32() (ret uint32) {
+ return iter.readUint32(iter.nextToken())
+}
+
+func (iter *Iterator) readUint32(c byte) (ret uint32) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint32(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint32(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint32(ind2)*10 + uint32(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint32SafeToMultiply10 {
+ value2 := (value << 3) + (value << 1) + uint32(ind)
+ if value2 < value {
+ iter.ReportError("readUint32", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint32(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+// ReadInt64 reads an int64
+func (iter *Iterator) ReadInt64() (ret int64) {
+ c := iter.nextToken()
+ if c == '-' {
+ val := iter.readUint64(iter.readByte())
+ if val > math.MaxInt64+1 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return -int64(val)
+ }
+ val := iter.readUint64(c)
+ if val > math.MaxInt64 {
+ iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
+ return
+ }
+ return int64(val)
+}
+
+// ReadUint64 reads a uint64
+func (iter *Iterator) ReadUint64() uint64 {
+ return iter.readUint64(iter.nextToken())
+}
+
+func (iter *Iterator) readUint64(c byte) (ret uint64) {
+ ind := intDigits[c]
+ if ind == 0 {
+ iter.assertInteger()
+ return 0 // single zero
+ }
+ if ind == invalidCharForNumber {
+ iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
+ return
+ }
+ value := uint64(ind)
+ if iter.tail-iter.head > 10 {
+ i := iter.head
+ ind2 := intDigits[iter.buf[i]]
+ if ind2 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ i++
+ ind3 := intDigits[iter.buf[i]]
+ if ind3 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10 + uint64(ind2)
+ }
+ //iter.head = i + 1
+ //value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
+ i++
+ ind4 := intDigits[iter.buf[i]]
+ if ind4 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100 + uint64(ind2)*10 + uint64(ind3)
+ }
+ i++
+ ind5 := intDigits[iter.buf[i]]
+ if ind5 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
+ }
+ i++
+ ind6 := intDigits[iter.buf[i]]
+ if ind6 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
+ }
+ i++
+ ind7 := intDigits[iter.buf[i]]
+ if ind7 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
+ }
+ i++
+ ind8 := intDigits[iter.buf[i]]
+ if ind8 == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
+ }
+ i++
+ ind9 := intDigits[iter.buf[i]]
+ value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
+ iter.head = i
+ if ind9 == invalidCharForNumber {
+ iter.assertInteger()
+ return value
+ }
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ ind = intDigits[iter.buf[i]]
+ if ind == invalidCharForNumber {
+ iter.head = i
+ iter.assertInteger()
+ return value
+ }
+ if value > uint64SafeToMultiple10 {
+ value2 := (value << 3) + (value << 1) + uint64(ind)
+ if value2 < value {
+ iter.ReportError("readUint64", "overflow")
+ return
+ }
+ value = value2
+ continue
+ }
+ value = (value << 3) + (value << 1) + uint64(ind)
+ }
+ if !iter.loadMore() {
+ iter.assertInteger()
+ return value
+ }
+ }
+}
+
+func (iter *Iterator) assertInteger() {
+ if iter.head < iter.tail && iter.buf[iter.head] == '.' {
+ iter.ReportError("assertInteger", "can not decode float as int")
+ }
+}
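
A sketch showing how the integer readers above surface overflow and float input through iter.Error (assuming ParseString and ConfigDefault from other files of this package):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `12345`)
	fmt.Println(iter.ReadInt32()) // 12345

	// Out-of-range values are reported via iter.Error rather than silently wrapping.
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `300`)
	fmt.Println(iter.ReadUint8()) // 0
	fmt.Println(iter.Error)       // reports a ReadUint8 overflow

	// Floats are rejected by the integer readers (see assertInteger above).
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `1.5`)
	iter.ReadInt()
	fmt.Println(iter.Error)
}
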
diff --git a/vendor/github.com/json-iterator/go/iter_object.go b/vendor/github.com/json-iterator/go/iter_object.go
new file mode 100644
index 000000000..58ee89c84
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_object.go
@@ -0,0 +1,267 @@
+package jsoniter
+
+import (
+ "fmt"
+ "strings"
+)
+
+// ReadObject reads one field from an object.
+// If the object has ended, it returns an empty string.
+// Otherwise, it returns the field name.
+func (iter *Iterator) ReadObject() (ret string) {
+ c := iter.nextToken()
+ switch c {
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l')
+ return "" // null
+ case '{':
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ }
+ if c == '}' {
+ return "" // end of object
+ }
+ iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
+ return
+ case ',':
+ field := iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ return field
+ case '}':
+ return "" // end of object
+ default:
+ iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
+ return
+ }
+}
+
+// readFieldHash hashes the next object field name (case-insensitively unless the config is case sensitive).
+func (iter *Iterator) readFieldHash() int64 {
+ hash := int64(0x811c9dc5)
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
+ return 0
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ b := iter.buf[i]
+ if b == '\\' {
+ iter.head = i
+ for _, b := range iter.readStringSlowPath() {
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if b == '"' {
+ iter.head = i + 1
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
+ return 0
+ }
+ return hash
+ }
+ if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
+ b += 'a' - 'A'
+ }
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ if !iter.loadMore() {
+ iter.ReportError("readFieldHash", `incomplete field name`)
+ return 0
+ }
+ }
+}
+
+func calcHash(str string, caseSensitive bool) int64 {
+ if !caseSensitive {
+ str = strings.ToLower(str)
+ }
+ hash := int64(0x811c9dc5)
+ for _, b := range []byte(str) {
+ hash ^= int64(b)
+ hash *= 0x1000193
+ }
+ return int64(hash)
+}
+
+// ReadObjectCB reads an object with a callback; the key must be ASCII-only and the field name is not copied.
+func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ var field string
+ if c == '{' {
+ if !iter.incrementDepth() {
+ return false
+ }
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ if !callback(iter, field) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadObjectCB", `object not ended with }`)
+ iter.decrementDepth()
+ return false
+ }
+ return iter.decrementDepth()
+ }
+ if c == '}' {
+ return iter.decrementDepth()
+ }
+ iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
+ iter.decrementDepth()
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+// ReadMapCB reads a map with a callback; the key can be any string.
+func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
+ c := iter.nextToken()
+ if c == '{' {
+ if !iter.incrementDepth() {
+ return false
+ }
+ c = iter.nextToken()
+ if c == '"' {
+ iter.unreadByte()
+ field := iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ iter.decrementDepth()
+ return false
+ }
+ if !callback(iter, field) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ for c == ',' {
+ field = iter.ReadString()
+ if iter.nextToken() != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ iter.decrementDepth()
+ return false
+ }
+ if !callback(iter, field) {
+ iter.decrementDepth()
+ return false
+ }
+ c = iter.nextToken()
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `object not ended with }`)
+ iter.decrementDepth()
+ return false
+ }
+ return iter.decrementDepth()
+ }
+ if c == '}' {
+ return iter.decrementDepth()
+ }
+ iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
+ iter.decrementDepth()
+ return false
+ }
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return true // null
+ }
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectStart() bool {
+ c := iter.nextToken()
+ if c == '{' {
+ c = iter.nextToken()
+ if c == '}' {
+ return false
+ }
+ iter.unreadByte()
+ return true
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return false
+ }
+ iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
+ return false
+}
+
+func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
+ str := iter.ReadStringAsSlice()
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if iter.buf[iter.head] != ':' {
+ iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
+ return
+ }
+ iter.head++
+ if iter.skipWhitespacesWithoutLoadMore() {
+ if ret == nil {
+ ret = make([]byte, len(str))
+ copy(ret, str)
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+ if ret == nil {
+ return str
+ }
+ return ret
+}
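
The two object-reading styles above, sketched with the same assumed helpers (ParseString, ConfigDefault) from the rest of this vendored package:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Field-by-field: ReadObject returns "" once the object is finished.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"name":"foo","count":2}`)
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch field {
		case "name":
			fmt.Println("name =", iter.ReadString())
		case "count":
			fmt.Println("count =", iter.ReadInt())
		default:
			iter.Skip()
		}
	}

	// Callback form, which also accepts escaped (non-ASCII) keys.
	iter = jsoniter.ParseString(jsoniter.ConfigDefault, `{"a":1,"b":2}`)
	total := 0
	iter.ReadMapCB(func(it *jsoniter.Iterator, key string) bool {
		total += it.ReadInt()
		return true
	})
	fmt.Println(total) // 3
}
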
diff --git a/vendor/github.com/json-iterator/go/iter_skip.go b/vendor/github.com/json-iterator/go/iter_skip.go
new file mode 100644
index 000000000..e91eefb15
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip.go
@@ -0,0 +1,130 @@
+package jsoniter
+
+import "fmt"
+
+// ReadNil reads the next JSON value as null and
+// reports whether it was null.
+func (iter *Iterator) ReadNil() (ret bool) {
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ return true
+ }
+ iter.unreadByte()
+ return false
+}
+
+// ReadBool reads the next JSON value as a bool (BoolValue).
+func (iter *Iterator) ReadBool() (ret bool) {
+ c := iter.nextToken()
+ if c == 't' {
+ iter.skipThreeBytes('r', 'u', 'e')
+ return true
+ }
+ if c == 'f' {
+ iter.skipFourBytes('a', 'l', 's', 'e')
+ return false
+ }
+ iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
+ return
+}
+
+// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
+// The []byte can be kept; it is a copy of the data.
+func (iter *Iterator) SkipAndReturnBytes() []byte {
+ iter.startCapture(iter.head)
+ iter.Skip()
+ return iter.stopCapture()
+}
+
+// SkipAndAppendBytes skips the next JSON element and appends its content to
+// buf, returning the result.
+func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
+ iter.startCaptureTo(buf, iter.head)
+ iter.Skip()
+ return iter.stopCapture()
+}
+
+func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
+ if iter.captured != nil {
+ panic("already in capture mode")
+ }
+ iter.captureStartedAt = captureStartedAt
+ iter.captured = buf
+}
+
+func (iter *Iterator) startCapture(captureStartedAt int) {
+ iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
+}
+
+func (iter *Iterator) stopCapture() []byte {
+ if iter.captured == nil {
+ panic("not in capture mode")
+ }
+ captured := iter.captured
+ remaining := iter.buf[iter.captureStartedAt:iter.head]
+ iter.captureStartedAt = -1
+ iter.captured = nil
+ return append(captured, remaining...)
+}
+
+// Skip skips a JSON value and positions the iterator at the next JSON value.
+func (iter *Iterator) Skip() {
+ c := iter.nextToken()
+ switch c {
+ case '"':
+ iter.skipString()
+ case 'n':
+ iter.skipThreeBytes('u', 'l', 'l') // null
+ case 't':
+ iter.skipThreeBytes('r', 'u', 'e') // true
+ case 'f':
+ iter.skipFourBytes('a', 'l', 's', 'e') // false
+ case '0':
+ iter.unreadByte()
+ iter.ReadFloat32()
+ case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ iter.skipNumber()
+ case '[':
+ iter.skipArray()
+ case '{':
+ iter.skipObject()
+ default:
+ iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
+ return
+ }
+}
+
+func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+ if iter.readByte() != b4 {
+ iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
+ return
+ }
+}
+
+func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
+ if iter.readByte() != b1 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b2 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+ if iter.readByte() != b3 {
+ iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
+ return
+ }
+}
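
A sketch of Skip plus SkipAndReturnBytes for lazily capturing one field as raw JSON; ParseString and ConfigDefault are assumed from other files of this package:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Capture the raw bytes of one field while skipping over it, and decode
	// the rest normally; the returned slice is a copy and can be kept.
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"meta":{"k":[1,2,3]},"id":7}`)
	var raw []byte
	var id int
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch field {
		case "meta":
			raw = iter.SkipAndReturnBytes()
		case "id":
			id = iter.ReadInt()
		default:
			iter.Skip()
		}
	}
	fmt.Println(string(raw), id) // {"k":[1,2,3]} 7
}
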
diff --git a/vendor/github.com/json-iterator/go/iter_skip_sloppy.go b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
new file mode 100644
index 000000000..9303de41e
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_sloppy.go
@@ -0,0 +1,163 @@
+//+build jsoniter_sloppy
+
+package jsoniter
+
+// sloppy but faster implementation; does not validate the input JSON
+
+func (iter *Iterator) skipNumber() {
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case ' ', '\n', '\r', '\t', ',', '}', ']':
+ iter.head = i
+ return
+ }
+ }
+ if !iter.loadMore() {
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipArray() {
+ level := 1
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '[': // If open symbol, increase level
+ level++
+ if !iter.incrementDepth() {
+ return
+ }
+ case ']': // If close symbol, decrease level
+ level--
+ if !iter.decrementDepth() {
+ return
+ }
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete array")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipObject() {
+ level := 1
+ if !iter.incrementDepth() {
+ return
+ }
+
+ for {
+ for i := iter.head; i < iter.tail; i++ {
+ switch iter.buf[i] {
+ case '"': // If inside string, skip it
+ iter.head = i + 1
+ iter.skipString()
+ i = iter.head - 1 // it will be i++ soon
+ case '{': // If open symbol, increase level
+ level++
+ if !iter.incrementDepth() {
+ return
+ }
+ case '}': // If close symbol, decrease level
+ level--
+ if !iter.decrementDepth() {
+ return
+ }
+
+ // If we have returned to the original level, we're done
+ if level == 0 {
+ iter.head = i + 1
+ return
+ }
+ }
+ }
+ if !iter.loadMore() {
+ iter.ReportError("skipObject", "incomplete object")
+ return
+ }
+ }
+}
+
+func (iter *Iterator) skipString() {
+ for {
+ end, escaped := iter.findStringEnd()
+ if end == -1 {
+ if !iter.loadMore() {
+ iter.ReportError("skipString", "incomplete string")
+ return
+ }
+ if escaped {
+ iter.head = 1 // skip the first char as last char read is \
+ }
+ } else {
+ iter.head = end
+ return
+ }
+ }
+}
+
+// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
+// Tries to find the end of a string.
+// Supports strings that contain escaped quote symbols.
+func (iter *Iterator) findStringEnd() (int, bool) {
+ escaped := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ if !escaped {
+ return i + 1, false
+ }
+ j := i - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return i + 1, true
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+ }
+ } else if c == '\\' {
+ escaped = true
+ }
+ }
+ j := iter.tail - 1
+ for {
+ if j < iter.head || iter.buf[j] != '\\' {
+ // even number of backslashes
+ // either end of buffer, or " found
+ return -1, false // do not end with \
+ }
+ j--
+ if j < iter.head || iter.buf[j] != '\\' {
+ // odd number of backslashes
+ // it is \" or \\\"
+ break
+ }
+ j--
+
+ }
+ return -1, true // end with \
+}
diff --git a/vendor/github.com/json-iterator/go/iter_skip_strict.go b/vendor/github.com/json-iterator/go/iter_skip_strict.go
new file mode 100644
index 000000000..6cf66d043
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_skip_strict.go
@@ -0,0 +1,99 @@
+//+build !jsoniter_sloppy
+
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+)
+
+func (iter *Iterator) skipNumber() {
+ if !iter.trySkipNumber() {
+ iter.unreadByte()
+ if iter.Error != nil && iter.Error != io.EOF {
+ return
+ }
+ iter.ReadFloat64()
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = nil
+ iter.ReadBigFloat()
+ }
+ }
+}
+
+func (iter *Iterator) trySkipNumber() bool {
+ dotFound := false
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ case '.':
+ if dotFound {
+ iter.ReportError("validateNumber", `more than one dot found in number`)
+ return true // already failed
+ }
+ if i+1 == iter.tail {
+ return false
+ }
+ c = iter.buf[i+1]
+ switch c {
+ case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ default:
+ iter.ReportError("validateNumber", `missing digit after dot`)
+ return true // already failed
+ }
+ dotFound = true
+ default:
+ switch c {
+ case ',', ']', '}', ' ', '\t', '\n', '\r':
+ if iter.head == i {
+ return false // if - without following digits
+ }
+ iter.head = i
+ return true // must be valid
+ }
+ return false // may be invalid
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipString() {
+ if !iter.trySkipString() {
+ iter.unreadByte()
+ iter.ReadString()
+ }
+}
+
+func (iter *Iterator) trySkipString() bool {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ iter.head = i + 1
+ return true // valid
+ } else if c == '\\' {
+ return false
+ } else if c < ' ' {
+ iter.ReportError("trySkipString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return true // already failed
+ }
+ }
+ return false
+}
+
+func (iter *Iterator) skipObject() {
+ iter.unreadByte()
+ iter.ReadObjectCB(func(iter *Iterator, field string) bool {
+ iter.Skip()
+ return true
+ })
+}
+
+func (iter *Iterator) skipArray() {
+ iter.unreadByte()
+ iter.ReadArrayCB(func(iter *Iterator) bool {
+ iter.Skip()
+ return true
+ })
+}
diff --git a/vendor/github.com/json-iterator/go/iter_str.go b/vendor/github.com/json-iterator/go/iter_str.go
new file mode 100644
index 000000000..adc487ea8
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/iter_str.go
@@ -0,0 +1,215 @@
+package jsoniter
+
+import (
+ "fmt"
+ "unicode/utf16"
+)
+
+// ReadString reads a string from the iterator
+func (iter *Iterator) ReadString() (ret string) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ c := iter.buf[i]
+ if c == '"' {
+ ret = string(iter.buf[iter.head:i])
+ iter.head = i + 1
+ return ret
+ } else if c == '\\' {
+ break
+ } else if c < ' ' {
+ iter.ReportError("ReadString",
+ fmt.Sprintf(`invalid control character found: %d`, c))
+ return
+ }
+ }
+ return iter.readStringSlowPath()
+ } else if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return ""
+ }
+ iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readStringSlowPath() (ret string) {
+ var str []byte
+ var c byte
+ for iter.Error == nil {
+ c = iter.readByte()
+ if c == '"' {
+ return string(str)
+ }
+ if c == '\\' {
+ c = iter.readByte()
+ str = iter.readEscapedChar(c, str)
+ } else {
+ str = append(str, c)
+ }
+ }
+ iter.ReportError("readStringSlowPath", "unexpected end of input")
+ return
+}
+
+func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
+ switch c {
+ case 'u':
+ r := iter.readU4()
+ if utf16.IsSurrogate(r) {
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != '\\' {
+ iter.unreadByte()
+ str = appendRune(str, r)
+ return str
+ }
+ c = iter.readByte()
+ if iter.Error != nil {
+ return nil
+ }
+ if c != 'u' {
+ str = appendRune(str, r)
+ return iter.readEscapedChar(c, str)
+ }
+ r2 := iter.readU4()
+ if iter.Error != nil {
+ return nil
+ }
+ combined := utf16.DecodeRune(r, r2)
+ if combined == '\uFFFD' {
+ str = appendRune(str, r)
+ str = appendRune(str, r2)
+ } else {
+ str = appendRune(str, combined)
+ }
+ } else {
+ str = appendRune(str, r)
+ }
+ case '"':
+ str = append(str, '"')
+ case '\\':
+ str = append(str, '\\')
+ case '/':
+ str = append(str, '/')
+ case 'b':
+ str = append(str, '\b')
+ case 'f':
+ str = append(str, '\f')
+ case 'n':
+ str = append(str, '\n')
+ case 'r':
+ str = append(str, '\r')
+ case 't':
+ str = append(str, '\t')
+ default:
+ iter.ReportError("readEscapedChar",
+ `invalid escape char after \`)
+ return nil
+ }
+ return str
+}
+
+// ReadStringAsSlice reads a string from the iterator without copying it into string form.
+// The []byte can not be kept, as it will change after the next iterator call.
+func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
+ c := iter.nextToken()
+ if c == '"' {
+ for i := iter.head; i < iter.tail; i++ {
+ // require ascii string and no escape
+ // for: field name, base64, number
+ if iter.buf[i] == '"' {
+ // fast path: reuse the underlying buffer
+ ret = iter.buf[iter.head:i]
+ iter.head = i + 1
+ return ret
+ }
+ }
+ readLen := iter.tail - iter.head
+ copied := make([]byte, readLen, readLen*2)
+ copy(copied, iter.buf[iter.head:iter.tail])
+ iter.head = iter.tail
+ for iter.Error == nil {
+ c := iter.readByte()
+ if c == '"' {
+ return copied
+ }
+ copied = append(copied, c)
+ }
+ return copied
+ }
+ iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
+ return
+}
+
+func (iter *Iterator) readU4() (ret rune) {
+ for i := 0; i < 4; i++ {
+ c := iter.readByte()
+ if iter.Error != nil {
+ return
+ }
+ if c >= '0' && c <= '9' {
+ ret = ret*16 + rune(c-'0')
+ } else if c >= 'a' && c <= 'f' {
+ ret = ret*16 + rune(c-'a'+10)
+ } else if c >= 'A' && c <= 'F' {
+ ret = ret*16 + rune(c-'A'+10)
+ } else {
+ iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
+ return
+ }
+ }
+ return ret
+}
+
+const (
+ t1 = 0x00 // 0000 0000
+ tx = 0x80 // 1000 0000
+ t2 = 0xC0 // 1100 0000
+ t3 = 0xE0 // 1110 0000
+ t4 = 0xF0 // 1111 0000
+ t5 = 0xF8 // 1111 1000
+
+ maskx = 0x3F // 0011 1111
+ mask2 = 0x1F // 0001 1111
+ mask3 = 0x0F // 0000 1111
+ mask4 = 0x07 // 0000 0111
+
+ rune1Max = 1<<7 - 1
+ rune2Max = 1<<11 - 1
+ rune3Max = 1<<16 - 1
+
+ surrogateMin = 0xD800
+ surrogateMax = 0xDFFF
+
+ maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
+ runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
+)
+
+func appendRune(p []byte, r rune) []byte {
+ // Negative values are erroneous. Making it unsigned addresses the problem.
+ switch i := uint32(r); {
+ case i <= rune1Max:
+ p = append(p, byte(r))
+ return p
+ case i <= rune2Max:
+ p = append(p, t2|byte(r>>6))
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ case i > maxRune, surrogateMin <= i && i <= surrogateMax:
+ r = runeError
+ fallthrough
+ case i <= rune3Max:
+ p = append(p, t3|byte(r>>12))
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ default:
+ p = append(p, t4|byte(r>>18))
+ p = append(p, tx|byte(r>>12)&maskx)
+ p = append(p, tx|byte(r>>6)&maskx)
+ p = append(p, tx|byte(r)&maskx)
+ return p
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/jsoniter.go b/vendor/github.com/json-iterator/go/jsoniter.go
new file mode 100644
index 000000000..c2934f916
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/jsoniter.go
@@ -0,0 +1,18 @@
+// Package jsoniter implements encoding and decoding of JSON as defined in
+// RFC 4627 and provides interfaces with syntax identical to the standard library's encoding/json.
+// Converting from encoding/json to jsoniter requires no more than replacing the package with jsoniter
+// in imports and in variable type declarations (if any).
+// jsoniter's interfaces give 100% compatibility with code using the standard library.
+//
+// "JSON and Go"
+// (https://golang.org/doc/articles/json_and_go.html)
+// gives a description of how Marshal/Unmarshal operate
+// between arbitrary or predefined json objects and bytes,
+// and it applies to jsoniter.Marshal/Unmarshal as well.
+//
+// In addition, jsoniter.Iterator provides a different set of interfaces
+// for iterating over the given bytes/string/reader
+// and yielding parsed elements one by one.
+// This set of interfaces reads input only as required and gives
+// better performance.
+package jsoniter
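
A hedged sketch of the drop-in usage the package comment describes; ConfigCompatibleWithStandardLibrary is defined in config.go, which is not part of this excerpt:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type point struct {
	X int `json:"x"`
	Y int `json:"y"`
}

// Same call shapes as encoding/json: only the package changes.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

func main() {
	b, err := json.Marshal(point{X: 1, Y: 2})
	if err != nil {
		panic(err)
	}
	var p point
	if err := json.Unmarshal(b, &p); err != nil {
		panic(err)
	}
	fmt.Println(string(b), p) // {"x":1,"y":2} {1 2}
}
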
diff --git a/vendor/github.com/json-iterator/go/pool.go b/vendor/github.com/json-iterator/go/pool.go
new file mode 100644
index 000000000..e2389b56c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/pool.go
@@ -0,0 +1,42 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// IteratorPool is a thread-safe pool of iterators that share the same configuration.
+type IteratorPool interface {
+ BorrowIterator(data []byte) *Iterator
+ ReturnIterator(iter *Iterator)
+}
+
+// StreamPool is a thread-safe pool of streams that share the same configuration.
+type StreamPool interface {
+ BorrowStream(writer io.Writer) *Stream
+ ReturnStream(stream *Stream)
+}
+
+func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
+ stream := cfg.streamPool.Get().(*Stream)
+ stream.Reset(writer)
+ return stream
+}
+
+func (cfg *frozenConfig) ReturnStream(stream *Stream) {
+ stream.out = nil
+ stream.Error = nil
+ stream.Attachment = nil
+ cfg.streamPool.Put(stream)
+}
+
+func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
+ iter := cfg.iteratorPool.Get().(*Iterator)
+ iter.ResetBytes(data)
+ return iter
+}
+
+func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
+ iter.Error = nil
+ iter.Attachment = nil
+ cfg.iteratorPool.Put(iter)
+}
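
A sketch of iterator reuse through the pool above; ConfigDefault (an API value defined elsewhere in the package) exposes BorrowIterator/ReturnIterator via the embedded IteratorPool:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Borrow a pooled iterator bound to this config, use it, and return it.
	// The value must be fully consumed before ReturnIterator is called.
	iter := jsoniter.ConfigDefault.BorrowIterator([]byte(`[10, 20]`))
	defer jsoniter.ConfigDefault.ReturnIterator(iter)

	sum := 0
	for iter.ReadArray() {
		sum += iter.ReadInt()
	}
	fmt.Println(sum) // 30
}
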
diff --git a/vendor/github.com/json-iterator/go/reflect.go b/vendor/github.com/json-iterator/go/reflect.go
new file mode 100644
index 000000000..39acb320a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect.go
@@ -0,0 +1,337 @@
+package jsoniter
+
+import (
+ "fmt"
+ "reflect"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+// ValDecoder is an internal type registered to cache as needed.
+// Don't confuse jsoniter.ValDecoder with json.Decoder.
+// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
+//
+// Reflection on the type is used to create decoders, which are then cached.
+// Reflection on values is avoided where possible, since reflect.Value itself allocates, with the following exceptions:
+// 1. creating an instance of a new value, for example *int needs an int to be allocated
+// 2. appending to a slice, where allocation is done via reflect.New if the existing capacity is not enough
+// 3. assigning to a map, where both key and value will be reflect.Value
+// For a simple struct binding, decoding is reflect.Value-free and allocation-free.
+type ValDecoder interface {
+ Decode(ptr unsafe.Pointer, iter *Iterator)
+}
+
+// ValEncoder is an internal type registered to cache as needed.
+// Don't confuse jsoniter.ValEncoder with json.Encoder.
+// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
+type ValEncoder interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+ Encode(ptr unsafe.Pointer, stream *Stream)
+}
+
+type checkIsEmpty interface {
+ IsEmpty(ptr unsafe.Pointer) bool
+}
+
+type ctx struct {
+ *frozenConfig
+ prefix string
+ encoders map[reflect2.Type]ValEncoder
+ decoders map[reflect2.Type]ValDecoder
+}
+
+func (b *ctx) caseSensitive() bool {
+ if b.frozenConfig == nil {
+ // default is case-insensitive
+ return false
+ }
+ return b.frozenConfig.caseSensitive
+}
+
+func (b *ctx) append(prefix string) *ctx {
+ return &ctx{
+ frozenConfig: b.frozenConfig,
+ prefix: b.prefix + " " + prefix,
+ encoders: b.encoders,
+ decoders: b.decoders,
+ }
+}
+
+// ReadVal copies the underlying JSON into the given Go value, same as json.Unmarshal.
+func (iter *Iterator) ReadVal(obj interface{}) {
+ depth := iter.depth
+ cacheKey := reflect2.RTypeOf(obj)
+ decoder := iter.cfg.getDecoderFromCache(cacheKey)
+ if decoder == nil {
+ typ := reflect2.TypeOf(obj)
+ if typ == nil || typ.Kind() != reflect.Ptr {
+ iter.ReportError("ReadVal", "can only unmarshal into pointer")
+ return
+ }
+ decoder = iter.cfg.DecoderOf(typ)
+ }
+ ptr := reflect2.PtrOf(obj)
+ if ptr == nil {
+ iter.ReportError("ReadVal", "can not read into nil pointer")
+ return
+ }
+ decoder.Decode(ptr, iter)
+ if iter.depth != depth {
+ iter.ReportError("ReadVal", "unexpected mismatched nesting")
+ return
+ }
+}
+
+// WriteVal writes the given Go value as JSON to the underlying stream, same as json.Marshal.
+func (stream *Stream) WriteVal(val interface{}) {
+ if nil == val {
+ stream.WriteNil()
+ return
+ }
+ cacheKey := reflect2.RTypeOf(val)
+ encoder := stream.cfg.getEncoderFromCache(cacheKey)
+ if encoder == nil {
+ typ := reflect2.TypeOf(val)
+ encoder = stream.cfg.EncoderOf(typ)
+ }
+ encoder.Encode(reflect2.PtrOf(val), stream)
+}
+
+func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
+ cacheKey := typ.RType()
+ decoder := cfg.getDecoderFromCache(cacheKey)
+ if decoder != nil {
+ return decoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder = decoderOfType(ctx, ptrType.Elem())
+ cfg.addDecoderToCache(cacheKey, decoder)
+ return decoder
+}
+
+func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ return decoder
+}
+
+func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoders[typ]
+ if decoder != nil {
+ return decoder
+ }
+ placeholder := &placeholderDecoder{}
+ ctx.decoders[typ] = placeholder
+ decoder = _createDecoderOfType(ctx, typ)
+ placeholder.decoder = decoder
+ return decoder
+}
+
+func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := createDecoderOfJsonRawMessage(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfJsonNumber(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfMarshaler(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfAny(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ decoder = createDecoderOfNative(ctx, typ)
+ if decoder != nil {
+ return decoder
+ }
+ switch typ.Kind() {
+ case reflect.Interface:
+ ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
+ if isIFace {
+ return &ifaceDecoder{valType: ifaceType}
+ }
+ return &efaceDecoder{}
+ case reflect.Struct:
+ return decoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return decoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return decoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return decoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return decoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
+ cacheKey := typ.RType()
+ encoder := cfg.getEncoderFromCache(cacheKey)
+ if encoder != nil {
+ return encoder
+ }
+ ctx := &ctx{
+ frozenConfig: cfg,
+ prefix: "",
+ decoders: map[reflect2.Type]ValDecoder{},
+ encoders: map[reflect2.Type]ValEncoder{},
+ }
+ encoder = encoderOfType(ctx, typ)
+ if typ.LikePtr() {
+ encoder = &onePtrEncoder{encoder}
+ }
+ cfg.addEncoderToCache(cacheKey, encoder)
+ return encoder
+}
+
+type onePtrEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
+func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfType(ctx, typ)
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ return encoder
+}
+
+func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoders[typ]
+ if encoder != nil {
+ return encoder
+ }
+ placeholder := &placeholderEncoder{}
+ ctx.encoders[typ] = placeholder
+ encoder = _createEncoderOfType(ctx, typ)
+ placeholder.encoder = encoder
+ return encoder
+}
+func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := createEncoderOfJsonRawMessage(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfJsonNumber(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfMarshaler(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfAny(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ encoder = createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return encoderOfStruct(ctx, typ)
+ case reflect.Array:
+ return encoderOfArray(ctx, typ)
+ case reflect.Slice:
+ return encoderOfSlice(ctx, typ)
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return encoderOfOptional(ctx, typ)
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
+ }
+}
+
+type lazyErrorDecoder struct {
+ err error
+}
+
+func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.WhatIsNext() != NilValue {
+ if iter.Error == nil {
+ iter.Error = decoder.err
+ }
+ } else {
+ iter.Skip()
+ }
+}
+
+type lazyErrorEncoder struct {
+ err error
+}
+
+func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if ptr == nil {
+ stream.WriteNil()
+ } else if stream.Error == nil {
+ stream.Error = encoder.err
+ }
+}
+
+func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type placeholderDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(ptr, iter)
+}
+
+type placeholderEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(ptr, stream)
+}
+
+func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(ptr)
+}
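
A sketch tying ReadVal/WriteVal above to a struct binding; BorrowIterator/BorrowStream come from pool.go earlier in this diff, while ConfigDefault and Stream.Flush are assumed from other files of the package:

package main

import (
	"fmt"
	"os"

	jsoniter "github.com/json-iterator/go"
)

type user struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	// ReadVal resolves (and caches) a decoder for *user, then binds the JSON.
	iter := jsoniter.ConfigDefault.BorrowIterator([]byte(`{"name":"ann","age":41}`))
	defer jsoniter.ConfigDefault.ReturnIterator(iter)
	var u user
	iter.ReadVal(&u)
	fmt.Printf("%+v\n", u)

	// WriteVal goes the other way through a pooled Stream.
	stream := jsoniter.ConfigDefault.BorrowStream(os.Stdout)
	defer jsoniter.ConfigDefault.ReturnStream(stream)
	stream.WriteVal(u)
	stream.Flush()
}
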
diff --git a/vendor/github.com/json-iterator/go/reflect_array.go b/vendor/github.com/json-iterator/go/reflect_array.go
new file mode 100644
index 000000000..13a0b7b08
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_array.go
@@ -0,0 +1,104 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayDecoder{arrayType, decoder}
+}
+
+func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
+ arrayType := typ.(*reflect2.UnsafeArrayType)
+ if arrayType.Len() == 0 {
+ return emptyArrayEncoder{}
+ }
+ encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
+ return &arrayEncoder{arrayType, encoder}
+}
+
+type emptyArrayEncoder struct{}
+
+func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyArray()
+}
+
+func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return true
+}
+
+type arrayEncoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemEncoder ValEncoder
+}
+
+func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteArrayStart()
+ elemPtr := unsafe.Pointer(ptr)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ for i := 1; i < encoder.arrayType.Len(); i++ {
+ stream.WriteMore()
+ elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
+ }
+}
+
+func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type arrayDecoder struct {
+ arrayType *reflect2.UnsafeArrayType
+ elemDecoder ValDecoder
+}
+
+func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
+ }
+}
+
+func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ arrayType := decoder.arrayType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ return
+ }
+ iter.unreadByte()
+ elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ if length >= arrayType.Len() {
+ iter.Skip()
+ continue
+ }
+ idx := length
+ length += 1
+ elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_dynamic.go b/vendor/github.com/json-iterator/go/reflect_dynamic.go
new file mode 100644
index 000000000..8b6bc8b43
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_dynamic.go
@@ -0,0 +1,70 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "unsafe"
+)
+
+type dynamicEncoder struct {
+ valType reflect2.Type
+}
+
+func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ stream.WriteVal(obj)
+}
+
+func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.valType.UnsafeIndirect(ptr) == nil
+}
+
+type efaceDecoder struct {
+}
+
+func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ pObj := (*interface{})(ptr)
+ obj := *pObj
+ if obj == nil {
+ *pObj = iter.Read()
+ return
+ }
+ typ := reflect2.TypeOf(obj)
+ if typ.Kind() != reflect.Ptr {
+ *pObj = iter.Read()
+ return
+ }
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ ptrElemType := ptrType.Elem()
+ if iter.WhatIsNext() == NilValue {
+ if ptrElemType.Kind() != reflect.Ptr {
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *pObj = nil
+ return
+ }
+ }
+ if reflect2.IsNil(obj) {
+ obj := ptrElemType.New()
+ iter.ReadVal(obj)
+ *pObj = obj
+ return
+ }
+ iter.ReadVal(obj)
+}
+
+type ifaceDecoder struct {
+ valType *reflect2.UnsafeIFaceType
+}
+
+func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
+ return
+ }
+ obj := decoder.valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ iter.ReportError("decode non empty interface", "can not unmarshal into nil")
+ return
+ }
+ iter.ReadVal(obj)
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_extension.go b/vendor/github.com/json-iterator/go/reflect_extension.go
new file mode 100644
index 000000000..74a97bfe5
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_extension.go
@@ -0,0 +1,483 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "reflect"
+ "sort"
+ "strings"
+ "unicode"
+ "unsafe"
+)
+
+var typeDecoders = map[string]ValDecoder{}
+var fieldDecoders = map[string]ValDecoder{}
+var typeEncoders = map[string]ValEncoder{}
+var fieldEncoders = map[string]ValEncoder{}
+var extensions = []Extension{}
+
+// StructDescriptor describes how we should encode/decode the struct.
+type StructDescriptor struct {
+ Type reflect2.Type
+ Fields []*Binding
+}
+
+// GetField gets one field from the descriptor by its name.
+// A map can not be used here, in order to keep the field order.
+func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
+ for _, binding := range structDescriptor.Fields {
+ if binding.Field.Name() == fieldName {
+ return binding
+ }
+ }
+ return nil
+}
+
+// Binding describes how we should encode/decode the struct field.
+type Binding struct {
+ levels []int
+ Field reflect2.StructField
+ FromNames []string
+ ToNames []string
+ Encoder ValEncoder
+ Decoder ValDecoder
+}
+
+// Extension is the one-for-all SPI. Customize encoding/decoding by specifying an alternate encoder/decoder.
+// Fields can also be renamed via UpdateStructDescriptor.
+type Extension interface {
+ UpdateStructDescriptor(structDescriptor *StructDescriptor)
+ CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
+ CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
+ CreateDecoder(typ reflect2.Type) ValDecoder
+ CreateEncoder(typ reflect2.Type) ValEncoder
+ DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
+ DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
+}
+
+// DummyExtension can be embedded to get no-op implementations of all Extension methods
+type DummyExtension struct {
+}
+
+// UpdateStructDescriptor No-op
+func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder No-op
+func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder No-op
+func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type EncoderExtension map[reflect2.Type]ValEncoder
+
+// UpdateStructDescriptor No-op
+func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateDecoder No-op
+func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateEncoder gets the encoder from the map
+func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return extension[typ]
+}
+
+// CreateMapKeyDecoder No-op
+func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type DecoderExtension map[reflect2.Type]ValDecoder
+
+// UpdateStructDescriptor No-op
+func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
+}
+
+// CreateMapKeyDecoder No-op
+func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
+ return nil
+}
+
+// CreateMapKeyEncoder No-op
+func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// CreateDecoder gets the decoder from the map
+func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
+ return extension[typ]
+}
+
+// CreateEncoder No-op
+func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
+ return nil
+}
+
+// DecorateDecoder No-op
+func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
+ return decoder
+}
+
+// DecorateEncoder No-op
+func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
+ return encoder
+}
+
+type funcDecoder struct {
+ fun DecoderFunc
+}
+
+func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.fun(ptr, iter)
+}
+
+type funcEncoder struct {
+ fun EncoderFunc
+ isEmptyFunc func(ptr unsafe.Pointer) bool
+}
+
+func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.fun(ptr, stream)
+}
+
+func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ if encoder.isEmptyFunc == nil {
+ return false
+ }
+ return encoder.isEmptyFunc(ptr)
+}
+
+// DecoderFunc is the function form of TypeDecoder
+type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
+
+// EncoderFunc is the function form of TypeEncoder
+type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
+
+// RegisterTypeDecoderFunc registers a TypeDecoder for a type using a decode function
+func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
+ typeDecoders[typ] = &funcDecoder{fun}
+}
+
+// RegisterTypeDecoder registers a TypeDecoder for a type
+func RegisterTypeDecoder(typ string, decoder ValDecoder) {
+ typeDecoders[typ] = decoder
+}
+
+// RegisterFieldDecoderFunc registers a decoder for a struct field using a decode function
+func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
+ RegisterFieldDecoder(typ, field, &funcDecoder{fun})
+}
+
+// RegisterFieldDecoder registers a decoder for a struct field
+func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
+ fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
+}
+
+// RegisterTypeEncoderFunc registers a TypeEncoder for a type using encode/isEmpty functions
+func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
+}
+
+// RegisterTypeEncoder registers a TypeEncoder for a type
+func RegisterTypeEncoder(typ string, encoder ValEncoder) {
+ typeEncoders[typ] = encoder
+}
+
+// RegisterFieldEncoderFunc registers an encoder for a struct field using encode/isEmpty functions
+func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
+ RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
+}
+
+// RegisterFieldEncoder registers an encoder for a struct field
+func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
+ fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
+}
+
+// RegisterExtension registers an extension
+func RegisterExtension(extension Extension) {
+ extensions = append(extensions, extension)
+}
+
+func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := _getTypeDecoderFromExtension(ctx, typ)
+ if decoder != nil {
+ for _, extension := range extensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
+ for _, extension := range ctx.extraExtensions {
+ decoder = extension.DecorateDecoder(typ, decoder)
+ }
+ }
+ return decoder
+}
+func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
+ for _, extension := range extensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ decoder := ctx.decoderExtension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+ typeName := typ.String()
+ decoder = typeDecoders[typeName]
+ if decoder != nil {
+ return decoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ decoder := typeDecoders[ptrType.Elem().String()]
+ if decoder != nil {
+ return &OptionalDecoder{ptrType.Elem(), decoder}
+ }
+ }
+ return nil
+}
+
+func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := _getTypeEncoderFromExtension(ctx, typ)
+ if encoder != nil {
+ for _, extension := range extensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
+ for _, extension := range ctx.extraExtensions {
+ encoder = extension.DecorateEncoder(typ, encoder)
+ }
+ }
+ return encoder
+}
+
+func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
+ for _, extension := range extensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ encoder := ctx.encoderExtension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+ typeName := typ.String()
+ encoder = typeEncoders[typeName]
+ if encoder != nil {
+ return encoder
+ }
+ if typ.Kind() == reflect.Ptr {
+ typePtr := typ.(*reflect2.UnsafePtrType)
+ encoder := typeEncoders[typePtr.Elem().String()]
+ if encoder != nil {
+ return &OptionalEncoder{encoder}
+ }
+ }
+ return nil
+}
+
+func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
+ structType := typ.(*reflect2.UnsafeStructType)
+ embeddedBindings := []*Binding{}
+ bindings := []*Binding{}
+ for i := 0; i < structType.NumField(); i++ {
+ field := structType.Field(i)
+ tag, hastag := field.Tag().Lookup(ctx.getTagKey())
+ if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
+ continue
+ }
+ if tag == "-" || field.Name() == "_" {
+ continue
+ }
+ tagParts := strings.Split(tag, ",")
+ if field.Anonymous() && (tag == "" || tagParts[0] == "") {
+ if field.Type().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, field.Type())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ } else if field.Type().Kind() == reflect.Ptr {
+ ptrType := field.Type().(*reflect2.UnsafePtrType)
+ if ptrType.Elem().Kind() == reflect.Struct {
+ structDescriptor := describeStruct(ctx, ptrType.Elem())
+ for _, binding := range structDescriptor.Fields {
+ binding.levels = append([]int{i}, binding.levels...)
+ omitempty := binding.Encoder.(*structFieldEncoder).omitempty
+ binding.Encoder = &dereferenceEncoder{binding.Encoder}
+ binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
+ binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
+ binding.Decoder = &structFieldDecoder{field, binding.Decoder}
+ embeddedBindings = append(embeddedBindings, binding)
+ }
+ continue
+ }
+ }
+ }
+ fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
+ fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
+ decoder := fieldDecoders[fieldCacheKey]
+ if decoder == nil {
+ decoder = decoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ encoder := fieldEncoders[fieldCacheKey]
+ if encoder == nil {
+ encoder = encoderOfType(ctx.append(field.Name()), field.Type())
+ }
+ binding := &Binding{
+ Field: field,
+ FromNames: fieldNames,
+ ToNames: fieldNames,
+ Decoder: decoder,
+ Encoder: encoder,
+ }
+ binding.levels = []int{i}
+ bindings = append(bindings, binding)
+ }
+ return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
+}
+func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
+ structDescriptor := &StructDescriptor{
+ Type: typ,
+ Fields: bindings,
+ }
+ for _, extension := range extensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
+ ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
+ for _, extension := range ctx.extraExtensions {
+ extension.UpdateStructDescriptor(structDescriptor)
+ }
+ processTags(structDescriptor, ctx.frozenConfig)
+ // merge normal & embedded bindings and sort them in the original field order
+ allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
+ sort.Sort(allBindings)
+ structDescriptor.Fields = allBindings
+ return structDescriptor
+}
+
+type sortableBindings []*Binding
+
+func (bindings sortableBindings) Len() int {
+ return len(bindings)
+}
+
+func (bindings sortableBindings) Less(i, j int) bool {
+ left := bindings[i].levels
+ right := bindings[j].levels
+ k := 0
+ for {
+ if left[k] < right[k] {
+ return true
+ } else if left[k] > right[k] {
+ return false
+ }
+ k++
+ }
+}
+
+func (bindings sortableBindings) Swap(i, j int) {
+ bindings[i], bindings[j] = bindings[j], bindings[i]
+}
+
+func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
+ for _, binding := range structDescriptor.Fields {
+ shouldOmitEmpty := false
+ tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
+ for _, tagPart := range tagParts[1:] {
+ if tagPart == "omitempty" {
+ shouldOmitEmpty = true
+ } else if tagPart == "string" {
+ if binding.Field.Type().Kind() == reflect.String {
+ binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
+ binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
+ } else {
+ binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
+ binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
+ }
+ }
+ }
+ binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
+ binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
+ }
+}
+
+func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
+ // ignore?
+ if wholeTag == "-" {
+ return []string{}
+ }
+ // rename?
+ var fieldNames []string
+ if tagProvidedFieldName == "" {
+ fieldNames = []string{originalFieldName}
+ } else {
+ fieldNames = []string{tagProvidedFieldName}
+ }
+ // private?
+ isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
+ if isNotExported {
+ fieldNames = []string{}
+ }
+ return fieldNames
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_json_number.go b/vendor/github.com/json-iterator/go/reflect_json_number.go
new file mode 100644
index 000000000..98d45c1ec
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_number.go
@@ -0,0 +1,112 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "strconv"
+ "unsafe"
+)
+
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
+
+func CastJsonNumber(val interface{}) (string, bool) {
+ switch typedVal := val.(type) {
+ case json.Number:
+ return string(typedVal), true
+ case Number:
+ return string(typedVal), true
+ }
+ return "", false
+}
+
+var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
+var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()
+
+func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.AssignableTo(jsonNumberType) {
+ return &jsonNumberCodec{}
+ }
+ if typ.AssignableTo(jsoniterNumberType) {
+ return &jsoniterNumberCodec{}
+ }
+ return nil
+}
+
+type jsonNumberCodec struct {
+}
+
+func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*json.Number)(ptr)) = json.Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*json.Number)(ptr)) = ""
+ default:
+ *((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*json.Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.Number)(ptr))) == 0
+}
+
+type jsoniterNumberCodec struct {
+}
+
+func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ switch iter.WhatIsNext() {
+ case StringValue:
+ *((*Number)(ptr)) = Number(iter.ReadString())
+ case NilValue:
+ iter.skipFourBytes('n', 'u', 'l', 'l')
+ *((*Number)(ptr)) = ""
+ default:
+ *((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
+ }
+}
+
+func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ number := *((*Number)(ptr))
+ if len(number) == 0 {
+ stream.writeByte('0')
+ } else {
+ stream.WriteRaw(string(number))
+ }
+}
+
+func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*Number)(ptr))) == 0
+}
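
These codecs let number fields keep their exact textual form. A short sketch of decoding into json.Number, which preserves integers that would lose precision as float64 (field and payload values are illustrative):

    package main

    import (
        "encoding/json"
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        var doc struct {
            ID json.Number `json:"id"`
        }
        // jsonNumberCodec accepts both quoted and unquoted numbers.
        _ = jsoniter.Unmarshal([]byte(`{"id":9007199254740993}`), &doc)

        i, _ := doc.ID.Int64()
        fmt.Println(doc.ID.String(), i) // 9007199254740993 9007199254740993
    }
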
diff --git a/vendor/github.com/json-iterator/go/reflect_json_raw_message.go b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
new file mode 100644
index 000000000..eba434f2f
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_json_raw_message.go
@@ -0,0 +1,76 @@
+package jsoniter
+
+import (
+ "encoding/json"
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
+var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()
+
+func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ == jsonRawMessageType {
+ return &jsonRawMessageCodec{}
+ }
+ if typ == jsoniterRawMessageType {
+ return &jsoniterRawMessageCodec{}
+ }
+ return nil
+}
+
+type jsonRawMessageCodec struct {
+}
+
+func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*json.RawMessage)(ptr)) = nil
+ } else {
+ *((*json.RawMessage)(ptr)) = iter.SkipAndReturnBytes()
+ }
+}
+
+func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*json.RawMessage)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
+ }
+}
+
+func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*json.RawMessage)(ptr))) == 0
+}
+
+type jsoniterRawMessageCodec struct {
+}
+
+func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*RawMessage)(ptr)) = nil
+ } else {
+ *((*RawMessage)(ptr)) = iter.SkipAndReturnBytes()
+ }
+}
+
+func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*RawMessage)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ stream.WriteRaw(string(*((*RawMessage)(ptr))))
+ }
+}
+
+func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*RawMessage)(ptr))) == 0
+}
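
RawMessage fields keep the encoded bytes verbatim, which is useful for deferred or two-phase decoding. A small sketch (the envelope shape is made up for illustration):

    package main

    import (
        "encoding/json"
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        var envelope struct {
            Kind    string          `json:"kind"`
            Payload json.RawMessage `json:"payload"`
        }
        data := []byte(`{"kind":"point","payload":{"x":1,"y":2}}`)
        _ = jsoniter.Unmarshal(data, &envelope)

        // The payload bytes are untouched and can be decoded later,
        // once Kind is known.
        fmt.Println(envelope.Kind, string(envelope.Payload)) // point {"x":1,"y":2}
    }
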
diff --git a/vendor/github.com/json-iterator/go/reflect_map.go b/vendor/github.com/json-iterator/go/reflect_map.go
new file mode 100644
index 000000000..582967130
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_map.go
@@ -0,0 +1,346 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "sort"
+ "unsafe"
+)
+
+func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
+ elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
+ return &mapDecoder{
+ mapType: mapType,
+ keyType: mapType.Key(),
+ elemType: mapType.Elem(),
+ keyDecoder: keyDecoder,
+ elemDecoder: elemDecoder,
+ }
+}
+
+func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
+ mapType := typ.(*reflect2.UnsafeMapType)
+ if ctx.sortMapKeys {
+ return &sortKeysMapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+ }
+ return &mapEncoder{
+ mapType: mapType,
+ keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
+ elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
+ }
+}
+
+func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
+ decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ decoder := extension.CreateMapKeyDecoder(typ)
+ if decoder != nil {
+ return decoder
+ }
+ }
+
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(unmarshalerType) {
+ return &referenceDecoder{
+ &unmarshalerDecoder{
+ valType: ptrType,
+ },
+ }
+ }
+ if typ.Implements(unmarshalerType) {
+ return &unmarshalerDecoder{
+ valType: typ,
+ }
+ }
+ if ptrType.Implements(textUnmarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{
+ valType: ptrType,
+ },
+ }
+ }
+ if typ.Implements(textUnmarshalerType) {
+ return &textUnmarshalerDecoder{
+ valType: typ,
+ }
+ }
+
+ switch typ.Kind() {
+ case reflect.String:
+ return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
+ default:
+ return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
+ encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ for _, extension := range ctx.extraExtensions {
+ encoder := extension.CreateMapKeyEncoder(typ)
+ if encoder != nil {
+ return encoder
+ }
+ }
+
+ if typ == textMarshalerType {
+ return &directTextMarshalerEncoder{
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+ if typ.Implements(textMarshalerType) {
+ return &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ }
+
+ switch typ.Kind() {
+ case reflect.String:
+ return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
+ case reflect.Bool,
+ reflect.Uint8, reflect.Int8,
+ reflect.Uint16, reflect.Int16,
+ reflect.Uint32, reflect.Int32,
+ reflect.Uint64, reflect.Int64,
+ reflect.Uint, reflect.Int,
+ reflect.Float32, reflect.Float64,
+ reflect.Uintptr:
+ typ = reflect2.DefaultTypeOfKind(typ.Kind())
+ return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
+ default:
+ if typ.Kind() == reflect.Interface {
+ return &dynamicMapKeyEncoder{ctx, typ}
+ }
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
+ }
+}
+
+type mapDecoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyType reflect2.Type
+ elemType reflect2.Type
+ keyDecoder ValDecoder
+ elemDecoder ValDecoder
+}
+
+func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ mapType := decoder.mapType
+ c := iter.nextToken()
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ *(*unsafe.Pointer)(ptr) = nil
+ mapType.UnsafeSet(ptr, mapType.UnsafeNew())
+ return
+ }
+ if mapType.UnsafeIsNil(ptr) {
+ mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
+ }
+ if c != '{' {
+ iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == '}' {
+ return
+ }
+ iter.unreadByte()
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ key := decoder.keyType.UnsafeNew()
+ decoder.keyDecoder.Decode(key, iter)
+ c = iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
+ return
+ }
+ elem := decoder.elemType.UnsafeNew()
+ decoder.elemDecoder.Decode(elem, iter)
+ decoder.mapType.UnsafeSetIndex(ptr, key, elem)
+ }
+ if c != '}' {
+ iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
+ }
+}
+
+type numericMapKeyDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.decoder.Decode(ptr, iter)
+ c = iter.nextToken()
+ if c != '"' {
+ iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
+
+type numericMapKeyEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.encoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type dynamicMapKeyEncoder struct {
+ ctx *ctx
+ valType reflect2.Type
+}
+
+func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
+}
+
+func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
+}
+
+type mapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *(*unsafe.Pointer)(ptr) == nil {
+ stream.WriteNil()
+ return
+ }
+ stream.WriteObjectStart()
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ for i := 0; iter.HasNext(); i++ {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ key, elem := iter.UnsafeNext()
+ encoder.keyEncoder.Encode(key, stream)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ stream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, stream)
+ }
+ stream.WriteObjectEnd()
+}
+
+func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type sortKeysMapEncoder struct {
+ mapType *reflect2.UnsafeMapType
+ keyEncoder ValEncoder
+ elemEncoder ValEncoder
+}
+
+func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *(*unsafe.Pointer)(ptr) == nil {
+ stream.WriteNil()
+ return
+ }
+ stream.WriteObjectStart()
+ mapIter := encoder.mapType.UnsafeIterate(ptr)
+ subStream := stream.cfg.BorrowStream(nil)
+ subStream.Attachment = stream.Attachment
+ subIter := stream.cfg.BorrowIterator(nil)
+ keyValues := encodedKeyValues{}
+ for mapIter.HasNext() {
+ key, elem := mapIter.UnsafeNext()
+ subStreamIndex := subStream.Buffered()
+ encoder.keyEncoder.Encode(key, subStream)
+ if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
+ stream.Error = subStream.Error
+ }
+ encodedKey := subStream.Buffer()[subStreamIndex:]
+ subIter.ResetBytes(encodedKey)
+ decodedKey := subIter.ReadString()
+ if stream.indention > 0 {
+ subStream.writeTwoBytes(byte(':'), byte(' '))
+ } else {
+ subStream.writeByte(':')
+ }
+ encoder.elemEncoder.Encode(elem, subStream)
+ keyValues = append(keyValues, encodedKV{
+ key: decodedKey,
+ keyValue: subStream.Buffer()[subStreamIndex:],
+ })
+ }
+ sort.Sort(keyValues)
+ for i, keyValue := range keyValues {
+ if i != 0 {
+ stream.WriteMore()
+ }
+ stream.Write(keyValue.keyValue)
+ }
+ if subStream.Error != nil && stream.Error == nil {
+ stream.Error = subStream.Error
+ }
+ stream.WriteObjectEnd()
+ stream.cfg.ReturnStream(subStream)
+ stream.cfg.ReturnIterator(subIter)
+}
+
+func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ iter := encoder.mapType.UnsafeIterate(ptr)
+ return !iter.HasNext()
+}
+
+type encodedKeyValues []encodedKV
+
+type encodedKV struct {
+ key string
+ keyValue []byte
+}
+
+func (sv encodedKeyValues) Len() int { return len(sv) }
+func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
+func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
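
The sortKeysMapEncoder path is selected when the frozen config asks for sorted map keys, giving deterministic output at the cost of an extra buffer and a sort per map. A sketch, assuming the Config field is called SortMapKeys (it is referenced as ctx.sortMapKeys above, but the Config struct itself is not part of this hunk):

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    func main() {
        m := map[string]int{"b": 2, "a": 1, "c": 3}

        sorted := jsoniter.Config{SortMapKeys: true}.Froze()
        out, _ := sorted.MarshalToString(m)
        fmt.Println(out) // {"a":1,"b":2,"c":3}, independent of map iteration order
    }
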
diff --git a/vendor/github.com/json-iterator/go/reflect_marshaler.go b/vendor/github.com/json-iterator/go/reflect_marshaler.go
new file mode 100644
index 000000000..3e21f3756
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_marshaler.go
@@ -0,0 +1,225 @@
+package jsoniter
+
+import (
+ "encoding"
+ "encoding/json"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
+var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
+var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
+var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
+
+func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := reflect2.PtrTo(typ)
+ if ptrType.Implements(unmarshalerType) {
+ return &referenceDecoder{
+ &unmarshalerDecoder{ptrType},
+ }
+ }
+ if ptrType.Implements(textUnmarshalerType) {
+ return &referenceDecoder{
+ &textUnmarshalerDecoder{ptrType},
+ }
+ }
+ return nil
+}
+
+func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ == marshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ if typ.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: typ,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ ptrType := reflect2.PtrTo(typ)
+ if ctx.prefix != "" && ptrType.Implements(marshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &marshalerEncoder{
+ valType: ptrType,
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ if typ == textMarshalerType {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &directTextMarshalerEncoder{
+ checkIsEmpty: checkIsEmpty,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ }
+ return encoder
+ }
+ if typ.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, typ)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: typ,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return encoder
+ }
+ // if prefix is empty, the type is the root type
+ if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
+ checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
+ var encoder ValEncoder = &textMarshalerEncoder{
+ valType: ptrType,
+ stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
+ checkIsEmpty: checkIsEmpty,
+ }
+ return &referenceEncoder{encoder}
+ }
+ return nil
+}
+
+type marshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+ valType reflect2.Type
+}
+
+func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := obj.(json.Marshaler)
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ // html escaping was already done by jsoniter,
+ // but the extra '\n' should be trimmed
+ l := len(bytes)
+ if l > 0 && bytes[l-1] == '\n' {
+ bytes = bytes[:l-1]
+ }
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directMarshalerEncoder struct {
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*json.Marshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalJSON()
+ if err != nil {
+ stream.Error = err
+ } else {
+ stream.Write(bytes)
+ }
+}
+
+func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type textMarshalerEncoder struct {
+ valType reflect2.Type
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ obj := encoder.valType.UnsafeIndirect(ptr)
+ if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
+ stream.WriteNil()
+ return
+ }
+ marshaler := (obj).(encoding.TextMarshaler)
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type directTextMarshalerEncoder struct {
+ stringEncoder ValEncoder
+ checkIsEmpty checkIsEmpty
+}
+
+func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ marshaler := *(*encoding.TextMarshaler)(ptr)
+ if marshaler == nil {
+ stream.WriteNil()
+ return
+ }
+ bytes, err := marshaler.MarshalText()
+ if err != nil {
+ stream.Error = err
+ } else {
+ str := string(bytes)
+ encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
+ }
+}
+
+func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.checkIsEmpty.IsEmpty(ptr)
+}
+
+type unmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ unmarshaler := obj.(json.Unmarshaler)
+ iter.nextToken()
+ iter.unreadByte() // skip spaces
+ bytes := iter.SkipAndReturnBytes()
+ err := unmarshaler.UnmarshalJSON(bytes)
+ if err != nil {
+ iter.ReportError("unmarshalerDecoder", err.Error())
+ }
+}
+
+type textUnmarshalerDecoder struct {
+ valType reflect2.Type
+}
+
+func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valType := decoder.valType
+ obj := valType.UnsafeIndirect(ptr)
+ if reflect2.IsNil(obj) {
+ ptrType := valType.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elem := elemType.UnsafeNew()
+ ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
+ obj = valType.UnsafeIndirect(ptr)
+ }
+ unmarshaler := (obj).(encoding.TextUnmarshaler)
+ str := iter.ReadString()
+ err := unmarshaler.UnmarshalText([]byte(str))
+ if err != nil {
+ iter.ReportError("textUnmarshalerDecoder", err.Error())
+ }
+}
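
Types implementing json.Marshaler or encoding.TextMarshaler (and their Unmarshal counterparts) are routed through the encoders/decoders above instead of the reflection-based codecs. A sketch with a hypothetical Level type that round-trips through its text form:

    package main

    import (
        "fmt"
        "strings"

        jsoniter "github.com/json-iterator/go"
    )

    // Level implements encoding.TextMarshaler / TextUnmarshaler, so
    // textMarshalerEncoder and textUnmarshalerDecoder handle it.
    type Level int

    func (l Level) MarshalText() ([]byte, error) {
        return []byte(strings.Repeat("*", int(l))), nil
    }

    func (l *Level) UnmarshalText(text []byte) error {
        *l = Level(len(text))
        return nil
    }

    func main() {
        out, _ := jsoniter.Marshal(struct{ Prio Level }{Prio: 3})
        fmt.Println(string(out)) // {"Prio":"***"}

        var v struct{ Prio Level }
        _ = jsoniter.Unmarshal([]byte(`{"Prio":"*****"}`), &v)
        fmt.Println(v.Prio) // 5
    }
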
diff --git a/vendor/github.com/json-iterator/go/reflect_native.go b/vendor/github.com/json-iterator/go/reflect_native.go
new file mode 100644
index 000000000..f88722d14
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_native.go
@@ -0,0 +1,453 @@
+package jsoniter
+
+import (
+ "encoding/base64"
+ "reflect"
+ "strconv"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+const ptrSize = 32 << uintptr(^uintptr(0)>>63)
+
+func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ kind := typ.Kind()
+ switch kind {
+ case reflect.String:
+ if typeName != "string" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
+func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
+ if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
+ sliceDecoder := decoderOfSlice(ctx, typ)
+ return &base64Codec{sliceDecoder: sliceDecoder}
+ }
+ typeName := typ.String()
+ switch typ.Kind() {
+ case reflect.String:
+ if typeName != "string" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
+ }
+ return &stringCodec{}
+ case reflect.Int:
+ if typeName != "int" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &int32Codec{}
+ }
+ return &int64Codec{}
+ case reflect.Int8:
+ if typeName != "int8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
+ }
+ return &int8Codec{}
+ case reflect.Int16:
+ if typeName != "int16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
+ }
+ return &int16Codec{}
+ case reflect.Int32:
+ if typeName != "int32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
+ }
+ return &int32Codec{}
+ case reflect.Int64:
+ if typeName != "int64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
+ }
+ return &int64Codec{}
+ case reflect.Uint:
+ if typeName != "uint" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
+ }
+ if strconv.IntSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint8:
+ if typeName != "uint8" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
+ }
+ return &uint8Codec{}
+ case reflect.Uint16:
+ if typeName != "uint16" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
+ }
+ return &uint16Codec{}
+ case reflect.Uint32:
+ if typeName != "uint32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
+ }
+ return &uint32Codec{}
+ case reflect.Uintptr:
+ if typeName != "uintptr" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
+ }
+ if ptrSize == 32 {
+ return &uint32Codec{}
+ }
+ return &uint64Codec{}
+ case reflect.Uint64:
+ if typeName != "uint64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
+ }
+ return &uint64Codec{}
+ case reflect.Float32:
+ if typeName != "float32" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
+ }
+ return &float32Codec{}
+ case reflect.Float64:
+ if typeName != "float64" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
+ }
+ return &float64Codec{}
+ case reflect.Bool:
+ if typeName != "bool" {
+ return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
+ }
+ return &boolCodec{}
+ }
+ return nil
+}
+
+type stringCodec struct {
+}
+
+func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ *((*string)(ptr)) = iter.ReadString()
+}
+
+func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ str := *((*string)(ptr))
+ stream.WriteString(str)
+}
+
+func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*string)(ptr)) == ""
+}
+
+type int8Codec struct {
+}
+
+func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int8)(ptr)) = iter.ReadInt8()
+ }
+}
+
+func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt8(*((*int8)(ptr)))
+}
+
+func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int8)(ptr)) == 0
+}
+
+type int16Codec struct {
+}
+
+func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int16)(ptr)) = iter.ReadInt16()
+ }
+}
+
+func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt16(*((*int16)(ptr)))
+}
+
+func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int16)(ptr)) == 0
+}
+
+type int32Codec struct {
+}
+
+func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int32)(ptr)) = iter.ReadInt32()
+ }
+}
+
+func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt32(*((*int32)(ptr)))
+}
+
+func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int32)(ptr)) == 0
+}
+
+type int64Codec struct {
+}
+
+func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*int64)(ptr)) = iter.ReadInt64()
+ }
+}
+
+func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteInt64(*((*int64)(ptr)))
+}
+
+func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*int64)(ptr)) == 0
+}
+
+type uint8Codec struct {
+}
+
+func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint8)(ptr)) = iter.ReadUint8()
+ }
+}
+
+func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint8(*((*uint8)(ptr)))
+}
+
+func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint8)(ptr)) == 0
+}
+
+type uint16Codec struct {
+}
+
+func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint16)(ptr)) = iter.ReadUint16()
+ }
+}
+
+func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint16(*((*uint16)(ptr)))
+}
+
+func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint16)(ptr)) == 0
+}
+
+type uint32Codec struct {
+}
+
+func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint32)(ptr)) = iter.ReadUint32()
+ }
+}
+
+func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint32(*((*uint32)(ptr)))
+}
+
+func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint32)(ptr)) == 0
+}
+
+type uint64Codec struct {
+}
+
+func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*uint64)(ptr)) = iter.ReadUint64()
+ }
+}
+
+func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteUint64(*((*uint64)(ptr)))
+}
+
+func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*uint64)(ptr)) == 0
+}
+
+type float32Codec struct {
+}
+
+func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float32)(ptr)) = iter.ReadFloat32()
+ }
+}
+
+func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat32(*((*float32)(ptr)))
+}
+
+func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float32)(ptr)) == 0
+}
+
+type float64Codec struct {
+}
+
+func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*float64)(ptr)) = iter.ReadFloat64()
+ }
+}
+
+func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteFloat64(*((*float64)(ptr)))
+}
+
+func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*float64)(ptr)) == 0
+}
+
+type boolCodec struct {
+}
+
+func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.ReadNil() {
+ *((*bool)(ptr)) = iter.ReadBool()
+ }
+}
+
+func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteBool(*((*bool)(ptr)))
+}
+
+func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
+ return !(*((*bool)(ptr)))
+}
+
+type base64Codec struct {
+ sliceType *reflect2.UnsafeSliceType
+ sliceDecoder ValDecoder
+}
+
+func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ codec.sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ switch iter.WhatIsNext() {
+ case StringValue:
+ src := iter.ReadString()
+ dst, err := base64.StdEncoding.DecodeString(src)
+ if err != nil {
+ iter.ReportError("decode base64", err.Error())
+ } else {
+ codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
+ }
+ case ArrayValue:
+ codec.sliceDecoder.Decode(ptr, iter)
+ default:
+ iter.ReportError("base64Codec", "invalid input")
+ }
+}
+
+func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if codec.sliceType.UnsafeIsNil(ptr) {
+ stream.WriteNil()
+ return
+ }
+ src := *((*[]byte)(ptr))
+ encoding := base64.StdEncoding
+ stream.writeByte('"')
+ if len(src) != 0 {
+ size := encoding.EncodedLen(len(src))
+ buf := make([]byte, size)
+ encoding.Encode(buf, src)
+ stream.buf = append(stream.buf, buf...)
+ }
+ stream.writeByte('"')
+}
+
+func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
+ return len(*((*[]byte)(ptr))) == 0
+}
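
Note the special case at the top of both create functions: a []byte (slice of uint8) gets base64Codec, so byte slices marshal to base64 strings and can be decoded from either a string or a plain JSON array of numbers. A brief sketch using the package-level helpers (MarshalToString/UnmarshalFromString are assumed here; they are not shown in this hunk):

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type Blob struct {
        Data []byte `json:"data"`
    }

    func main() {
        out, _ := jsoniter.MarshalToString(Blob{Data: []byte("hi")})
        fmt.Println(out) // {"data":"aGk="}

        var b Blob
        // The ArrayValue branch of base64Codec falls back to sliceDecoder.
        _ = jsoniter.UnmarshalFromString(`{"data":[104,105]}`, &b)
        fmt.Println(string(b.Data)) // hi
    }
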
diff --git a/vendor/github.com/json-iterator/go/reflect_optional.go b/vendor/github.com/json-iterator/go/reflect_optional.go
new file mode 100644
index 000000000..fa71f4748
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_optional.go
@@ -0,0 +1,129 @@
+package jsoniter
+
+import (
+ "github.com/modern-go/reflect2"
+ "unsafe"
+)
+
+func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ decoder := decoderOfType(ctx, elemType)
+ return &OptionalDecoder{elemType, decoder}
+}
+
+func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
+ ptrType := typ.(*reflect2.UnsafePtrType)
+ elemType := ptrType.Elem()
+ elemEncoder := encoderOfType(ctx, elemType)
+ encoder := &OptionalEncoder{elemEncoder}
+ return encoder
+}
+
+type OptionalDecoder struct {
+ ValueType reflect2.Type
+ ValueDecoder ValDecoder
+}
+
+func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.ReadNil() {
+ *((*unsafe.Pointer)(ptr)) = nil
+ } else {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ //pointer is nil, so we have to allocate memory to hold the value
+ newPtr := decoder.ValueType.UnsafeNew()
+ decoder.ValueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ //reuse existing instance
+ decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+ }
+}
+
+type dereferenceDecoder struct {
+ // used only to dereference a pointer
+ valueType reflect2.Type
+ valueDecoder ValDecoder
+}
+
+func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ //pointer is nil, so we have to allocate memory to hold the value
+ newPtr := decoder.valueType.UnsafeNew()
+ decoder.valueDecoder.Decode(newPtr, iter)
+ *((*unsafe.Pointer)(ptr)) = newPtr
+ } else {
+ //reuse existing instance
+ decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
+ }
+}
+
+type OptionalEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return *((*unsafe.Pointer)(ptr)) == nil
+}
+
+type dereferenceEncoder struct {
+ ValueEncoder ValEncoder
+}
+
+func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if *((*unsafe.Pointer)(ptr)) == nil {
+ stream.WriteNil()
+ } else {
+ encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
+ }
+}
+
+func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ dePtr := *((*unsafe.Pointer)(ptr))
+ if dePtr == nil {
+ return true
+ }
+ return encoder.ValueEncoder.IsEmpty(dePtr)
+}
+
+func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ deReferenced := *((*unsafe.Pointer)(ptr))
+ if deReferenced == nil {
+ return true
+ }
+ isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := unsafe.Pointer(deReferenced)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
+type referenceEncoder struct {
+ encoder ValEncoder
+}
+
+func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
+}
+
+func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
+}
+
+type referenceDecoder struct {
+ decoder ValDecoder
+}
+
+func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
+}
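
OptionalDecoder/OptionalEncoder are what give pointer fields the usual null semantics: a JSON null leaves (or sets) the pointer nil, a concrete value allocates storage, and a nil pointer marshals back to null. A minimal sketch (struct and field names are illustrative):

    package main

    import (
        "fmt"

        jsoniter "github.com/json-iterator/go"
    )

    type Settings struct {
        Retries *int `json:"retries"`
    }

    func main() {
        var a, b Settings
        _ = jsoniter.Unmarshal([]byte(`{"retries":null}`), &a) // pointer stays nil
        _ = jsoniter.Unmarshal([]byte(`{"retries":3}`), &b)    // pointer is allocated
        fmt.Println(a.Retries == nil, *b.Retries)              // true 3

        out, _ := jsoniter.Marshal(a)
        fmt.Println(string(out)) // {"retries":null}
    }
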
diff --git a/vendor/github.com/json-iterator/go/reflect_slice.go b/vendor/github.com/json-iterator/go/reflect_slice.go
new file mode 100644
index 000000000..9441d79df
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_slice.go
@@ -0,0 +1,99 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "unsafe"
+)
+
+func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceDecoder{sliceType, decoder}
+}
+
+func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
+ sliceType := typ.(*reflect2.UnsafeSliceType)
+ encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
+ return &sliceEncoder{sliceType, encoder}
+}
+
+type sliceEncoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemEncoder ValEncoder
+}
+
+func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ if encoder.sliceType.UnsafeIsNil(ptr) {
+ stream.WriteNil()
+ return
+ }
+ length := encoder.sliceType.UnsafeLengthOf(ptr)
+ if length == 0 {
+ stream.WriteEmptyArray()
+ return
+ }
+ stream.WriteArrayStart()
+ encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
+ for i := 1; i < length; i++ {
+ stream.WriteMore()
+ elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
+ encoder.elemEncoder.Encode(elemPtr, stream)
+ }
+ stream.WriteArrayEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
+ }
+}
+
+func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.sliceType.UnsafeLengthOf(ptr) == 0
+}
+
+type sliceDecoder struct {
+ sliceType *reflect2.UnsafeSliceType
+ elemDecoder ValDecoder
+}
+
+func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.doDecode(ptr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
+ }
+}
+
+func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
+ c := iter.nextToken()
+ sliceType := decoder.sliceType
+ if c == 'n' {
+ iter.skipThreeBytes('u', 'l', 'l')
+ sliceType.UnsafeSetNil(ptr)
+ return
+ }
+ if c != '[' {
+ iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
+ return
+ }
+ c = iter.nextToken()
+ if c == ']' {
+ sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
+ return
+ }
+ iter.unreadByte()
+ sliceType.UnsafeGrow(ptr, 1)
+ elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ length := 1
+ for c = iter.nextToken(); c == ','; c = iter.nextToken() {
+ idx := length
+ length += 1
+ sliceType.UnsafeGrow(ptr, length)
+ elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
+ decoder.elemDecoder.Decode(elemPtr, iter)
+ }
+ if c != ']' {
+ iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
+ return
+ }
+}
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_decoder.go b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
new file mode 100644
index 000000000..92ae912dc
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_decoder.go
@@ -0,0 +1,1097 @@
+package jsoniter
+
+import (
+ "fmt"
+ "io"
+ "strings"
+ "unsafe"
+
+ "github.com/modern-go/reflect2"
+)
+
+func decoderOfStruct(ctx *ctx, typ reflect2.Type) ValDecoder {
+ bindings := map[string]*Binding{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, fromName := range binding.FromNames {
+ old := bindings[fromName]
+ if old == nil {
+ bindings[fromName] = binding
+ continue
+ }
+ ignoreOld, ignoreNew := resolveConflictBinding(ctx.frozenConfig, old, binding)
+ if ignoreOld {
+ delete(bindings, fromName)
+ }
+ if !ignoreNew {
+ bindings[fromName] = binding
+ }
+ }
+ }
+ fields := map[string]*structFieldDecoder{}
+ for k, binding := range bindings {
+ fields[k] = binding.Decoder.(*structFieldDecoder)
+ }
+
+ if !ctx.caseSensitive() {
+ for k, binding := range bindings {
+ if _, found := fields[strings.ToLower(k)]; !found {
+ fields[strings.ToLower(k)] = binding.Decoder.(*structFieldDecoder)
+ }
+ }
+ }
+
+ return createStructDecoder(ctx, typ, fields)
+}
+
+func createStructDecoder(ctx *ctx, typ reflect2.Type, fields map[string]*structFieldDecoder) ValDecoder {
+ if ctx.disallowUnknownFields {
+ return &generalStructDecoder{typ: typ, fields: fields, disallowUnknownFields: true}
+ }
+ knownHash := map[int64]struct{}{
+ 0: {},
+ }
+
+ switch len(fields) {
+ case 0:
+ return &skipObjectDecoder{typ}
+ case 1:
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ return &oneFieldStructDecoder{typ, fieldHash, fieldDecoder}
+ }
+ case 2:
+ var fieldHash1 int64
+ var fieldHash2 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldHash1 == 0 {
+ fieldHash1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else {
+ fieldHash2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ }
+ }
+ return &twoFieldsStructDecoder{typ, fieldHash1, fieldDecoder1, fieldHash2, fieldDecoder2}
+ case 3:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ }
+ }
+ return &threeFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3}
+ case 4:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ }
+ }
+ return &fourFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4}
+ case 5:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ }
+ }
+ return &fiveFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5}
+ case 6:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ }
+ }
+ return &sixFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6}
+ case 7:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ }
+ }
+ return &sevenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7}
+ case 8:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ }
+ }
+ return &eightFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8}
+ case 9:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ }
+ }
+ return &nineFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9}
+ case 10:
+ var fieldName1 int64
+ var fieldName2 int64
+ var fieldName3 int64
+ var fieldName4 int64
+ var fieldName5 int64
+ var fieldName6 int64
+ var fieldName7 int64
+ var fieldName8 int64
+ var fieldName9 int64
+ var fieldName10 int64
+ var fieldDecoder1 *structFieldDecoder
+ var fieldDecoder2 *structFieldDecoder
+ var fieldDecoder3 *structFieldDecoder
+ var fieldDecoder4 *structFieldDecoder
+ var fieldDecoder5 *structFieldDecoder
+ var fieldDecoder6 *structFieldDecoder
+ var fieldDecoder7 *structFieldDecoder
+ var fieldDecoder8 *structFieldDecoder
+ var fieldDecoder9 *structFieldDecoder
+ var fieldDecoder10 *structFieldDecoder
+ for fieldName, fieldDecoder := range fields {
+ fieldHash := calcHash(fieldName, ctx.caseSensitive())
+ _, known := knownHash[fieldHash]
+ if known {
+ return &generalStructDecoder{typ, fields, false}
+ }
+ knownHash[fieldHash] = struct{}{}
+ if fieldName1 == 0 {
+ fieldName1 = fieldHash
+ fieldDecoder1 = fieldDecoder
+ } else if fieldName2 == 0 {
+ fieldName2 = fieldHash
+ fieldDecoder2 = fieldDecoder
+ } else if fieldName3 == 0 {
+ fieldName3 = fieldHash
+ fieldDecoder3 = fieldDecoder
+ } else if fieldName4 == 0 {
+ fieldName4 = fieldHash
+ fieldDecoder4 = fieldDecoder
+ } else if fieldName5 == 0 {
+ fieldName5 = fieldHash
+ fieldDecoder5 = fieldDecoder
+ } else if fieldName6 == 0 {
+ fieldName6 = fieldHash
+ fieldDecoder6 = fieldDecoder
+ } else if fieldName7 == 0 {
+ fieldName7 = fieldHash
+ fieldDecoder7 = fieldDecoder
+ } else if fieldName8 == 0 {
+ fieldName8 = fieldHash
+ fieldDecoder8 = fieldDecoder
+ } else if fieldName9 == 0 {
+ fieldName9 = fieldHash
+ fieldDecoder9 = fieldDecoder
+ } else {
+ fieldName10 = fieldHash
+ fieldDecoder10 = fieldDecoder
+ }
+ }
+ return &tenFieldsStructDecoder{typ,
+ fieldName1, fieldDecoder1,
+ fieldName2, fieldDecoder2,
+ fieldName3, fieldDecoder3,
+ fieldName4, fieldDecoder4,
+ fieldName5, fieldDecoder5,
+ fieldName6, fieldDecoder6,
+ fieldName7, fieldDecoder7,
+ fieldName8, fieldDecoder8,
+ fieldName9, fieldDecoder9,
+ fieldName10, fieldDecoder10}
+ }
+ return &generalStructDecoder{typ, fields, false}
+}
+
+type generalStructDecoder struct {
+ typ reflect2.Type
+ fields map[string]*structFieldDecoder
+ disallowUnknownFields bool
+}
+
+func (decoder *generalStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ var c byte
+ for c = ','; c == ','; c = iter.nextToken() {
+ decoder.decodeOneField(ptr, iter)
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ if c != '}' {
+ iter.ReportError("struct Decode", `expect }, but found `+string([]byte{c}))
+ }
+ iter.decrementDepth()
+}
+
+func (decoder *generalStructDecoder) decodeOneField(ptr unsafe.Pointer, iter *Iterator) {
+ var field string
+ var fieldDecoder *structFieldDecoder
+ if iter.cfg.objectFieldMustBeSimpleString {
+ fieldBytes := iter.ReadStringAsSlice()
+ field = *(*string)(unsafe.Pointer(&fieldBytes))
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ } else {
+ field = iter.ReadString()
+ fieldDecoder = decoder.fields[field]
+ if fieldDecoder == nil && !iter.cfg.caseSensitive {
+ fieldDecoder = decoder.fields[strings.ToLower(field)]
+ }
+ }
+ if fieldDecoder == nil {
+ if decoder.disallowUnknownFields {
+ msg := "found unknown field: " + field
+ iter.ReportError("ReadObject", msg)
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ iter.Skip()
+ return
+ }
+ c := iter.nextToken()
+ if c != ':' {
+ iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
+ }
+ fieldDecoder.Decode(ptr, iter)
+}
+
+type skipObjectDecoder struct {
+ typ reflect2.Type
+}
+
+func (decoder *skipObjectDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ valueType := iter.WhatIsNext()
+ if valueType != ObjectValue && valueType != NilValue {
+ iter.ReportError("skipObjectDecoder", "expect object or null")
+ return
+ }
+ iter.Skip()
+}
+
+type oneFieldStructDecoder struct {
+ typ reflect2.Type
+ fieldHash int64
+ fieldDecoder *structFieldDecoder
+}
+
+func (decoder *oneFieldStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ if iter.readFieldHash() == decoder.fieldHash {
+ decoder.fieldDecoder.Decode(ptr, iter)
+ } else {
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type twoFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+}
+
+func (decoder *twoFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type threeFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+}
+
+func (decoder *threeFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type fourFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+}
+
+func (decoder *fourFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type fiveFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+}
+
+func (decoder *fiveFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type sixFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+}
+
+func (decoder *sixFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type sevenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+}
+
+func (decoder *sevenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type eightFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+}
+
+func (decoder *eightFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type nineFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+}
+
+func (decoder *nineFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type tenFieldsStructDecoder struct {
+ typ reflect2.Type
+ fieldHash1 int64
+ fieldDecoder1 *structFieldDecoder
+ fieldHash2 int64
+ fieldDecoder2 *structFieldDecoder
+ fieldHash3 int64
+ fieldDecoder3 *structFieldDecoder
+ fieldHash4 int64
+ fieldDecoder4 *structFieldDecoder
+ fieldHash5 int64
+ fieldDecoder5 *structFieldDecoder
+ fieldHash6 int64
+ fieldDecoder6 *structFieldDecoder
+ fieldHash7 int64
+ fieldDecoder7 *structFieldDecoder
+ fieldHash8 int64
+ fieldDecoder8 *structFieldDecoder
+ fieldHash9 int64
+ fieldDecoder9 *structFieldDecoder
+ fieldHash10 int64
+ fieldDecoder10 *structFieldDecoder
+}
+
+func (decoder *tenFieldsStructDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if !iter.readObjectStart() {
+ return
+ }
+ if !iter.incrementDepth() {
+ return
+ }
+ for {
+ switch iter.readFieldHash() {
+ case decoder.fieldHash1:
+ decoder.fieldDecoder1.Decode(ptr, iter)
+ case decoder.fieldHash2:
+ decoder.fieldDecoder2.Decode(ptr, iter)
+ case decoder.fieldHash3:
+ decoder.fieldDecoder3.Decode(ptr, iter)
+ case decoder.fieldHash4:
+ decoder.fieldDecoder4.Decode(ptr, iter)
+ case decoder.fieldHash5:
+ decoder.fieldDecoder5.Decode(ptr, iter)
+ case decoder.fieldHash6:
+ decoder.fieldDecoder6.Decode(ptr, iter)
+ case decoder.fieldHash7:
+ decoder.fieldDecoder7.Decode(ptr, iter)
+ case decoder.fieldHash8:
+ decoder.fieldDecoder8.Decode(ptr, iter)
+ case decoder.fieldHash9:
+ decoder.fieldDecoder9.Decode(ptr, iter)
+ case decoder.fieldHash10:
+ decoder.fieldDecoder10.Decode(ptr, iter)
+ default:
+ iter.Skip()
+ }
+ if iter.isObjectEnd() {
+ break
+ }
+ }
+ if iter.Error != nil && iter.Error != io.EOF && len(decoder.typ.Type1().Name()) != 0 {
+ iter.Error = fmt.Errorf("%v.%s", decoder.typ, iter.Error.Error())
+ }
+ iter.decrementDepth()
+}
+
+type structFieldDecoder struct {
+ field reflect2.StructField
+ fieldDecoder ValDecoder
+}
+
+func (decoder *structFieldDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ fieldPtr := decoder.field.UnsafeGet(ptr)
+ decoder.fieldDecoder.Decode(fieldPtr, iter)
+ if iter.Error != nil && iter.Error != io.EOF {
+ iter.Error = fmt.Errorf("%s: %s", decoder.field.Name(), iter.Error.Error())
+ }
+}
+
+type stringModeStringDecoder struct {
+ elemDecoder ValDecoder
+ cfg *frozenConfig
+}
+
+func (decoder *stringModeStringDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ decoder.elemDecoder.Decode(ptr, iter)
+ str := *((*string)(ptr))
+ tempIter := decoder.cfg.BorrowIterator([]byte(str))
+ defer decoder.cfg.ReturnIterator(tempIter)
+ *((*string)(ptr)) = tempIter.ReadString()
+}
+
+type stringModeNumberDecoder struct {
+ elemDecoder ValDecoder
+}
+
+func (decoder *stringModeNumberDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
+ if iter.WhatIsNext() == NilValue {
+ decoder.elemDecoder.Decode(ptr, iter)
+ return
+ }
+
+ c := iter.nextToken()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+ decoder.elemDecoder.Decode(ptr, iter)
+ if iter.Error != nil {
+ return
+ }
+ c = iter.readByte()
+ if c != '"' {
+ iter.ReportError("stringModeNumberDecoder", `expect ", but found `+string([]byte{c}))
+ return
+ }
+}
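
For orientation, here is a minimal, hedged sketch of how the specialized decoders above surface through jsoniter's public API. It is not part of the vendored diff; the struct, field names, and sample data are invented for illustration, and the comments describe only the expected fast path.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// TwoFields exposes exactly two JSON fields, so createStructDecoder is expected
// to pick the twoFieldsStructDecoder fast path (hash-based field dispatch)
// instead of the general map-based decoder, provided the field hashes do not
// collide and DisallowUnknownFields is off.
type TwoFields struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	var v TwoFields
	data := []byte(`{"name":"alice","age":30,"extra":true}`)
	// ConfigCompatibleWithStandardLibrary matches field names case-sensitively;
	// ConfigDefault would also accept case-insensitive matches.
	if err := jsoniter.ConfigCompatibleWithStandardLibrary.Unmarshal(data, &v); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Printf("%+v\n", v) // the unknown "extra" key is skipped by default
}
```

Enabling DisallowUnknownFields in a jsoniter.Config would instead route decoding through generalStructDecoder, which reports the stray "extra" key as an error.
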
diff --git a/vendor/github.com/json-iterator/go/reflect_struct_encoder.go b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
new file mode 100644
index 000000000..152e3ef5a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/reflect_struct_encoder.go
@@ -0,0 +1,211 @@
+package jsoniter
+
+import (
+ "fmt"
+ "github.com/modern-go/reflect2"
+ "io"
+ "reflect"
+ "unsafe"
+)
+
+func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
+ type bindingTo struct {
+ binding *Binding
+ toName string
+ ignored bool
+ }
+ orderedBindings := []*bindingTo{}
+ structDescriptor := describeStruct(ctx, typ)
+ for _, binding := range structDescriptor.Fields {
+ for _, toName := range binding.ToNames {
+ new := &bindingTo{
+ binding: binding,
+ toName: toName,
+ }
+ for _, old := range orderedBindings {
+ if old.toName != toName {
+ continue
+ }
+ old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
+ }
+ orderedBindings = append(orderedBindings, new)
+ }
+ }
+ if len(orderedBindings) == 0 {
+ return &emptyStructEncoder{}
+ }
+ finalOrderedFields := []structFieldTo{}
+ for _, bindingTo := range orderedBindings {
+ if !bindingTo.ignored {
+ finalOrderedFields = append(finalOrderedFields, structFieldTo{
+ encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
+ toName: bindingTo.toName,
+ })
+ }
+ }
+ return &structEncoder{typ, finalOrderedFields}
+}
+
+func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
+ encoder := createEncoderOfNative(ctx, typ)
+ if encoder != nil {
+ return encoder
+ }
+ kind := typ.Kind()
+ switch kind {
+ case reflect.Interface:
+ return &dynamicEncoder{typ}
+ case reflect.Struct:
+ return &structEncoder{typ: typ}
+ case reflect.Array:
+ return &arrayEncoder{}
+ case reflect.Slice:
+ return &sliceEncoder{}
+ case reflect.Map:
+ return encoderOfMap(ctx, typ)
+ case reflect.Ptr:
+ return &OptionalEncoder{}
+ default:
+ return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
+ }
+}
+
+func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
+ newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
+ oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
+ if newTagged {
+ if oldTagged {
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ } else {
+ return true, false
+ }
+ } else {
+ if oldTagged {
+ return true, false
+ }
+ if len(old.levels) > len(new.levels) {
+ return true, false
+ } else if len(new.levels) > len(old.levels) {
+ return false, true
+ } else {
+ return true, true
+ }
+ }
+}
+
+type structFieldEncoder struct {
+ field reflect2.StructField
+ fieldEncoder ValEncoder
+ omitempty bool
+}
+
+func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ encoder.fieldEncoder.Encode(fieldPtr, stream)
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
+ }
+}
+
+func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return encoder.fieldEncoder.IsEmpty(fieldPtr)
+}
+
+func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
+ isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
+ if !converted {
+ return false
+ }
+ fieldPtr := encoder.field.UnsafeGet(ptr)
+ return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
+}
+
+type IsEmbeddedPtrNil interface {
+ IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
+}
+
+type structEncoder struct {
+ typ reflect2.Type
+ fields []structFieldTo
+}
+
+type structFieldTo struct {
+ encoder *structFieldEncoder
+ toName string
+}
+
+func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteObjectStart()
+ isNotFirst := false
+ for _, field := range encoder.fields {
+ if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
+ continue
+ }
+ if field.encoder.IsEmbeddedPtrNil(ptr) {
+ continue
+ }
+ if isNotFirst {
+ stream.WriteMore()
+ }
+ stream.WriteObjectField(field.toName)
+ field.encoder.Encode(ptr, stream)
+ isNotFirst = true
+ }
+ stream.WriteObjectEnd()
+ if stream.Error != nil && stream.Error != io.EOF {
+ stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
+ }
+}
+
+func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type emptyStructEncoder struct {
+}
+
+func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.WriteEmptyObject()
+}
+
+func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return false
+}
+
+type stringModeNumberEncoder struct {
+ elemEncoder ValEncoder
+}
+
+func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ stream.writeByte('"')
+ encoder.elemEncoder.Encode(ptr, stream)
+ stream.writeByte('"')
+}
+
+func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
+
+type stringModeStringEncoder struct {
+ elemEncoder ValEncoder
+ cfg *frozenConfig
+}
+
+func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
+ tempStream := encoder.cfg.BorrowStream(nil)
+ tempStream.Attachment = stream.Attachment
+ defer encoder.cfg.ReturnStream(tempStream)
+ encoder.elemEncoder.Encode(ptr, tempStream)
+ stream.WriteString(string(tempStream.Buffer()))
+}
+
+func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
+ return encoder.elemEncoder.IsEmpty(ptr)
+}
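
As a usage-level counterpart to the encoder above, the sketch below (not part of the diff; the Product type is invented) shows the `omitempty` and `,string` tag options that map onto structFieldEncoder.IsEmpty and stringModeNumberEncoder respectively.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Product struct {
	Name  string  `json:"name"`
	Price float64 `json:"price,string"`   // number wrapped in quotes by stringModeNumberEncoder
	Note  string  `json:"note,omitempty"` // dropped when empty via structFieldEncoder.IsEmpty
}

func main() {
	out, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalToString(Product{
		Name:  "widget",
		Price: 9.5,
	})
	if err != nil {
		fmt.Println("encode error:", err)
		return
	}
	fmt.Println(out) // expected: {"name":"widget","price":"9.5"}
}
```
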
diff --git a/vendor/github.com/json-iterator/go/stream.go b/vendor/github.com/json-iterator/go/stream.go
new file mode 100644
index 000000000..23d8a3ad6
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream.go
@@ -0,0 +1,210 @@
+package jsoniter
+
+import (
+ "io"
+)
+
+// Stream is an io.Writer-like object with JSON-specific write functions.
+// Errors are not returned as values; they are stored in the Error field of the Stream instance.
+type Stream struct {
+ cfg *frozenConfig
+ out io.Writer
+ buf []byte
+ Error error
+ indention int
+ Attachment interface{} // open for customized encoder
+}
+
+// NewStream creates a new Stream instance.
+// cfg can be jsoniter.ConfigDefault.
+// out can be nil to write only to the internal buffer.
+// bufSize is the initial size of the internal buffer in bytes.
+func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
+ return &Stream{
+ cfg: cfg.(*frozenConfig),
+ out: out,
+ buf: make([]byte, 0, bufSize),
+ Error: nil,
+ indention: 0,
+ }
+}
+
+// Pool returns a pool that can provide more streams with the same configuration.
+func (stream *Stream) Pool() StreamPool {
+ return stream.cfg
+}
+
+// Reset reuses this stream instance by assigning a new writer.
+func (stream *Stream) Reset(out io.Writer) {
+ stream.out = out
+ stream.buf = stream.buf[:0]
+}
+
+// Available returns how many bytes are unused in the buffer.
+func (stream *Stream) Available() int {
+ return cap(stream.buf) - len(stream.buf)
+}
+
+// Buffered returns the number of bytes that have been written into the current buffer.
+func (stream *Stream) Buffered() int {
+ return len(stream.buf)
+}
+
+// Buffer returns the internal buffer; if the writer is nil, use this method to take the result.
+func (stream *Stream) Buffer() []byte {
+ return stream.buf
+}
+
+// SetBuffer sets the internal buffer, allowing callers to append to it directly.
+func (stream *Stream) SetBuffer(buf []byte) {
+ stream.buf = buf
+}
+
+// Write writes the contents of p into the buffer.
+// It returns the number of bytes written.
+// If nn < len(p), it also returns an error explaining
+// why the write is short.
+func (stream *Stream) Write(p []byte) (nn int, err error) {
+ stream.buf = append(stream.buf, p...)
+ if stream.out != nil {
+ nn, err = stream.out.Write(stream.buf)
+ stream.buf = stream.buf[nn:]
+ return
+ }
+ return len(p), nil
+}
+
+// writeByte writes a single byte to the internal buffer.
+func (stream *Stream) writeByte(c byte) {
+ stream.buf = append(stream.buf, c)
+}
+
+func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
+ stream.buf = append(stream.buf, c1, c2)
+}
+
+func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3)
+}
+
+func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4)
+}
+
+func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
+ stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
+}
+
+// Flush writes any buffered data to the underlying io.Writer.
+func (stream *Stream) Flush() error {
+ if stream.out == nil {
+ return nil
+ }
+ if stream.Error != nil {
+ return stream.Error
+ }
+ _, err := stream.out.Write(stream.buf)
+ if err != nil {
+ if stream.Error == nil {
+ stream.Error = err
+ }
+ return err
+ }
+ stream.buf = stream.buf[:0]
+ return nil
+}
+
+// WriteRaw writes the string out without quotes, as raw bytes.
+func (stream *Stream) WriteRaw(s string) {
+ stream.buf = append(stream.buf, s...)
+}
+
+// WriteNil writes null to the stream.
+func (stream *Stream) WriteNil() {
+ stream.writeFourBytes('n', 'u', 'l', 'l')
+}
+
+// WriteTrue writes true to the stream.
+func (stream *Stream) WriteTrue() {
+ stream.writeFourBytes('t', 'r', 'u', 'e')
+}
+
+// WriteFalse writes false to the stream.
+func (stream *Stream) WriteFalse() {
+ stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
+}
+
+// WriteBool writes true or false to the stream.
+func (stream *Stream) WriteBool(val bool) {
+ if val {
+ stream.WriteTrue()
+ } else {
+ stream.WriteFalse()
+ }
+}
+
+// WriteObjectStart writes { with possible indention.
+func (stream *Stream) WriteObjectStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('{')
+ stream.writeIndention(0)
+}
+
+// WriteObjectField writes "field": with possible indention.
+func (stream *Stream) WriteObjectField(field string) {
+ stream.WriteString(field)
+ if stream.indention > 0 {
+ stream.writeTwoBytes(':', ' ')
+ } else {
+ stream.writeByte(':')
+ }
+}
+
+// WriteObjectEnd writes } with possible indention.
+func (stream *Stream) WriteObjectEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte('}')
+}
+
+// WriteEmptyObject writes {}.
+func (stream *Stream) WriteEmptyObject() {
+ stream.writeByte('{')
+ stream.writeByte('}')
+}
+
+// WriteMore writes , with possible indention.
+func (stream *Stream) WriteMore() {
+ stream.writeByte(',')
+ stream.writeIndention(0)
+}
+
+// WriteArrayStart writes [ with possible indention.
+func (stream *Stream) WriteArrayStart() {
+ stream.indention += stream.cfg.indentionStep
+ stream.writeByte('[')
+ stream.writeIndention(0)
+}
+
+// WriteEmptyArray writes [].
+func (stream *Stream) WriteEmptyArray() {
+ stream.writeTwoBytes('[', ']')
+}
+
+// WriteArrayEnd writes ] with possible indention.
+func (stream *Stream) WriteArrayEnd() {
+ stream.writeIndention(stream.cfg.indentionStep)
+ stream.indention -= stream.cfg.indentionStep
+ stream.writeByte(']')
+}
+
+func (stream *Stream) writeIndention(delta int) {
+ if stream.indention == 0 {
+ return
+ }
+ stream.writeByte('\n')
+ toWrite := stream.indention - delta
+ for i := 0; i < toWrite; i++ {
+ stream.buf = append(stream.buf, ' ')
+ }
+}
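
The Stream type above can also be driven directly, without going through Marshal. Below is a small hedged sketch (not part of the vendored change) that writes a JSON object by hand and flushes it to a bytes.Buffer.

```go
package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	// 64 is only the initial buffer capacity; the buffer grows as needed.
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 64)

	stream.WriteObjectStart()
	stream.WriteObjectField("id")
	stream.WriteInt(42)
	stream.WriteMore()
	stream.WriteObjectField("active")
	stream.WriteBool(true)
	stream.WriteObjectEnd()

	if err := stream.Flush(); err != nil {
		fmt.Println("flush error:", err)
		return
	}
	fmt.Println(buf.String()) // {"id":42,"active":true}
}
```

With ConfigDefault the indention step is zero, so no newlines or padding are emitted; an indented Config would add them via writeIndention.
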
diff --git a/vendor/github.com/json-iterator/go/stream_float.go b/vendor/github.com/json-iterator/go/stream_float.go
new file mode 100644
index 000000000..826aa594a
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_float.go
@@ -0,0 +1,111 @@
+package jsoniter
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+)
+
+var pow10 []uint64
+
+func init() {
+ pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
+}
+
+// WriteFloat32 writes float32 to the stream.
+func (stream *Stream) WriteFloat32(val float32) {
+ if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
+ stream.Error = fmt.Errorf("unsupported value: %f", val)
+ return
+ }
+ abs := math.Abs(float64(val))
+ fmt := byte('f')
+ // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
+ if abs != 0 {
+ if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
+}
+
+// WriteFloat32Lossy writes float32 to the stream with only 6 digits of precision; it is much faster than WriteFloat32.
+func (stream *Stream) WriteFloat32Lossy(val float32) {
+ if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
+ stream.Error = fmt.Errorf("unsupported value: %f", val)
+ return
+ }
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat32(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(float64(val)*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
+
+// WriteFloat64 writes float64 to the stream.
+func (stream *Stream) WriteFloat64(val float64) {
+ if math.IsInf(val, 0) || math.IsNaN(val) {
+ stream.Error = fmt.Errorf("unsupported value: %f", val)
+ return
+ }
+ abs := math.Abs(val)
+ fmt := byte('f')
+ // Note: switch to exponent format outside [1e-6, 1e21), matching WriteFloat32's cutoffs but compared as float64.
+ if abs != 0 {
+ if abs < 1e-6 || abs >= 1e21 {
+ fmt = 'e'
+ }
+ }
+ stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
+}
+
+// WriteFloat64Lossy writes float64 to the stream with only 6 digits of precision; it is much faster than WriteFloat64.
+func (stream *Stream) WriteFloat64Lossy(val float64) {
+ if math.IsInf(val, 0) || math.IsNaN(val) {
+ stream.Error = fmt.Errorf("unsupported value: %f", val)
+ return
+ }
+ if val < 0 {
+ stream.writeByte('-')
+ val = -val
+ }
+ if val > 0x4ffffff {
+ stream.WriteFloat64(val)
+ return
+ }
+ precision := 6
+ exp := uint64(1000000) // 6
+ lval := uint64(val*float64(exp) + 0.5)
+ stream.WriteUint64(lval / exp)
+ fval := lval % exp
+ if fval == 0 {
+ return
+ }
+ stream.writeByte('.')
+ for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
+ stream.writeByte('0')
+ }
+ stream.WriteUint64(fval)
+ for stream.buf[len(stream.buf)-1] == '0' {
+ stream.buf = stream.buf[:len(stream.buf)-1]
+ }
+}
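
To make the lossy variants above concrete, here is a small comparison sketch (the writeBoth helper is invented and not part of the diff). WriteFloat64Lossy rounds to at most 6 fractional digits and strips trailing zeros, which is why it is faster but not round-trip exact.

```go
package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// writeBoth prints the exact and the lossy rendering side by side.
// The output is a comparison string, not valid JSON.
func writeBoth(val float64) {
	var buf bytes.Buffer
	s := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 32)
	s.WriteFloat64(val)
	s.WriteRaw(" vs ")
	s.WriteFloat64Lossy(val)
	s.Flush()
	fmt.Println(buf.String())
}

func main() {
	writeBoth(3.14159265358979) // 3.14159265358979 vs 3.141593
	writeBoth(0.1)              // 0.1 vs 0.1
}
```
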
diff --git a/vendor/github.com/json-iterator/go/stream_int.go b/vendor/github.com/json-iterator/go/stream_int.go
new file mode 100644
index 000000000..d1059ee4c
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_int.go
@@ -0,0 +1,190 @@
+package jsoniter
+
+var digits []uint32
+
+func init() {
+ digits = make([]uint32, 1000)
+ for i := uint32(0); i < 1000; i++ {
+ digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
+ if i < 10 {
+ digits[i] += 2 << 24
+ } else if i < 100 {
+ digits[i] += 1 << 24
+ }
+ }
+}
+
+func writeFirstBuf(space []byte, v uint32) []byte {
+ start := v >> 24
+ if start == 0 {
+ space = append(space, byte(v>>16), byte(v>>8))
+ } else if start == 1 {
+ space = append(space, byte(v>>8))
+ }
+ space = append(space, byte(v))
+ return space
+}
+
+func writeBuf(buf []byte, v uint32) []byte {
+ return append(buf, byte(v>>16), byte(v>>8), byte(v))
+}
+
+// WriteUint8 writes uint8 to the stream.
+func (stream *Stream) WriteUint8(val uint8) {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteInt8 writes int8 to the stream.
+func (stream *Stream) WriteInt8(nval int8) {
+ var val uint8
+ if nval < 0 {
+ val = uint8(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint8(nval)
+ }
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+}
+
+// WriteUint16 writes uint16 to the stream.
+func (stream *Stream) WriteUint16(val uint16) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+}
+
+// WriteInt16 writes int16 to the stream.
+func (stream *Stream) WriteInt16(nval int16) {
+ var val uint16
+ if nval < 0 {
+ val = uint16(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint16(nval)
+ }
+ stream.WriteUint16(val)
+}
+
+// WriteUint32 writes uint32 to the stream.
+func (stream *Stream) WriteUint32(val uint32) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ } else {
+ r3 := q2 - q3*1000
+ stream.buf = append(stream.buf, byte(q3+'0'))
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt32 writes int32 to the stream.
+func (stream *Stream) WriteInt32(nval int32) {
+ var val uint32
+ if nval < 0 {
+ val = uint32(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint32(nval)
+ }
+ stream.WriteUint32(val)
+}
+
+// WriteUint64 writes uint64 to the stream.
+func (stream *Stream) WriteUint64(val uint64) {
+ q1 := val / 1000
+ if q1 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[val])
+ return
+ }
+ r1 := val - q1*1000
+ q2 := q1 / 1000
+ if q2 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q1])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r2 := q1 - q2*1000
+ q3 := q2 / 1000
+ if q3 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q2])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r3 := q2 - q3*1000
+ q4 := q3 / 1000
+ if q4 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q3])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r4 := q3 - q4*1000
+ q5 := q4 / 1000
+ if q5 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q4])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+ return
+ }
+ r5 := q4 - q5*1000
+ q6 := q5 / 1000
+ if q6 == 0 {
+ stream.buf = writeFirstBuf(stream.buf, digits[q5])
+ } else {
+ stream.buf = writeFirstBuf(stream.buf, digits[q6])
+ r6 := q5 - q6*1000
+ stream.buf = writeBuf(stream.buf, digits[r6])
+ }
+ stream.buf = writeBuf(stream.buf, digits[r5])
+ stream.buf = writeBuf(stream.buf, digits[r4])
+ stream.buf = writeBuf(stream.buf, digits[r3])
+ stream.buf = writeBuf(stream.buf, digits[r2])
+ stream.buf = writeBuf(stream.buf, digits[r1])
+}
+
+// WriteInt64 writes int64 to the stream.
+func (stream *Stream) WriteInt64(nval int64) {
+ var val uint64
+ if nval < 0 {
+ val = uint64(-nval)
+ stream.buf = append(stream.buf, '-')
+ } else {
+ val = uint64(nval)
+ }
+ stream.WriteUint64(val)
+}
+
+// WriteInt writes int to the stream.
+func (stream *Stream) WriteInt(val int) {
+ stream.WriteInt64(int64(val))
+}
+
+// WriteUint writes uint to the stream.
+func (stream *Stream) WriteUint(val uint) {
+ stream.WriteUint64(uint64(val))
+}
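
The integer writers above all lean on the packed digits table built in init(). The standalone sketch below (independent of the vendored code; helper names are invented) re-derives the trick: each table entry stores three ASCII digits in its low 24 bits, and the high byte records how many leading digits can be dropped when that chunk is the most significant one.

```go
package main

import "fmt"

// buildDigits mirrors the idea of the vendored init(): entry i packs the three
// ASCII digits of i (000..999) into bytes 2..0 and stores in byte 3 how many
// leading zeros to skip when the chunk leads the number.
func buildDigits() []uint32 {
	digits := make([]uint32, 1000)
	for i := uint32(0); i < 1000; i++ {
		digits[i] = ((i/100 + '0') << 16) | ((i/10%10 + '0') << 8) | (i%10 + '0')
		if i < 10 {
			digits[i] += 2 << 24
		} else if i < 100 {
			digits[i] += 1 << 24
		}
	}
	return digits
}

// appendUint32 renders val in decimal, three digits per table lookup,
// much like Stream.WriteUint32 but written recursively for brevity.
func appendUint32(buf []byte, val uint32, digits []uint32) []byte {
	if val < 1000 {
		d := digits[val]
		switch d >> 24 {
		case 0: // 100..999: keep all three digits
			return append(buf, byte(d>>16), byte(d>>8), byte(d))
		case 1: // 10..99: drop one leading zero
			return append(buf, byte(d>>8), byte(d))
		default: // 0..9: drop two leading zeros
			return append(buf, byte(d))
		}
	}
	buf = appendUint32(buf, val/1000, digits)
	d := digits[val%1000]
	return append(buf, byte(d>>16), byte(d>>8), byte(d)) // non-leading chunks keep zero padding
}

func main() {
	digits := buildDigits()
	fmt.Println(string(appendUint32(nil, 42, digits)))      // 42
	fmt.Println(string(appendUint32(nil, 1002003, digits))) // 1002003
}
```

The vendored WriteUint32 and WriteUint64 unroll essentially the same per-chunk logic without recursion.
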
diff --git a/vendor/github.com/json-iterator/go/stream_str.go b/vendor/github.com/json-iterator/go/stream_str.go
new file mode 100644
index 000000000..54c2ba0b3
--- /dev/null
+++ b/vendor/github.com/json-iterator/go/stream_str.go
@@ -0,0 +1,372 @@
+package jsoniter
+
+import (
+ "unicode/utf8"
+)
+
+// htmlSafeSet holds the value true if the ASCII character with the given
+// array position can be safely represented inside a JSON string, embedded
+// inside of HTML