vendor: run make vendor-update

Aliaksandr Valialkin 2024-03-06 21:24:42 +02:00
parent 8efe12d66e
commit d8688c9e82
No known key found for this signature in database
GPG key ID: 52C003EE2BCDB9EB
92 changed files with 3037 additions and 1630 deletions

go.mod

@ -11,9 +11,9 @@ require (
github.com/VictoriaMetrics/metrics v1.33.0
github.com/VictoriaMetrics/metricsql v0.75.0
github.com/aws/aws-sdk-go-v2 v1.25.2
github.com/aws/aws-sdk-go-v2/config v1.27.4
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.6
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1
github.com/aws/aws-sdk-go-v2/config v1.27.6
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.8
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.3
github.com/bmatcuk/doublestar/v4 v4.6.1
github.com/cespare/xxhash/v2 v2.2.0
github.com/cheggaaa/pb/v3 v3.1.5
@ -30,16 +30,16 @@ require (
github.com/valyala/gozstd v1.20.1
github.com/valyala/histogram v1.2.0
github.com/valyala/quicktemplate v1.7.0
golang.org/x/net v0.21.0 // indirect
golang.org/x/oauth2 v0.17.0
golang.org/x/sys v0.17.0
google.golang.org/api v0.167.0
golang.org/x/net v0.22.0 // indirect
golang.org/x/oauth2 v0.18.0
golang.org/x/sys v0.18.0
google.golang.org/api v0.168.0
gopkg.in/yaml.v2 v2.4.0
)
require (
cloud.google.com/go v0.112.1 // indirect
cloud.google.com/go/compute v1.24.0 // indirect
cloud.google.com/go/compute v1.25.0 // indirect
cloud.google.com/go/compute/metadata v0.2.3 // indirect
cloud.google.com/go/iam v1.1.6 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 // indirect
@ -47,21 +47,21 @@ require (
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 // indirect
github.com/aws/aws-sdk-go v1.50.29 // indirect
github.com/aws/aws-sdk-go v1.50.32 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.4 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.17.6 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.0 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.28.3 // indirect
github.com/aws/smithy-go v1.20.1 // indirect
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@ -74,9 +74,9 @@ require (
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.1 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang-jwt/jwt/v5 v5.2.0 // indirect
github.com/golang-jwt/jwt/v5 v5.2.1 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/s2a-go v0.1.7 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
@ -102,13 +102,13 @@ require (
github.com/prometheus/procfs v0.12.0 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/stretchr/testify v1.8.4 // indirect
github.com/stretchr/testify v1.9.0 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/xrash/smetrics v0.0.0-20231213231151-1d8dd44e695e // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/collector/featuregate v1.2.0 // indirect
go.opentelemetry.io/collector/pdata v1.2.0 // indirect
go.opentelemetry.io/collector/semconv v0.95.0 // indirect
go.opentelemetry.io/collector/featuregate v1.3.0 // indirect
go.opentelemetry.io/collector/pdata v1.3.0 // indirect
go.opentelemetry.io/collector/semconv v0.96.0 // indirect
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 // indirect
go.opentelemetry.io/otel v1.24.0 // indirect
@ -117,17 +117,17 @@ require (
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/goleak v1.3.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/crypto v0.20.0 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 // indirect
golang.org/x/sync v0.6.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/genproto v0.0.0-20240228224816-df926f6c8641 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240228224816-df926f6c8641 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240228224816-df926f6c8641 // indirect
google.golang.org/grpc v1.62.0 // indirect
google.golang.org/protobuf v1.32.0 // indirect
google.golang.org/genproto v0.0.0-20240304212257-790db918fca8 // indirect
google.golang.org/genproto/googleapis/api v0.0.0-20240304212257-790db918fca8 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8 // indirect
google.golang.org/grpc v1.62.1 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
k8s.io/apimachinery v0.29.2 // indirect
k8s.io/client-go v0.29.2 // indirect

go.sum

@ -21,8 +21,8 @@ cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvf
cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg=
cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40=
cloud.google.com/go/compute v1.25.0 h1:H1/4SqSUhjPFE7L5ddzHOfY2bCAvjwNRZPNl6Ni5oYU=
cloud.google.com/go/compute v1.25.0/go.mod h1:GR7F0ZPZH8EhChlMo9FkLd7eUTwEymjqQagxzilIxIE=
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
@ -88,20 +88,20 @@ github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHG
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/aws/aws-sdk-go v1.38.35/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
github.com/aws/aws-sdk-go v1.50.29 h1:Ol2FYzesF2tsQrgVSnDWRFI60+FsSqKKdt7MLlZKubc=
github.com/aws/aws-sdk-go v1.50.29/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-sdk-go v1.50.32 h1:POt81DvegnpQKM4DMDLlHz1CO6OBnEoQ1gRhYFd7QRY=
github.com/aws/aws-sdk-go v1.50.32/go.mod h1:LF8svs817+Nz+DmiMQKTO3ubZ/6IaTpq3TjupRn3Eqk=
github.com/aws/aws-sdk-go-v2 v1.25.2 h1:/uiG1avJRgLGiQM9X3qJM8+Qa6KRGK5rRPuXE0HUM+w=
github.com/aws/aws-sdk-go-v2 v1.25.2/go.mod h1:Evoc5AsmtveRt1komDwIsjHFyrP5tDuF1D1U+6z6pNo=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1 h1:gTK2uhtAPtFcdRRJilZPx8uJLL2J85xK11nKtWL0wfU=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.1/go.mod h1:sxpLb+nZk7tIfCWChfd+h4QwHNUR57d8hA1cleTkjJo=
github.com/aws/aws-sdk-go-v2/config v1.27.4 h1:AhfWb5ZwimdsYTgP7Od8E9L1u4sKmDW2ZVeLcf2O42M=
github.com/aws/aws-sdk-go-v2/config v1.27.4/go.mod h1:zq2FFXK3A416kiukwpsd+rD4ny6JC7QSkp4QdN1Mp2g=
github.com/aws/aws-sdk-go-v2/credentials v1.17.4 h1:h5Vztbd8qLppiPwX+y0Q6WiwMZgpd9keKe2EAENgAuI=
github.com/aws/aws-sdk-go-v2/credentials v1.17.4/go.mod h1:+30tpwrkOgvkJL1rUZuRLoxcJwtI/OkeBLYnHxJtVe0=
github.com/aws/aws-sdk-go-v2/config v1.27.6 h1:WmoH1aPrxwcqAZTTnETjKr+fuvqzKd4hRrKxQUiuKP4=
github.com/aws/aws-sdk-go-v2/config v1.27.6/go.mod h1:W9RZFF2pL+OhnUSZsQS/eDMWD8v+R+yWgjj3nSlrXVU=
github.com/aws/aws-sdk-go-v2/credentials v1.17.6 h1:akhj/nSC6SEx3OmiYGG/7mAyXMem9ZNVVf+DXkikcTk=
github.com/aws/aws-sdk-go-v2/credentials v1.17.6/go.mod h1:chJZuJ7TkW4kiMwmldOJOEueBoSkUb4ynZS1d9dhygo=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2 h1:AK0J8iYBFeUk2Ax7O8YpLtFsfhdOByh2QIkHmigpRYk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.2/go.mod h1:iRlGzMix0SExQEviAyptRWRGdYNo3+ufW/lCzvKVTUc=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.6 h1:prcsGA3onmpc7ea1W/m+SMj4uOn5vZ63uJp805UhJJs=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.6/go.mod h1:7eQrvATnVFDY0WfMYhfKkSQ1YtZlClT71fAAlsA1s34=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.8 h1:fjsaZ2EUoOaosuYMLbQAVJsPIAOV4Xn52AQmk5JbhAs=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.8/go.mod h1:WPJcs0Mze3WntafH9Df2NdJ1oSQkEQVL6piZxoS0ecY=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2 h1:bNo4LagzUKbjdxE0tIcR9pMzLR2U/Tgie1Hq1HQ3iH8=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2/go.mod h1:wRQv0nN6v9wDXuWThpovGQjqF1HFdcgWjporw14lS8k=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.2 h1:EtOU5jsPdIQNP+6Q2C5e3d65NKT1PeCiQk+9OdzO12Q=
@ -112,20 +112,20 @@ github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2 h1:en92G0Z7xlksoOylkUhuBSfJgijC
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.2/go.mod h1:HgtQ/wN5G+8QSlK62lbOtNwQ3wTSByJ4wH2rCkPt+AE=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 h1:EyBZibRTVAs6ECHZOw5/wlylS9OcTzwyjeQMudmREjE=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1/go.mod h1:JKpmtYhhPs7D97NL/ltqz7yCkERFW5dOlHyVl66ZYF8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2 h1:zSdTXYLwuXDNPUS+V41i1SFDXG7V0ITp0D9UT9Cvl18=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2/go.mod h1:v8m8k+qVy95nYi7d56uP1QImleIIY25BPiNJYzPBdFE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2 h1:5ffmXjPtwRExp1zc7gENLgCPyHFbhEPwVTkTiH9niSk=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2/go.mod h1:Ru7vg1iQ7cR4i7SZ/JTLYN9kaXtbL69UdgG0OQWQxW0=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.4 h1:J3Q6N2sTChfYLZSTey3Qeo7n3JSm6RTJDcKev+7Sbus=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.4/go.mod h1:ZopsdDMVg1H03X7BdzpGaufOkuz27RjtKDzioP2U0Hg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.4 h1:jRiWxyuVO8PlkN72wDMVn/haVH4SDCBkUt0Lf/dxd7s=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.4/go.mod h1:Ru7vg1iQ7cR4i7SZ/JTLYN9kaXtbL69UdgG0OQWQxW0=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2 h1:1oY1AVEisRI4HNuFoLdRUB0hC63ylDAN6Me3MrfclEg=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2/go.mod h1:KZ03VgvZwSjkT7fOetQ/wF3MZUvYFirlI1H5NklUNsY=
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1 h1:juZ+uGargZOrQGNxkVHr9HHR/0N+Yu8uekQnV7EAVRs=
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1/go.mod h1:SoR0c7Jnq8Tpmt0KSLXIavhjmaagRqQpe9r70W3POJg=
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.3 h1:7cR4xxS480TI0R6Bd75g9Npdw89VriquvQPlMNmuds4=
github.com/aws/aws-sdk-go-v2/service/s3 v1.51.3/go.mod h1:zb72GZ2MvfCX5ynVJ+Mc/NCx7hncbsko4NZm5E+p6J4=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.1 h1:utEGkfdQ4L6YW/ietH7111ZYglLJvS+sLriHJ1NBJEQ=
github.com/aws/aws-sdk-go-v2/service/sso v1.20.1/go.mod h1:RsYqzYr2F2oPDdpy+PdhephuZxTfjHQe7SOBcZGoAU8=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1 h1:9/GylMS45hGGFCcMrUZDVayQE1jYSIN6da9jo7RAYIw=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.23.1/go.mod h1:YjAPFn4kGFqKC54VsHs5fn5B6d+PCY2tziEa3U/GB5Y=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.1 h1:3I2cBEYgKhrWlwyZgfpSO2BpaMY1LHPqXYk/QGlu2ew=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.1/go.mod h1:uQ7YYKZt3adCRrdCBREm1CD3efFLOUNH77MrUCvx5oA=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.3 h1:TkiFkSVX990ryWIMBCT4kPqZEgThQe1xPU/AQXavtvU=
github.com/aws/aws-sdk-go-v2/service/sts v1.28.3/go.mod h1:xYNauIUqSuvzlPVb3VB5no/n48YGhmlInD3Uh0Co8Zc=
github.com/aws/smithy-go v1.20.1 h1:4SZlSlMr36UEqC7XOyRVb27XMeZubNcBNN+9IgEPIQw=
github.com/aws/smithy-go v1.20.1/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 h1:6df1vn4bBlDDo4tARvBm7l6KA9iVMnE3NWizDeWSrps=
@ -221,8 +221,8 @@ github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw=
github.com/golang-jwt/jwt/v5 v5.2.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@ -252,8 +252,8 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
@ -474,8 +474,9 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
@ -483,8 +484,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/urfave/cli/v2 v2.27.1 h1:8xSQ6szndafKVRmfyeUMxkNUJQMjL1F2zmsZ+qHpfho=
github.com/urfave/cli/v2 v2.27.1/go.mod h1:8qnjx1vcq5s2/wpsqoZFndg2CE5tNFyrTvS6SinrnYQ=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
@ -519,12 +520,12 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/collector/featuregate v1.2.0 h1:nF8OGq5PsSNSLeuNwTWlOqThxbLW6v6DOCvSqQMc108=
go.opentelemetry.io/collector/featuregate v1.2.0/go.mod h1:mm8+xyQfgDmqhyegZRNIQmoKsNnDTwWKFLsdMoXAb7A=
go.opentelemetry.io/collector/pdata v1.2.0 h1:N6VdyEFYJyoHIKqHd0F372eNVD5b+AbH0ZQf7Z2jJ9I=
go.opentelemetry.io/collector/pdata v1.2.0/go.mod h1:mKXb6527Syb8PT4P9CZOJNbkuHOHjjGTZNNwSKESJhc=
go.opentelemetry.io/collector/semconv v0.95.0 h1:Oa7NXmoatwKLS+RzuXtdjq9oXAeg8P0g3x238fKF0Do=
go.opentelemetry.io/collector/semconv v0.95.0/go.mod h1:zOm/U3pgMIWcvrcnPbR9Xx2HinoXj46ERMK8PUV9wrs=
go.opentelemetry.io/collector/featuregate v1.3.0 h1:nrFSx+zfjdisjE9oCx25Aep3nJ9RaUjeE1qFL6eovoU=
go.opentelemetry.io/collector/featuregate v1.3.0/go.mod h1:mm8+xyQfgDmqhyegZRNIQmoKsNnDTwWKFLsdMoXAb7A=
go.opentelemetry.io/collector/pdata v1.3.0 h1:JRYN7tVHYFwmtQhIYbxWeiKSa2L1nCohyAs8sYqKFZo=
go.opentelemetry.io/collector/pdata v1.3.0/go.mod h1:t7W0Undtes53HODPdSujPLTnfSR5fzT+WpL+RTaaayo=
go.opentelemetry.io/collector/semconv v0.96.0 h1:DrZy8BpzJDnN2zFxXRj6BhfGYxNlqpFHBqyuS9fVHRY=
go.opentelemetry.io/collector/semconv v0.96.0/go.mod h1:zOm/U3pgMIWcvrcnPbR9Xx2HinoXj46ERMK8PUV9wrs=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
@ -551,8 +552,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.20.0 h1:jmAMJJZXr5KiCw05dfYK9QnqaqKLYXijU23lsEdcQqg=
golang.org/x/crypto v0.20.0/go.mod h1:Xwo95rrVNIoSMx9wa1JroENMToLWn3RNVrTBpLHgZPQ=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@ -622,16 +623,16 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210510120150-4163338589ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ=
golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -691,12 +692,12 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@ -779,8 +780,8 @@ google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0M
google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
google.golang.org/api v0.167.0 h1:CKHrQD1BLRii6xdkatBDXyKzM0mkawt2QP+H3LtPmSE=
google.golang.org/api v0.167.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA=
google.golang.org/api v0.168.0 h1:MBRe+Ki4mMN93jhDDbpuRLjRddooArz4FeSObvUMmjY=
google.golang.org/api v0.168.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@ -818,12 +819,12 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20240228224816-df926f6c8641 h1:GihpvzHjeZHw+/mzsWpdxwr1LaG6E3ff/gyeZlVHbyc=
google.golang.org/genproto v0.0.0-20240228224816-df926f6c8641/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo=
google.golang.org/genproto/googleapis/api v0.0.0-20240228224816-df926f6c8641 h1:SO1wX9btGFrwj9EzH3ocqfwiPVOxfv4ggAJajzlHA5s=
google.golang.org/genproto/googleapis/api v0.0.0-20240228224816-df926f6c8641/go.mod h1:wLupoVsUfYPgOMwjzhYFbaVklw/INms+dqTp0tc1fv8=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240228224816-df926f6c8641 h1:DKU1r6Tj5s1vlU/moGhuGz7E3xRfwjdAfDzbsaQJtEY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240228224816-df926f6c8641/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs=
google.golang.org/genproto v0.0.0-20240304212257-790db918fca8 h1:Fe8QycXyEd9mJgnwB9kmw00WgB43eQ/xYO5C6gceybQ=
google.golang.org/genproto v0.0.0-20240304212257-790db918fca8/go.mod h1:yA7a1bW1kwl459Ol0m0lV4hLTfrL/7Bkk4Mj2Ir1mWI=
google.golang.org/genproto/googleapis/api v0.0.0-20240304212257-790db918fca8 h1:8eadJkXbwDEMNwcB5O0s5Y5eCfyuCLdvaiOIaGTrWmQ=
google.golang.org/genproto/googleapis/api v0.0.0-20240304212257-790db918fca8/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8 h1:IR+hp6ypxjH24bkMfEJ0yHR21+gwPWdV+/IBrPQyn3k=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -837,8 +838,8 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.62.0 h1:HQKZ/fa1bXkX1oFOvSjmZEUL8wLSaZTjCcLAlmZRtdk=
google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@ -851,8 +852,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=


@ -15,4 +15,4 @@
package internal
// Version is the current tagged release of the library.
const Version = "1.24.0"
const Version = "1.25.0"


@ -1,3 +1,11 @@
# v1.27.6 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.27.5 (2024-03-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.27.4 (2024-02-23)
* **Dependency Update**: Updated to the latest SDK module versions


@ -3,4 +3,4 @@
package config
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.27.4"
const goModuleVersion = "1.27.6"


@ -1,3 +1,11 @@
# v1.17.6 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.17.5 (2024-03-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.17.4 (2024-02-23)
* **Dependency Update**: Updated to the latest SDK module versions


@ -3,4 +3,4 @@
package credentials
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.17.4"
const goModuleVersion = "1.17.6"


@ -1,3 +1,11 @@
# v1.16.8 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.16.7 (2024-03-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.16.6 (2024-02-23)
* **Dependency Update**: Updated to the latest SDK module versions


@ -3,4 +3,4 @@
package manager
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.16.6"
const goModuleVersion = "1.16.8"


@ -1,3 +1,11 @@
# v1.3.4 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.3 (2024-03-04)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.3.2 (2024-02-23)
* **Dependency Update**: Updated to the latest SDK module versions


@ -3,4 +3,4 @@
package checksum
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.3.2"
const goModuleVersion = "1.3.4"


@ -1,3 +1,11 @@
# v1.11.4 (2024-03-05)
* **Bug Fix**: Restore typo'd API `AddAsIsInternalPresigingMiddleware` as an alias for backwards compatibility.
# v1.11.3 (2024-03-04)
* **Bug Fix**: Correct a typo in internal AddAsIsPresigningMiddleware API.
# v1.11.2 (2024-02-23)
* **Dependency Update**: Updated to the latest SDK module versions


@ -27,13 +27,21 @@ func GetIsPresigning(ctx context.Context) bool {
type isPresigningKey struct{}
// AddAsIsPresigingMiddleware adds a middleware to the head of the stack that
// AddAsIsPresigningMiddleware adds a middleware to the head of the stack that
// will update the stack's context to be flagged as being invoked for the
// purpose of presigning.
func AddAsIsPresigingMiddleware(stack *middleware.Stack) error {
func AddAsIsPresigningMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(asIsPresigningMiddleware{}, middleware.Before)
}
// AddAsIsPresigingMiddleware is an alias for backwards compatibility.
//
// Deprecated: This API was released with a typo. Use
// [AddAsIsPresigningMiddleware] instead.
func AddAsIsPresigingMiddleware(stack *middleware.Stack) error {
return AddAsIsPresigningMiddleware(stack)
}
type asIsPresigningMiddleware struct{}
func (asIsPresigningMiddleware) ID() string { return "AsIsPresigningMiddleware" }


@ -3,4 +3,4 @@
package presignedurl
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.11.2"
const goModuleVersion = "1.11.4"


@ -1,3 +1,12 @@
# v1.51.3 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.51.2 (2024-03-04)
* **Bug Fix**: Update internal/presigned-url dependency for corrected API name.
* **Dependency Update**: Updated to the latest SDK module versions
# v1.51.1 (2024-02-23)
* **Bug Fix**: Move all common, SDK-side middleware stack ops into the service client module to prevent cross-module compatibility issues in the future.


@ -852,7 +852,7 @@ func (c presignConverter) convertToPresignMiddleware(stack *middleware.Stack, op
if err != nil {
return err
}
err = presignedurlcust.AddAsIsPresigingMiddleware(stack)
err = presignedurlcust.AddAsIsPresigningMiddleware(stack)
if err != nil {
return err
}
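For context on where the corrected helper ends up being used: presigning an S3 request goes through the presign converter shown above, which now installs the middleware under its fixed name AddAsIsPresigningMiddleware. A minimal caller-side sketch, assuming the default credential chain and illustrative bucket, key and expiry values:

package example

import (
	"context"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// presignGet returns a presigned GET URL; building it runs the presign
// converter above, which registers AddAsIsPresigningMiddleware on the stack.
func presignGet(ctx context.Context, bucket, key string) (string, error) {
	cfg, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		return "", err
	}
	presigner := s3.NewPresignClient(s3.NewFromConfig(cfg))
	req, err := presigner.PresignGetObject(ctx, &s3.GetObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	}, s3.WithPresignExpires(15*time.Minute))
	if err != nil {
		return "", err
	}
	return req.URL, nil
}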


@ -3,4 +3,4 @@
package s3
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.51.1"
const goModuleVersion = "1.51.3"


@ -1,3 +1,12 @@
# v1.28.3 (2024-03-05)
* **Dependency Update**: Updated to the latest SDK module versions
# v1.28.2 (2024-03-04)
* **Bug Fix**: Update internal/presigned-url dependency for corrected API name.
* **Dependency Update**: Updated to the latest SDK module versions
# v1.28.1 (2024-02-23)
* **Bug Fix**: Move all common, SDK-side middleware stack ops into the service client module to prevent cross-module compatibility issues in the future.


@ -643,7 +643,7 @@ func (c presignConverter) convertToPresignMiddleware(stack *middleware.Stack, op
if err != nil {
return err
}
err = presignedurlcust.AddAsIsPresigingMiddleware(stack)
err = presignedurlcust.AddAsIsPresigningMiddleware(stack)
if err != nil {
return err
}


@ -3,4 +3,4 @@
package sts
// goModuleVersion is the tagged release for this module
const goModuleVersion = "1.28.1"
const goModuleVersion = "1.28.3"


@ -5,4 +5,4 @@ package aws
const SDKName = "aws-sdk-go"
// SDKVersion is the version of this SDK
const SDKVersion = "1.50.29"
const SDKVersion = "1.50.32"


@ -62,7 +62,7 @@ func (m *SigningMethodECDSA) Verify(signingString string, sig []byte, key interf
case *ecdsa.PublicKey:
ecdsaKey = k
default:
return newError("ECDSA verify expects *ecsda.PublicKey", ErrInvalidKeyType)
return newError("ECDSA verify expects *ecdsa.PublicKey", ErrInvalidKeyType)
}
if len(sig) != 2*m.KeySize {
@ -96,7 +96,7 @@ func (m *SigningMethodECDSA) Sign(signingString string, key interface{}) ([]byte
case *ecdsa.PrivateKey:
ecdsaKey = k
default:
return nil, newError("ECDSA sign expects *ecsda.PrivateKey", ErrInvalidKeyType)
return nil, newError("ECDSA sign expects *ecdsa.PrivateKey", ErrInvalidKeyType)
}
// Create the hasher


@ -91,7 +91,7 @@ func (m *SigningMethodHMAC) Verify(signingString string, sig []byte, key interfa
func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) ([]byte, error) {
if keyBytes, ok := key.([]byte); ok {
if !m.Hash.Available() {
return nil, newError("HMAC sign expects []byte", ErrInvalidKeyType)
return nil, ErrHashUnavailable
}
hasher := hmac.New(m.Hash.New, keyBytes)
@ -100,5 +100,5 @@ func (m *SigningMethodHMAC) Sign(signingString string, key interface{}) ([]byte,
return hasher.Sum(nil), nil
}
return nil, ErrInvalidKeyType
return nil, newError("HMAC sign expects []byte", ErrInvalidKeyType)
}


@ -28,6 +28,8 @@ var (
uint32Type = reflect.TypeOf(uint32(1))
uint64Type = reflect.TypeOf(uint64(1))
uintptrType = reflect.TypeOf(uintptr(1))
float32Type = reflect.TypeOf(float32(1))
float64Type = reflect.TypeOf(float64(1))
@ -308,11 +310,11 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
case reflect.Struct:
{
// All structs enter here. We're not interested in most types.
if !canConvert(obj1Value, timeType) {
if !obj1Value.CanConvert(timeType) {
break
}
// time.Time can compared!
// time.Time can be compared!
timeObj1, ok := obj1.(time.Time)
if !ok {
timeObj1 = obj1Value.Convert(timeType).Interface().(time.Time)
@ -328,7 +330,7 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
case reflect.Slice:
{
// We only care about the []byte type.
if !canConvert(obj1Value, bytesType) {
if !obj1Value.CanConvert(bytesType) {
break
}
@ -345,6 +347,26 @@ func compare(obj1, obj2 interface{}, kind reflect.Kind) (CompareType, bool) {
return CompareType(bytes.Compare(bytesObj1, bytesObj2)), true
}
case reflect.Uintptr:
{
uintptrObj1, ok := obj1.(uintptr)
if !ok {
uintptrObj1 = obj1Value.Convert(uintptrType).Interface().(uintptr)
}
uintptrObj2, ok := obj2.(uintptr)
if !ok {
uintptrObj2 = obj2Value.Convert(uintptrType).Interface().(uintptr)
}
if uintptrObj1 > uintptrObj2 {
return compareGreater, true
}
if uintptrObj1 == uintptrObj2 {
return compareEqual, true
}
if uintptrObj1 < uintptrObj2 {
return compareLess, true
}
}
}
return compareEqual, false
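The new reflect.Uintptr branch above lets testify's ordering assertions compare uintptr values instead of rejecting them as incomparable. A minimal sketch of the behaviour this enables (test and variable names are hypothetical):

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestUintptrOrdering exercises the new reflect.Uintptr case in compare():
// Greater, Less and friends route through it and can now order uintptr values.
func TestUintptrOrdering(t *testing.T) {
	var small, large uintptr = 1, 2
	assert.Less(t, small, large)
	assert.Greater(t, large, small)
	assert.GreaterOrEqual(t, large, large)
}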


@ -1,16 +0,0 @@
//go:build go1.17
// +build go1.17
// TODO: once support for Go 1.16 is dropped, this file can be
// merged/removed with assertion_compare_go1.17_test.go and
// assertion_compare_legacy.go
package assert
import "reflect"
// Wrapper around reflect.Value.CanConvert, for compatibility
// reasons.
func canConvert(value reflect.Value, to reflect.Type) bool {
return value.CanConvert(to)
}


@ -1,16 +0,0 @@
//go:build !go1.17
// +build !go1.17
// TODO: once support for Go 1.16 is dropped, this file can be
// merged/removed with assertion_compare_go1.17_test.go and
// assertion_compare_can_convert.go
package assert
import "reflect"
// Older versions of Go does not have the reflect.Value.CanConvert
// method.
func canConvert(value reflect.Value, to reflect.Type) bool {
return false
}


@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package assert
@ -107,7 +104,7 @@ func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{},
return EqualExportedValues(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted")
@ -616,6 +613,16 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf
return NotErrorIs(t, err, target, append([]interface{}{msg}, args...)...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// assert.NotImplementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func NotImplementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
return NotImplements(t, interfaceObject, object, append([]interface{}{msg}, args...)...)
}
// NotNilf asserts that the specified object is not nil.
//
// assert.NotNilf(t, err, "error message %s", "formatted")
@ -660,10 +667,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string,
return NotSame(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted")
// assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -747,10 +756,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg
return Same(t, expected, actual, append([]interface{}{msg}, args...)...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted")
// assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()


@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package assert
@ -189,7 +186,7 @@ func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface
return EqualExportedValuesf(a.t, expected, actual, msg, args...)
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValues(uint32(123), int32(123))
@ -200,7 +197,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn
return EqualValues(a.t, expected, actual, msgAndArgs...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted")
@ -1221,6 +1218,26 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in
return NotErrorIsf(a.t, err, target, msg, args...)
}
// NotImplements asserts that an object does not implement the specified interface.
//
// a.NotImplements((*MyInterface)(nil), new(MyObject))
func (a *Assertions) NotImplements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotImplements(a.t, interfaceObject, object, msgAndArgs...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// a.NotImplementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func (a *Assertions) NotImplementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
return NotImplementsf(a.t, interfaceObject, object, msg, args...)
}
// NotNil asserts that the specified object is not nil.
//
// a.NotNil(err)
@ -1309,10 +1326,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri
return NotSamef(a.t, expected, actual, msg, args...)
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// a.NotSubset([1, 3, 4], [1, 2])
// a.NotSubset({"x": 1, "y": 2}, {"z": 3})
func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1320,10 +1339,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs
return NotSubset(a.t, list, subset, msgAndArgs...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted")
// a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1483,10 +1504,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string,
return Samef(a.t, expected, actual, msg, args...)
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// a.Subset([1, 2, 3], [1, 2])
// a.Subset({"x": 1, "y": 2}, {"x": 1})
func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1494,10 +1516,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...
return Subset(a.t, list, subset, msgAndArgs...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted")
// a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) bool {
if h, ok := a.t.(tHelper); ok {
h.Helper()


@ -19,7 +19,7 @@ import (
"github.com/davecgh/go-spew/spew"
"github.com/pmezard/go-difflib/difflib"
yaml "gopkg.in/yaml.v3"
"gopkg.in/yaml.v3"
)
//go:generate sh -c "cd ../_codegen && go build && cd - && ../_codegen/_codegen -output-package=assert -template=assertion_format.go.tmpl"
@ -110,7 +110,12 @@ func copyExportedFields(expected interface{}) interface{} {
return result.Interface()
case reflect.Array, reflect.Slice:
result := reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len())
var result reflect.Value
if expectedKind == reflect.Array {
result = reflect.New(reflect.ArrayOf(expectedValue.Len(), expectedType.Elem())).Elem()
} else {
result = reflect.MakeSlice(expectedType, expectedValue.Len(), expectedValue.Len())
}
for i := 0; i < expectedValue.Len(); i++ {
index := expectedValue.Index(i)
if isNil(index) {
@ -140,6 +145,8 @@ func copyExportedFields(expected interface{}) interface{} {
// structures.
//
// This function does no assertion of any kind.
//
// Deprecated: Use [EqualExportedValues] instead.
func ObjectsExportedFieldsAreEqual(expected, actual interface{}) bool {
expectedCleaned := copyExportedFields(expected)
actualCleaned := copyExportedFields(actual)
@ -153,17 +160,40 @@ func ObjectsAreEqualValues(expected, actual interface{}) bool {
return true
}
actualType := reflect.TypeOf(actual)
if actualType == nil {
return false
}
expectedValue := reflect.ValueOf(expected)
if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
// Attempt comparison after type conversion
return reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual)
actualValue := reflect.ValueOf(actual)
if !expectedValue.IsValid() || !actualValue.IsValid() {
return false
}
expectedType := expectedValue.Type()
actualType := actualValue.Type()
if !expectedType.ConvertibleTo(actualType) {
return false
}
if !isNumericType(expectedType) || !isNumericType(actualType) {
// Attempt comparison after type conversion
return reflect.DeepEqual(
expectedValue.Convert(actualType).Interface(), actual,
)
}
// If BOTH values are numeric, there are chances of false positives due
// to overflow or underflow. So, we need to make sure to always convert
// the smaller type to a larger type before comparing.
if expectedType.Size() >= actualType.Size() {
return actualValue.Convert(expectedType).Interface() == expected
}
return expectedValue.Convert(actualType).Interface() == actual
}
// isNumericType returns true if the type is one of:
// int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64,
// float32, float64, complex64, complex128
func isNumericType(t reflect.Type) bool {
return t.Kind() >= reflect.Int && t.Kind() <= reflect.Complex128
}
/* CallerInfo is necessary because the assert functions use the testing object
@ -266,7 +296,7 @@ func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
// Aligns the provided message so that all lines after the first line start at the same location as the first line.
// Assumes that the first line starts at the correct location (after carriage return, tab, label, spacer and tab).
// The longestLabelLen parameter specifies the length of the longest label in the output (required becaues this is the
// The longestLabelLen parameter specifies the length of the longest label in the output (required because this is the
// basis on which the alignment occurs).
func indentMessageLines(message string, longestLabelLen int) string {
outBuf := new(bytes.Buffer)
@ -382,6 +412,25 @@ func Implements(t TestingT, interfaceObject interface{}, object interface{}, msg
return true
}
// NotImplements asserts that an object does not implement the specified interface.
//
// assert.NotImplements(t, (*MyInterface)(nil), new(MyObject))
func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
h.Helper()
}
interfaceType := reflect.TypeOf(interfaceObject).Elem()
if object == nil {
return Fail(t, fmt.Sprintf("Cannot check if nil does not implement %v", interfaceType), msgAndArgs...)
}
if reflect.TypeOf(object).Implements(interfaceType) {
return Fail(t, fmt.Sprintf("%T implements %v", object, interfaceType), msgAndArgs...)
}
return true
}
// IsType asserts that the specified objects are of the same type.
func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
if h, ok := t.(tHelper); ok {
@ -496,7 +545,7 @@ func samePointers(first, second interface{}) bool {
// representations appropriate to be presented to the user.
//
// If the values are not of like type, the returned strings will be prefixed
// with the type name, and the value will be enclosed in parenthesis similar
// with the type name, and the value will be enclosed in parentheses similar
// to a type conversion in the Go grammar.
func formatUnequalValues(expected, actual interface{}) (e string, a string) {
if reflect.TypeOf(expected) != reflect.TypeOf(actual) {
@ -523,7 +572,7 @@ func truncatingFormat(data interface{}) string {
return value
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValues(t, uint32(123), int32(123))
@ -566,12 +615,19 @@ func EqualExportedValues(t TestingT, expected, actual interface{}, msgAndArgs ..
return Fail(t, fmt.Sprintf("Types expected to match exactly\n\t%v != %v", aType, bType), msgAndArgs...)
}
if aType.Kind() == reflect.Ptr {
aType = aType.Elem()
}
if bType.Kind() == reflect.Ptr {
bType = bType.Elem()
}
if aType.Kind() != reflect.Struct {
return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...)
return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", aType.Kind(), reflect.Struct), msgAndArgs...)
}
if bType.Kind() != reflect.Struct {
return Fail(t, fmt.Sprintf("Types expected to both be struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...)
return Fail(t, fmt.Sprintf("Types expected to both be struct or pointer to struct \n\t%v != %v", bType.Kind(), reflect.Struct), msgAndArgs...)
}
expected = copyExportedFields(expected)
@ -620,17 +676,6 @@ func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
return Fail(t, "Expected value not to be nil.", msgAndArgs...)
}
// containsKind checks if a specified kind in the slice of kinds.
func containsKind(kinds []reflect.Kind, kind reflect.Kind) bool {
for i := 0; i < len(kinds); i++ {
if kind == kinds[i] {
return true
}
}
return false
}
// isNil checks if a specified object is nil or not, without Failing.
func isNil(object interface{}) bool {
if object == nil {
@ -638,16 +683,13 @@ func isNil(object interface{}) bool {
}
value := reflect.ValueOf(object)
kind := value.Kind()
isNilableKind := containsKind(
[]reflect.Kind{
switch value.Kind() {
case
reflect.Chan, reflect.Func,
reflect.Interface, reflect.Map,
reflect.Ptr, reflect.Slice, reflect.UnsafePointer},
kind)
reflect.Ptr, reflect.Slice, reflect.UnsafePointer:
if isNilableKind && value.IsNil() {
return true
return value.IsNil()
}
return false
@ -731,16 +773,14 @@ func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
}
// getLen try to get length of object.
// return (false, 0) if impossible.
func getLen(x interface{}) (ok bool, length int) {
// getLen tries to get the length of an object.
// It returns (0, false) if impossible.
func getLen(x interface{}) (length int, ok bool) {
v := reflect.ValueOf(x)
defer func() {
if e := recover(); e != nil {
ok = false
}
ok = recover() == nil
}()
return true, v.Len()
return v.Len(), true
}
// Len asserts that the specified object has specific length.
@ -751,13 +791,13 @@ func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{})
if h, ok := t.(tHelper); ok {
h.Helper()
}
ok, l := getLen(object)
l, ok := getLen(object)
if !ok {
return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", object), msgAndArgs...)
return Fail(t, fmt.Sprintf("\"%v\" could not be applied builtin len()", object), msgAndArgs...)
}
if l != length {
return Fail(t, fmt.Sprintf("\"%s\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
return Fail(t, fmt.Sprintf("\"%v\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
}
return true
}
@ -919,10 +959,11 @@ func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{})
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// assert.Subset(t, [1, 2, 3], [1, 2])
// assert.Subset(t, {"x": 1, "y": 2}, {"x": 1})
func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -975,10 +1016,12 @@ func Subset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok
return true
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// assert.NotSubset(t, [1, 3, 4], [1, 2])
// assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3})
func NotSubset(t TestingT, list, subset interface{}, msgAndArgs ...interface{}) (ok bool) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1439,7 +1482,7 @@ func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAnd
h.Helper()
}
if math.IsNaN(epsilon) {
return Fail(t, "epsilon must not be NaN")
return Fail(t, "epsilon must not be NaN", msgAndArgs...)
}
actualEpsilon, err := calcRelativeError(expected, actual)
if err != nil {
@ -1458,19 +1501,26 @@ func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, m
if h, ok := t.(tHelper); ok {
h.Helper()
}
if expected == nil || actual == nil ||
reflect.TypeOf(actual).Kind() != reflect.Slice ||
reflect.TypeOf(expected).Kind() != reflect.Slice {
if expected == nil || actual == nil {
return Fail(t, "Parameters must be slice", msgAndArgs...)
}
actualSlice := reflect.ValueOf(actual)
expectedSlice := reflect.ValueOf(expected)
actualSlice := reflect.ValueOf(actual)
for i := 0; i < actualSlice.Len(); i++ {
result := InEpsilon(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), epsilon)
if !result {
return result
if expectedSlice.Type().Kind() != reflect.Slice {
return Fail(t, "Expected value must be slice", msgAndArgs...)
}
expectedLen := expectedSlice.Len()
if !IsType(t, expected, actual) || !Len(t, actual, expectedLen) {
return false
}
for i := 0; i < expectedLen; i++ {
if !InEpsilon(t, expectedSlice.Index(i).Interface(), actualSlice.Index(i).Interface(), epsilon, "at index %d", i) {
return false
}
}
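The rewritten InEpsilonSlice above now checks that both arguments are slices of the same type and length before comparing element-wise, and reports the index that fails. A small usage sketch; the values are invented:

package example_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestRelativeError(t *testing.T) {
	expected := []float64{100.0, 200.0, 300.0}
	actual := []float64{100.4, 199.2, 301.0}

	// Each actual element must be within 1% relative error of the
	// corresponding expected element.
	assert.InEpsilonSlice(t, expected, actual, 0.01)
}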
@ -1870,23 +1920,18 @@ func (c *CollectT) Errorf(format string, args ...interface{}) {
}
// FailNow panics.
func (c *CollectT) FailNow() {
func (*CollectT) FailNow() {
panic("Assertion failed")
}
// Reset clears the collected errors.
func (c *CollectT) Reset() {
c.errors = nil
// Deprecated: That was a method for internal usage that should not have been published. Now just panics.
func (*CollectT) Reset() {
panic("Reset() is deprecated")
}
// Copy copies the collected errors to the supplied t.
func (c *CollectT) Copy(t TestingT) {
if tt, ok := t.(tHelper); ok {
tt.Helper()
}
for _, err := range c.errors {
t.Errorf("%v", err)
}
// Deprecated: That was a method for internal usage that should not have been published. Now just panics.
func (*CollectT) Copy(TestingT) {
panic("Copy() is deprecated")
}
// EventuallyWithT asserts that given condition will be met in waitFor time,
@ -1912,8 +1957,8 @@ func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time
h.Helper()
}
collect := new(CollectT)
ch := make(chan bool, 1)
var lastFinishedTickErrs []error
ch := make(chan []error, 1)
timer := time.NewTimer(waitFor)
defer timer.Stop()
@ -1924,19 +1969,25 @@ func EventuallyWithT(t TestingT, condition func(collect *CollectT), waitFor time
for tick := ticker.C; ; {
select {
case <-timer.C:
collect.Copy(t)
for _, err := range lastFinishedTickErrs {
t.Errorf("%v", err)
}
return Fail(t, "Condition never satisfied", msgAndArgs...)
case <-tick:
tick = nil
collect.Reset()
go func() {
condition(collect)
ch <- len(collect.errors) == 0
collect := new(CollectT)
defer func() {
ch <- collect.errors
}()
case v := <-ch:
if v {
condition(collect)
}()
case errs := <-ch:
if len(errs) == 0 {
return true
}
// Keep the errors from the last ended condition, so that they can be copied to t if timeout is reached.
lastFinishedTickErrs = errs
tick = ticker.C
}
}
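The rewritten loop above runs each tick's condition against a fresh CollectT and, if the timeout fires, reports only the errors from the last tick that finished. A hedged usage sketch; the background goroutine and durations are invented for illustration:

package example_test

import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestBackgroundWorkCompletes(t *testing.T) {
	var ready atomic.Bool
	go func() {
		time.Sleep(50 * time.Millisecond) // simulated background work
		ready.Store(true)
	}()

	assert.EventuallyWithT(t, func(c *assert.CollectT) {
		// Assertions failed on c are collected for this tick only; the test
		// fails with those errors if the 1s timeout elapses first.
		assert.True(c, ready.Load(), "background work should have completed")
	}, time.Second, 10*time.Millisecond)
}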


@ -12,7 +12,7 @@ import (
// an error if building a new request fails.
func httpCode(handler http.HandlerFunc, method, url string, values url.Values) (int, error) {
w := httptest.NewRecorder()
req, err := http.NewRequest(method, url, nil)
req, err := http.NewRequest(method, url, http.NoBody)
if err != nil {
return -1, err
}
@ -32,12 +32,12 @@ func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, value
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isSuccessCode := code >= http.StatusOK && code <= http.StatusPartialContent
if !isSuccessCode {
Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP success status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isSuccessCode
@ -54,12 +54,12 @@ func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, valu
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isRedirectCode := code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect
if !isRedirectCode {
Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP redirect status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isRedirectCode
@ -76,12 +76,12 @@ func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
isErrorCode := code >= http.StatusBadRequest
if !isErrorCode {
Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP error status code for %q but received %d", url+"?"+values.Encode(), code), msgAndArgs...)
}
return isErrorCode
@ -98,12 +98,12 @@ func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, va
}
code, err := httpCode(handler, method, url, values)
if err != nil {
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err))
Fail(t, fmt.Sprintf("Failed to build test request, got error: %s", err), msgAndArgs...)
}
successful := code == statuscode
if !successful {
Fail(t, fmt.Sprintf("Expected HTTP status code %d for %q but received %d", statuscode, url+"?"+values.Encode(), code))
Fail(t, fmt.Sprintf("Expected HTTP status code %d for %q but received %d", statuscode, url+"?"+values.Encode(), code), msgAndArgs...)
}
return successful
@ -113,7 +113,10 @@ func HTTPStatusCode(t TestingT, handler http.HandlerFunc, method, url string, va
// empty string if building a new request fails.
func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string {
w := httptest.NewRecorder()
req, err := http.NewRequest(method, url+"?"+values.Encode(), nil)
if len(values) > 0 {
url += "?" + values.Encode()
}
req, err := http.NewRequest(method, url, http.NoBody)
if err != nil {
return ""
}
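With the change above, HTTPBody only appends the "?" separator when query values are supplied, so passing nil url.Values requests the bare URL. A brief sketch; the handler and path are invented:

package example_test

import (
	"fmt"
	"net/http"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestHealthBody(t *testing.T) {
	handler := func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, "ok")
	}

	// With nil values the request URL stays "/health" rather than "/health?".
	body := assert.HTTPBody(handler, "GET", "/health", nil)
	assert.Equal(t, "ok", body)
}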
@ -135,7 +138,7 @@ func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string,
contains := strings.Contains(body, fmt.Sprint(str))
if !contains {
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...)
}
return contains
@ -155,7 +158,7 @@ func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url strin
contains := strings.Contains(body, fmt.Sprint(str))
if contains {
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
Fail(t, fmt.Sprintf("Expected response body for \"%s\" to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body), msgAndArgs...)
}
return !contains


@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package require
@ -235,7 +232,7 @@ func EqualExportedValuesf(t TestingT, expected interface{}, actual interface{},
t.FailNow()
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValues(t, uint32(123), int32(123))
@ -249,7 +246,7 @@ func EqualValues(t TestingT, expected interface{}, actual interface{}, msgAndArg
t.FailNow()
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// assert.EqualValuesf(t, uint32(123), int32(123), "error message %s", "formatted")
@ -1546,6 +1543,32 @@ func NotErrorIsf(t TestingT, err error, target error, msg string, args ...interf
t.FailNow()
}
// NotImplements asserts that an object does not implement the specified interface.
//
// assert.NotImplements(t, (*MyInterface)(nil), new(MyObject))
func NotImplements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if assert.NotImplements(t, interfaceObject, object, msgAndArgs...) {
return
}
t.FailNow()
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// assert.NotImplementsf(t, (*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func NotImplementsf(t TestingT, interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
}
if assert.NotImplementsf(t, interfaceObject, object, msg, args...) {
return
}
t.FailNow()
}
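NotImplements and NotImplementsf are new in this release and mirror the assert versions. A hedged sketch of a use; the myBuffer type is invented for illustration:

package example_test

import (
	"io"
	"testing"

	"github.com/stretchr/testify/require"
)

// myBuffer implements io.Writer but deliberately not io.Closer.
type myBuffer struct{ data []byte }

func (b *myBuffer) Write(p []byte) (int, error) {
	b.data = append(b.data, p...)
	return len(p), nil
}

func TestBufferInterfaces(t *testing.T) {
	// Fails the test immediately if *myBuffer started implementing io.Closer.
	require.NotImplements(t, (*io.Closer)(nil), new(myBuffer))

	// The long-standing positive counterpart:
	require.Implements(t, (*io.Writer)(nil), new(myBuffer))
}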
// NotNil asserts that the specified object is not nil.
//
// assert.NotNil(t, err)
@ -1658,10 +1681,12 @@ func NotSamef(t TestingT, expected interface{}, actual interface{}, msg string,
t.FailNow()
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubset(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// assert.NotSubset(t, [1, 3, 4], [1, 2])
// assert.NotSubset(t, {"x": 1, "y": 2}, {"z": 3})
func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1672,10 +1697,12 @@ func NotSubset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...i
t.FailNow()
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// assert.NotSubsetf(t, [1, 3, 4], [1, 2], "error message %s", "formatted")
// assert.NotSubsetf(t, {"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func NotSubsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1880,10 +1907,11 @@ func Samef(t TestingT, expected interface{}, actual interface{}, msg string, arg
t.FailNow()
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subset(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// assert.Subset(t, [1, 2, 3], [1, 2])
// assert.Subset(t, {"x": 1, "y": 2}, {"x": 1})
func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()
@ -1894,10 +1922,11 @@ func Subset(t TestingT, list interface{}, subset interface{}, msgAndArgs ...inte
t.FailNow()
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// assert.Subsetf(t, [1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// assert.Subsetf(t, [1, 2, 3], [1, 2], "error message %s", "formatted")
// assert.Subsetf(t, {"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func Subsetf(t TestingT, list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := t.(tHelper); ok {
h.Helper()


@ -1,7 +1,4 @@
/*
* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
* THIS FILE MUST NOT BE EDITED BY HAND
*/
// Code generated with github.com/stretchr/testify/_codegen; DO NOT EDIT.
package require
@ -190,7 +187,7 @@ func (a *Assertions) EqualExportedValuesf(expected interface{}, actual interface
EqualExportedValuesf(a.t, expected, actual, msg, args...)
}
// EqualValues asserts that two objects are equal or convertable to the same types
// EqualValues asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValues(uint32(123), int32(123))
@ -201,7 +198,7 @@ func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAn
EqualValues(a.t, expected, actual, msgAndArgs...)
}
// EqualValuesf asserts that two objects are equal or convertable to the same types
// EqualValuesf asserts that two objects are equal or convertible to the same types
// and equal.
//
// a.EqualValuesf(uint32(123), int32(123), "error message %s", "formatted")
@ -1222,6 +1219,26 @@ func (a *Assertions) NotErrorIsf(err error, target error, msg string, args ...in
NotErrorIsf(a.t, err, target, msg, args...)
}
// NotImplements asserts that an object does not implement the specified interface.
//
// a.NotImplements((*MyInterface)(nil), new(MyObject))
func (a *Assertions) NotImplements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
NotImplements(a.t, interfaceObject, object, msgAndArgs...)
}
// NotImplementsf asserts that an object does not implement the specified interface.
//
// a.NotImplementsf((*MyInterface)(nil), new(MyObject), "error message %s", "formatted")
func (a *Assertions) NotImplementsf(interfaceObject interface{}, object interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
}
NotImplementsf(a.t, interfaceObject, object, msg, args...)
}
// NotNil asserts that the specified object is not nil.
//
// a.NotNil(err)
@ -1310,10 +1327,12 @@ func (a *Assertions) NotSamef(expected interface{}, actual interface{}, msg stri
NotSamef(a.t, expected, actual, msg, args...)
}
// NotSubset asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubset asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubset([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]")
// a.NotSubset([1, 3, 4], [1, 2])
// a.NotSubset({"x": 1, "y": 2}, {"z": 3})
func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1321,10 +1340,12 @@ func (a *Assertions) NotSubset(list interface{}, subset interface{}, msgAndArgs
NotSubset(a.t, list, subset, msgAndArgs...)
}
// NotSubsetf asserts that the specified list(array, slice...) contains not all
// elements given in the specified subset(array, slice...).
// NotSubsetf asserts that the specified list(array, slice...) or map does NOT
// contain all elements given in the specified subset list(array, slice...) or
// map.
//
// a.NotSubsetf([1, 3, 4], [1, 2], "But [1, 3, 4] does not contain [1, 2]", "error message %s", "formatted")
// a.NotSubsetf([1, 3, 4], [1, 2], "error message %s", "formatted")
// a.NotSubsetf({"x": 1, "y": 2}, {"z": 3}, "error message %s", "formatted")
func (a *Assertions) NotSubsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1484,10 +1505,11 @@ func (a *Assertions) Samef(expected interface{}, actual interface{}, msg string,
Samef(a.t, expected, actual, msg, args...)
}
// Subset asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subset asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subset([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]")
// a.Subset([1, 2, 3], [1, 2])
// a.Subset({"x": 1, "y": 2}, {"x": 1})
func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()
@ -1495,10 +1517,11 @@ func (a *Assertions) Subset(list interface{}, subset interface{}, msgAndArgs ...
Subset(a.t, list, subset, msgAndArgs...)
}
// Subsetf asserts that the specified list(array, slice...) contains all
// elements given in the specified subset(array, slice...).
// Subsetf asserts that the specified list(array, slice...) or map contains all
// elements given in the specified subset list(array, slice...) or map.
//
// a.Subsetf([1, 2, 3], [1, 2], "But [1, 2, 3] does contain [1, 2]", "error message %s", "formatted")
// a.Subsetf([1, 2, 3], [1, 2], "error message %s", "formatted")
// a.Subsetf({"x": 1, "y": 2}, {"x": 1}, "error message %s", "formatted")
func (a *Assertions) Subsetf(list interface{}, subset interface{}, msg string, args ...interface{}) {
if h, ok := a.t.(tHelper); ok {
h.Helper()


@ -125,9 +125,11 @@ func (SeverityNumber) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_d1c030a3ec7e961e, []int{0}
}
// LogRecordFlags is defined as a protobuf 'uint32' type and is to be used as
// bit-fields. Each non-zero value defined in this enum is a bit-mask.
// To extract the bit-field, for example, use an expression like:
// LogRecordFlags represents constants used to interpret the
// LogRecord.flags field, which is protobuf 'fixed32' type and is to
// be used as bit-fields. Each non-zero value defined in this enum is
// a bit-mask. To extract the bit-field, for example, use an
// expression like:
//
// (logRecord.flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK)
type LogRecordFlags int32
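The reworded comment above describes LogRecord.flags as a fixed32 bit-field whose low byte holds the W3C trace flags. A minimal standalone sketch of the masking; the constant is assumed to mirror LOG_RECORD_FLAGS_TRACE_FLAGS_MASK (the low-byte mask in the generated enum) and the flags value is invented:

package main

import "fmt"

// logRecordFlagsTraceFlagsMask mirrors LOG_RECORD_FLAGS_TRACE_FLAGS_MASK,
// the low-byte mask from the generated LogRecordFlags enum.
const logRecordFlagsTraceFlagsMask uint32 = 0x000000FF

func main() {
	// Invented flags value: some future bits set in the upper 24 bits,
	// W3C "sampled" flag (0x01) in the low byte.
	flags := uint32(0x00000301)

	traceFlags := flags & logRecordFlagsTraceFlagsMask
	fmt.Printf("trace flags: 0x%02x\n", traceFlags) // prints 0x01
}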
@ -225,6 +227,9 @@ type ResourceLogs struct {
Resource v1.Resource `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource"`
// A list of ScopeLogs that originate from a resource.
ScopeLogs []*ScopeLogs `protobuf:"bytes,2,rep,name=scope_logs,json=scopeLogs,proto3" json:"scope_logs,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the resource data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to the data in the "resource" field. It does not apply
// to the data in the "scope_logs" field which have their own schema_url field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
@ -299,6 +304,9 @@ type ScopeLogs struct {
Scope v11.InstrumentationScope `protobuf:"bytes,1,opt,name=scope,proto3" json:"scope"`
// A list of log records.
LogRecords []*LogRecord `protobuf:"bytes,2,rep,name=log_records,json=logRecords,proto3" json:"log_records,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the log data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to all logs in the "logs" field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
}


@ -223,6 +223,9 @@ type ResourceMetrics struct {
Resource v1.Resource `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource"`
// A list of metrics that originate from a resource.
ScopeMetrics []*ScopeMetrics `protobuf:"bytes,2,rep,name=scope_metrics,json=scopeMetrics,proto3" json:"scope_metrics,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the resource data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to the data in the "resource" field. It does not apply
// to the data in the "scope_metrics" field which have their own schema_url field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
@ -297,6 +300,9 @@ type ScopeMetrics struct {
Scope v11.InstrumentationScope `protobuf:"bytes,1,opt,name=scope,proto3" json:"scope"`
// A list of metrics that originate from an instrumentation library.
Metrics []*Metric `protobuf:"bytes,2,rep,name=metrics,proto3" json:"metrics,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the metric data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to all metrics in the "metrics" field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
}
@ -443,7 +449,7 @@ func (m *ScopeMetrics) GetSchemaUrl() string {
// when the start time is truly unknown, setting StartTimeUnixNano is
// strongly encouraged.
type Metric struct {
// name of the metric, including its DNS name prefix. It must be unique.
// name of the metric.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// description of the metric, which can be used in documentation.
Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"`


@ -29,6 +29,48 @@ var _ = math.Inf
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// SpanFlags represents constants used to interpret the
// Span.flags field, which is protobuf 'fixed32' type and is to
// be used as bit-fields. Each non-zero value defined in this enum is
// a bit-mask. To extract the bit-field, for example, use an
// expression like:
//
// (span.flags & SPAN_FLAGS_TRACE_FLAGS_MASK)
//
// See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.
//
// Note that Span flags were introduced in version 1.1 of the
// OpenTelemetry protocol. Older Span producers do not set this
// field, consequently consumers should not rely on the absence of a
// particular flag bit to indicate the presence of a particular feature.
type SpanFlags int32
const (
// The zero value for the enum. Should not be used for comparisons.
// Instead use bitwise "and" with the appropriate mask as shown above.
SpanFlags_SPAN_FLAGS_DO_NOT_USE SpanFlags = 0
// Bits 0-7 are used for trace flags.
SpanFlags_SPAN_FLAGS_TRACE_FLAGS_MASK SpanFlags = 255
)
var SpanFlags_name = map[int32]string{
0: "SPAN_FLAGS_DO_NOT_USE",
255: "SPAN_FLAGS_TRACE_FLAGS_MASK",
}
var SpanFlags_value = map[string]int32{
"SPAN_FLAGS_DO_NOT_USE": 0,
"SPAN_FLAGS_TRACE_FLAGS_MASK": 255,
}
func (x SpanFlags) String() string {
return proto.EnumName(SpanFlags_name, int32(x))
}
func (SpanFlags) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_5c407ac9c675a601, []int{0}
}
// SpanKind is the type of span. Can be used to specify additional relationships between spans
// in addition to a parent/child relationship.
type Span_SpanKind int32
@ -183,6 +225,9 @@ type ResourceSpans struct {
Resource v1.Resource `protobuf:"bytes,1,opt,name=resource,proto3" json:"resource"`
// A list of ScopeSpans that originate from a resource.
ScopeSpans []*ScopeSpans `protobuf:"bytes,2,rep,name=scope_spans,json=scopeSpans,proto3" json:"scope_spans,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the resource data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to the data in the "resource" field. It does not apply
// to the data in the "scope_spans" field which have their own schema_url field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
@ -257,6 +302,9 @@ type ScopeSpans struct {
Scope v11.InstrumentationScope `protobuf:"bytes,1,opt,name=scope,proto3" json:"scope"`
// A list of Spans that originate from an instrumentation scope.
Spans []*Span `protobuf:"bytes,2,rep,name=spans,proto3" json:"spans,omitempty"`
// The Schema URL, if known. This is the identifier of the Schema that the span data
// is recorded in. To learn more about Schema URL see
// https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
// This schema_url applies to all spans and span events in the "spans" field.
SchemaUrl string `protobuf:"bytes,3,opt,name=schema_url,json=schemaUrl,proto3" json:"schema_url,omitempty"`
}
@ -340,6 +388,21 @@ type Span struct {
// The `span_id` of this span's parent span. If this is a root span, then this
// field must be empty. The ID is an 8-byte array.
ParentSpanId go_opentelemetry_io_collector_pdata_internal_data.SpanID `protobuf:"bytes,4,opt,name=parent_span_id,json=parentSpanId,proto3,customtype=go.opentelemetry.io/collector/pdata/internal/data.SpanID" json:"parent_span_id"`
// Flags, a bit field. 8 least significant bits are the trace
// flags as defined in W3C Trace Context specification. Readers
// MUST not assume that 24 most significant bits will be zero.
// To read the 8-bit W3C trace flag, use `flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.
//
// When creating span messages, if the message is logically forwarded from another source
// with an equivalent flags fields (i.e., usually another OTLP span message), the field SHOULD
// be copied as-is. If creating from a source that does not have an equivalent flags field
// (such as a runtime representation of an OpenTelemetry span), the high 24 bits MUST
// be set to zero.
//
// [Optional].
//
// See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.
Flags uint32 `protobuf:"fixed32,16,opt,name=flags,proto3" json:"flags,omitempty"`
// A description of the span's operation.
//
// For example, the name can be a qualified method name or a file name
@ -443,6 +506,13 @@ func (m *Span) GetTraceState() string {
return ""
}
func (m *Span) GetFlags() uint32 {
if m != nil {
return m.Flags
}
return 0
}
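The SpanFlags enum and the Span.Flags documentation above say to read the 8-bit W3C trace flags with flags & SPAN_FLAGS_TRACE_FLAGS_MASK, to ignore the upper 24 bits when reading, and to zero them when creating spans from sources without an equivalent field. A small sketch of that masking on a plain uint32; the constants mirror the enum values above and the flags value is invented:

package main

import "fmt"

const (
	spanFlagsTraceFlagsMask uint32 = 0x000000FF // SPAN_FLAGS_TRACE_FLAGS_MASK
	w3cSampledFlag          uint32 = 0x01       // "sampled" bit from W3C trace-context
)

func main() {
	// Invented value for span.Flags: readers must not assume the upper
	// 24 bits are zero.
	flags := uint32(0x0000AB01)

	traceFlags := flags & spanFlagsTraceFlagsMask
	fmt.Println("sampled:", traceFlags&w3cSampledFlag != 0) // true

	// When creating a span from a source without an equivalent flags field,
	// only the low 8 bits may be set.
	newFlags := traceFlags
	_ = newFlags
}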
func (m *Span) GetName() string {
if m != nil {
return m.Name
@ -617,6 +687,15 @@ type Span_Link struct {
// dropped_attributes_count is the number of dropped attributes. If the value is 0,
// then no attributes were dropped.
DroppedAttributesCount uint32 `protobuf:"varint,5,opt,name=dropped_attributes_count,json=droppedAttributesCount,proto3" json:"dropped_attributes_count,omitempty"`
// Flags, a bit field. 8 least significant bits are the trace
// flags as defined in W3C Trace Context specification. Readers
// MUST not assume that 24 most significant bits will be zero.
// When creating new spans, the most-significant 24-bits MUST be
// zero. To read the 8-bit W3C trace flag (use flags &
// SPAN_FLAGS_TRACE_FLAGS_MASK). [Optional].
//
// See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.
Flags uint32 `protobuf:"fixed32,6,opt,name=flags,proto3" json:"flags,omitempty"`
}
func (m *Span_Link) Reset() { *m = Span_Link{} }
@ -673,6 +752,13 @@ func (m *Span_Link) GetDroppedAttributesCount() uint32 {
return 0
}
func (m *Span_Link) GetFlags() uint32 {
if m != nil {
return m.Flags
}
return 0
}
// The Status type defines a logical error model that is suitable for different
// programming environments, including REST APIs and RPC APIs.
type Status struct {
@ -730,6 +816,7 @@ func (m *Status) GetCode() Status_StatusCode {
}
func init() {
proto.RegisterEnum("opentelemetry.proto.trace.v1.SpanFlags", SpanFlags_name, SpanFlags_value)
proto.RegisterEnum("opentelemetry.proto.trace.v1.Span_SpanKind", Span_SpanKind_name, Span_SpanKind_value)
proto.RegisterEnum("opentelemetry.proto.trace.v1.Status_StatusCode", Status_StatusCode_name, Status_StatusCode_value)
proto.RegisterType((*TracesData)(nil), "opentelemetry.proto.trace.v1.TracesData")
@ -746,70 +833,75 @@ func init() {
}
var fileDescriptor_5c407ac9c675a601 = []byte{
// 1007 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x56, 0x4f, 0x6f, 0xe3, 0x44,
0x14, 0xcf, 0xa4, 0x4e, 0xda, 0xbe, 0xb6, 0x59, 0x77, 0xe8, 0x56, 0xa6, 0x5a, 0xd2, 0x28, 0x5a,
0x89, 0xc0, 0x4a, 0x09, 0xed, 0x5e, 0xca, 0x01, 0xb1, 0x6d, 0x62, 0x24, 0xd3, 0x6e, 0x52, 0x8d,
0x93, 0x4a, 0x20, 0x24, 0x33, 0x1b, 0x0f, 0xc5, 0x6a, 0x32, 0xb6, 0xec, 0x49, 0xb5, 0x7b, 0xe3,
0x23, 0x70, 0xe5, 0x13, 0x20, 0xc1, 0x9d, 0x1b, 0xf7, 0x15, 0xa7, 0x3d, 0x22, 0x0e, 0x2b, 0xd4,
0x5e, 0xe0, 0x5b, 0xa0, 0x99, 0xb1, 0xf3, 0xa7, 0xaa, 0x92, 0xad, 0xc4, 0x5e, 0xf6, 0xd2, 0x4e,
0xde, 0x7b, 0xbf, 0x3f, 0xef, 0xcd, 0xb3, 0x65, 0xa8, 0x85, 0x11, 0xe3, 0x82, 0x0d, 0xd8, 0x90,
0x89, 0xf8, 0x45, 0x23, 0x8a, 0x43, 0x11, 0x36, 0x44, 0x4c, 0xfb, 0xac, 0x71, 0xb9, 0xa7, 0x0f,
0x75, 0x15, 0xc4, 0x0f, 0x66, 0x2a, 0x75, 0xb0, 0xae, 0x0b, 0x2e, 0xf7, 0x76, 0xb6, 0xce, 0xc3,
0xf3, 0x50, 0xa3, 0xe5, 0x49, 0xa7, 0x77, 0x3e, 0xbe, 0x8d, 0xbd, 0x1f, 0x0e, 0x87, 0x21, 0x97,
0xf4, 0xfa, 0x94, 0xd6, 0xd6, 0x6f, 0xab, 0x8d, 0x59, 0x12, 0x8e, 0x62, 0x6d, 0x26, 0x3b, 0xeb,
0xfa, 0xea, 0xb7, 0x00, 0x5d, 0xa9, 0x9e, 0xb4, 0xa8, 0xa0, 0x98, 0x40, 0x29, 0xcb, 0x7b, 0x49,
0x44, 0x79, 0x62, 0xa1, 0xca, 0x52, 0x6d, 0x6d, 0xff, 0x51, 0x7d, 0x9e, 0xed, 0x3a, 0x49, 0x31,
0xae, 0x84, 0x90, 0x8d, 0x78, 0xfa, 0x67, 0xf5, 0xe7, 0x3c, 0x6c, 0xcc, 0x14, 0x60, 0x0f, 0xb6,
0x7d, 0x16, 0xc5, 0xac, 0x4f, 0x05, 0xf3, 0xbd, 0xa4, 0x1f, 0x46, 0x99, 0xda, 0x3f, 0xcb, 0x4a,
0xae, 0x36, 0x5f, 0xce, 0x95, 0x08, 0xad, 0xb5, 0x35, 0x21, 0x9a, 0x44, 0xf1, 0x31, 0xac, 0x64,
0x1e, 0x2c, 0x54, 0x41, 0xb5, 0xb5, 0xfd, 0x8f, 0x6e, 0x65, 0x1c, 0xcf, 0x62, 0xaa, 0x87, 0x23,
0xe3, 0xe5, 0xeb, 0xdd, 0x1c, 0x19, 0x13, 0x60, 0x07, 0xd6, 0xa6, 0x2d, 0xe6, 0xef, 0xe8, 0x10,
0x92, 0x89, 0xaf, 0x0f, 0x00, 0x92, 0xfe, 0xf7, 0x6c, 0x48, 0xbd, 0x51, 0x3c, 0xb0, 0x96, 0x2a,
0xa8, 0xb6, 0x4a, 0x56, 0x75, 0xa4, 0x17, 0x0f, 0xaa, 0xbf, 0x21, 0x80, 0xa9, 0x2e, 0x3a, 0x50,
0x50, 0xd8, 0xb4, 0x85, 0xc7, 0xb7, 0x4a, 0xa6, 0x97, 0x7f, 0xb9, 0x57, 0x77, 0x78, 0x22, 0xe2,
0xd1, 0x90, 0x71, 0x41, 0x45, 0x10, 0x72, 0x45, 0x94, 0x36, 0xa3, 0x79, 0xf0, 0x01, 0x14, 0xa6,
0x7b, 0xa8, 0x2e, 0xe8, 0x21, 0xa2, 0x9c, 0x68, 0xc0, 0x22, 0xe3, 0x3f, 0x6c, 0x80, 0x21, 0xcb,
0xf1, 0x37, 0xb0, 0xa2, 0xf0, 0x5e, 0xe0, 0x2b, 0xd7, 0xeb, 0x47, 0x87, 0xd2, 0xc0, 0x5f, 0xaf,
0x77, 0x3f, 0x3d, 0x0f, 0x6f, 0xc8, 0x05, 0x72, 0x87, 0x07, 0x03, 0xd6, 0x17, 0x61, 0xdc, 0x88,
0x7c, 0x2a, 0x68, 0x23, 0xe0, 0x82, 0xc5, 0x9c, 0x0e, 0x1a, 0xf2, 0x57, 0x5d, 0xed, 0xa5, 0xd3,
0x22, 0xcb, 0x8a, 0xd2, 0xf1, 0xf1, 0x57, 0xb0, 0x2c, 0xed, 0x48, 0xf2, 0xbc, 0x22, 0x7f, 0x92,
0x92, 0x1f, 0xdc, 0x9d, 0x5c, 0xda, 0x75, 0x5a, 0xa4, 0x28, 0x09, 0x1d, 0x1f, 0xef, 0xc2, 0x9a,
0x36, 0x9e, 0x08, 0x2a, 0x58, 0xda, 0x21, 0xa8, 0x90, 0x2b, 0x23, 0xf8, 0x3b, 0x28, 0x45, 0x34,
0x66, 0x5c, 0x78, 0x99, 0x05, 0xe3, 0x7f, 0xb2, 0xb0, 0xae, 0x79, 0x5d, 0x6d, 0x04, 0x83, 0xc1,
0xe9, 0x90, 0x59, 0x05, 0xe5, 0x40, 0x9d, 0xf1, 0xe7, 0x60, 0x5c, 0x04, 0xdc, 0xb7, 0x8a, 0x15,
0x54, 0x2b, 0x2d, 0x7a, 0x16, 0x25, 0x8f, 0xfa, 0x73, 0x1c, 0x70, 0x9f, 0x28, 0x20, 0x6e, 0xc0,
0x56, 0x22, 0x68, 0x2c, 0x3c, 0x11, 0x0c, 0x99, 0x37, 0xe2, 0xc1, 0x73, 0x8f, 0x53, 0x1e, 0x5a,
0xcb, 0x15, 0x54, 0x2b, 0x92, 0x4d, 0x95, 0xeb, 0x06, 0x43, 0xd6, 0xe3, 0xc1, 0xf3, 0x36, 0xe5,
0x21, 0x7e, 0x04, 0x98, 0x71, 0xff, 0x66, 0xf9, 0x8a, 0x2a, 0xbf, 0xc7, 0xb8, 0x3f, 0x53, 0xfc,
0x14, 0x80, 0x0a, 0x11, 0x07, 0xcf, 0x46, 0x82, 0x25, 0xd6, 0xaa, 0xda, 0xad, 0x0f, 0x17, 0x2c,
0xeb, 0x31, 0x7b, 0x71, 0x46, 0x07, 0xa3, 0x6c, 0x41, 0xa7, 0x08, 0xf0, 0x01, 0x58, 0x7e, 0x1c,
0x46, 0x11, 0xf3, 0xbd, 0x49, 0xd4, 0xeb, 0x87, 0x23, 0x2e, 0x2c, 0xa8, 0xa0, 0xda, 0x06, 0xd9,
0x4e, 0xf3, 0x87, 0xe3, 0x74, 0x53, 0x66, 0xf1, 0x13, 0x28, 0xb2, 0x4b, 0xc6, 0x45, 0x62, 0xad,
0xbd, 0xd1, 0x43, 0x2a, 0x27, 0x65, 0x4b, 0x00, 0x49, 0x71, 0xf8, 0x13, 0xd8, 0xca, 0xb4, 0x75,
0x24, 0xd5, 0x5d, 0x57, 0xba, 0x38, 0xcd, 0x29, 0x4c, 0xaa, 0xf9, 0x19, 0x14, 0x06, 0x01, 0xbf,
0x48, 0xac, 0x8d, 0x39, 0x7d, 0xcf, 0x4a, 0x9e, 0x04, 0xfc, 0x82, 0x68, 0x14, 0xae, 0xc3, 0x7b,
0x99, 0xa0, 0x0a, 0xa4, 0x7a, 0x25, 0xa5, 0xb7, 0x99, 0xa6, 0x24, 0x20, 0x95, 0x3b, 0x82, 0xa2,
0xdc, 0xd0, 0x51, 0x62, 0xdd, 0x53, 0x2f, 0x85, 0x87, 0x0b, 0xf4, 0x54, 0x6d, 0x3a, 0xe4, 0x14,
0xb9, 0xf3, 0x07, 0x82, 0x82, 0x6a, 0x01, 0x3f, 0x84, 0xd2, 0x8d, 0x2b, 0x46, 0xea, 0x8a, 0xd7,
0xc5, 0xf4, 0xfd, 0x66, 0x2b, 0x99, 0x9f, 0x5a, 0xc9, 0xd9, 0x3b, 0x5f, 0x7a, 0x9b, 0x77, 0x6e,
0xcc, 0xbb, 0xf3, 0x9d, 0x7f, 0xf3, 0x60, 0xc8, 0xf9, 0xbc, 0xc3, 0xaf, 0x9e, 0xd9, 0x59, 0x1b,
0x6f, 0x73, 0xd6, 0x85, 0x79, 0xb3, 0xae, 0xfe, 0x84, 0x60, 0x25, 0x7b, 0xb3, 0xe0, 0xf7, 0xe1,
0xbe, 0x7b, 0x7a, 0xd8, 0xf6, 0x8e, 0x9d, 0x76, 0xcb, 0xeb, 0xb5, 0xdd, 0x53, 0xbb, 0xe9, 0x7c,
0xe1, 0xd8, 0x2d, 0x33, 0x87, 0xb7, 0x01, 0x4f, 0x52, 0x4e, 0xbb, 0x6b, 0x93, 0xf6, 0xe1, 0x89,
0x89, 0xf0, 0x16, 0x98, 0x93, 0xb8, 0x6b, 0x93, 0x33, 0x9b, 0x98, 0xf9, 0xd9, 0x68, 0xf3, 0xc4,
0xb1, 0xdb, 0x5d, 0x73, 0x69, 0x96, 0xe3, 0x94, 0x74, 0x5a, 0xbd, 0xa6, 0x4d, 0x4c, 0x63, 0x36,
0xde, 0xec, 0xb4, 0xdd, 0xde, 0x53, 0x9b, 0x98, 0x85, 0xea, 0xef, 0x08, 0x8a, 0x7a, 0xdb, 0xb1,
0x05, 0xcb, 0x43, 0x96, 0x24, 0xf4, 0x3c, 0x5b, 0xd9, 0xec, 0x27, 0x6e, 0x82, 0xd1, 0x0f, 0x7d,
0x3d, 0xe3, 0xd2, 0x7e, 0xe3, 0x4d, 0x9e, 0x9d, 0xf4, 0x5f, 0x33, 0xf4, 0x19, 0x51, 0xe0, 0x6a,
0x1b, 0x60, 0x12, 0xc3, 0xf7, 0x61, 0xd3, 0xed, 0x1e, 0x76, 0x7b, 0xae, 0xd7, 0xec, 0xb4, 0x6c,
0x39, 0x08, 0xbb, 0x6b, 0xe6, 0x30, 0x86, 0xd2, 0x74, 0xb8, 0x73, 0x6c, 0xa2, 0x9b, 0xa5, 0x36,
0x21, 0x1d, 0x62, 0xe6, 0xbf, 0x34, 0x56, 0x90, 0x99, 0x3f, 0xfa, 0x15, 0xbd, 0xbc, 0x2a, 0xa3,
0x57, 0x57, 0x65, 0xf4, 0xf7, 0x55, 0x19, 0xfd, 0x78, 0x5d, 0xce, 0xbd, 0xba, 0x2e, 0xe7, 0xfe,
0xbc, 0x2e, 0xe7, 0x60, 0x37, 0x08, 0xe7, 0x3a, 0x3d, 0xd2, 0x5f, 0x70, 0xa7, 0x32, 0x78, 0x8a,
0xbe, 0x6e, 0xde, 0x79, 0x23, 0xf5, 0x57, 0xe2, 0x39, 0xe3, 0xe3, 0x4f, 0xd6, 0x5f, 0xf2, 0x0f,
0x3a, 0x11, 0xe3, 0xdd, 0x31, 0x85, 0x22, 0xd7, 0x8f, 0x45, 0xfd, 0x6c, 0xef, 0x59, 0x51, 0x21,
0x1e, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0xbb, 0xe9, 0x90, 0xbc, 0xf8, 0x0a, 0x00, 0x00,
// 1073 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x56, 0x4f, 0x6f, 0x1b, 0x45,
0x14, 0xf7, 0x38, 0xfe, 0x93, 0xbc, 0x24, 0xee, 0x76, 0x70, 0xa3, 0x25, 0x14, 0xc7, 0xb2, 0x2a,
0x61, 0x5a, 0xc9, 0x26, 0xe9, 0x25, 0x1c, 0x10, 0x75, 0xec, 0x0d, 0x18, 0x27, 0x76, 0x34, 0x6b,
0x47, 0x02, 0x21, 0x2d, 0x5b, 0xef, 0xd4, 0xac, 0x62, 0xcf, 0xae, 0x76, 0xc7, 0x51, 0xfb, 0x2d,
0xb8, 0xf2, 0x09, 0x90, 0x80, 0x33, 0x37, 0xee, 0x15, 0xa7, 0x1e, 0x11, 0x87, 0x0a, 0x25, 0x17,
0xbe, 0x45, 0xd1, 0xcc, 0xec, 0xda, 0xeb, 0x28, 0x72, 0x1a, 0x89, 0x5e, 0xb8, 0x24, 0x33, 0xef,
0xcf, 0xef, 0xf7, 0x7b, 0x6f, 0xde, 0x8c, 0x17, 0xaa, 0x9e, 0x4f, 0x19, 0xa7, 0x63, 0x3a, 0xa1,
0x3c, 0x78, 0x51, 0xf7, 0x03, 0x8f, 0x7b, 0x75, 0x1e, 0xd8, 0x43, 0x5a, 0x3f, 0xdf, 0x55, 0x8b,
0x9a, 0x34, 0xe2, 0xfb, 0x0b, 0x91, 0xca, 0x58, 0x53, 0x01, 0xe7, 0xbb, 0xdb, 0xc5, 0x91, 0x37,
0xf2, 0x54, 0xb6, 0x58, 0x29, 0xf7, 0xf6, 0xc3, 0xeb, 0xd0, 0x87, 0xde, 0x64, 0xe2, 0x31, 0x01,
0xaf, 0x56, 0x51, 0x6c, 0xed, 0xba, 0xd8, 0x80, 0x86, 0xde, 0x34, 0x50, 0x62, 0xe2, 0xb5, 0x8a,
0xaf, 0x7c, 0x07, 0xd0, 0x17, 0xec, 0x61, 0xcb, 0xe6, 0x36, 0x26, 0x50, 0x88, 0xfd, 0x56, 0xe8,
0xdb, 0x2c, 0xd4, 0x51, 0x79, 0xa5, 0xba, 0xbe, 0xf7, 0xa8, 0xb6, 0x4c, 0x76, 0x8d, 0x44, 0x39,
0xa6, 0x48, 0x21, 0x9b, 0x41, 0x72, 0x5b, 0xf9, 0x29, 0x0d, 0x9b, 0x0b, 0x01, 0xd8, 0x82, 0x2d,
0x87, 0xfa, 0x01, 0x1d, 0xda, 0x9c, 0x3a, 0x56, 0x38, 0xf4, 0xfc, 0x98, 0xed, 0x9f, 0xbc, 0xa4,
0xab, 0x2e, 0xa7, 0x33, 0x45, 0x86, 0xe2, 0x2a, 0xce, 0x81, 0xe6, 0x56, 0xdc, 0x81, 0xd5, 0x58,
0x83, 0x8e, 0xca, 0xa8, 0xba, 0xbe, 0xf7, 0xf1, 0xb5, 0x88, 0xb3, 0x5e, 0x24, 0x6a, 0x38, 0xc8,
0xbc, 0x7c, 0xbd, 0x93, 0x22, 0x33, 0x00, 0xdc, 0x86, 0xf5, 0xa4, 0xc4, 0xf4, 0x2d, 0x15, 0x42,
0x38, 0xd7, 0xf5, 0x21, 0x40, 0x38, 0xfc, 0x9e, 0x4e, 0x6c, 0x6b, 0x1a, 0x8c, 0xf5, 0x95, 0x32,
0xaa, 0xae, 0x91, 0x35, 0x65, 0x19, 0x04, 0xe3, 0xca, 0x6f, 0x08, 0x20, 0x51, 0x45, 0x0f, 0xb2,
0x32, 0x37, 0x2a, 0xe1, 0xf1, 0xb5, 0x94, 0xd1, 0xe1, 0x9f, 0xef, 0xd6, 0xda, 0x2c, 0xe4, 0xc1,
0x74, 0x42, 0x19, 0xb7, 0xb9, 0xeb, 0x31, 0x09, 0x14, 0x15, 0xa3, 0x70, 0xf0, 0x3e, 0x64, 0x93,
0x35, 0x54, 0x6e, 0xa8, 0xc1, 0xb7, 0x19, 0x51, 0x09, 0x37, 0x09, 0xff, 0x75, 0x13, 0x32, 0x22,
0x1c, 0x7f, 0x0b, 0xab, 0x32, 0xdf, 0x72, 0x1d, 0xa9, 0x7a, 0xe3, 0xa0, 0x21, 0x04, 0xfc, 0xf5,
0x7a, 0xe7, 0xd3, 0x91, 0x77, 0x85, 0xce, 0x15, 0x33, 0x3c, 0x1e, 0xd3, 0x21, 0xf7, 0x82, 0xba,
0xef, 0xd8, 0xdc, 0xae, 0xbb, 0x8c, 0xd3, 0x80, 0xd9, 0xe3, 0xba, 0xd8, 0xd5, 0xe4, 0x5c, 0xb6,
0x5b, 0x24, 0x2f, 0x21, 0xdb, 0x0e, 0xfe, 0x1a, 0xf2, 0x42, 0x8e, 0x00, 0x4f, 0x4b, 0xf0, 0x27,
0x11, 0xf8, 0xfe, 0xed, 0xc1, 0x85, 0xdc, 0x76, 0x8b, 0xe4, 0x04, 0x60, 0xdb, 0xc1, 0x3b, 0xb0,
0xae, 0x84, 0x87, 0xdc, 0xe6, 0x34, 0xaa, 0x10, 0xa4, 0xc9, 0x14, 0x16, 0xfc, 0x0c, 0x0a, 0xbe,
0x1d, 0x50, 0xc6, 0xad, 0x58, 0x42, 0xe6, 0x3f, 0x92, 0xb0, 0xa1, 0x70, 0x4d, 0x25, 0xa4, 0x08,
0xd9, 0x67, 0x63, 0x7b, 0x14, 0xea, 0x5a, 0x19, 0x55, 0xf3, 0x44, 0x6d, 0x30, 0x86, 0x0c, 0xb3,
0x27, 0x54, 0xcf, 0x4a, 0x5d, 0x72, 0x8d, 0x3f, 0x87, 0xcc, 0x99, 0xcb, 0x1c, 0x3d, 0x57, 0x46,
0xd5, 0xc2, 0x4d, 0x37, 0x54, 0xa0, 0xcb, 0x3f, 0x1d, 0x97, 0x39, 0x44, 0x26, 0xe2, 0x3a, 0x14,
0x43, 0x6e, 0x07, 0xdc, 0xe2, 0xee, 0x84, 0x5a, 0x53, 0xe6, 0x3e, 0xb7, 0x98, 0xcd, 0x3c, 0x3d,
0x5f, 0x46, 0xd5, 0x1c, 0xb9, 0x2b, 0x7d, 0x7d, 0x77, 0x42, 0x07, 0xcc, 0x7d, 0xde, 0xb5, 0x99,
0x87, 0x1f, 0x01, 0xa6, 0xcc, 0xb9, 0x1a, 0xbe, 0x2a, 0xc3, 0xef, 0x50, 0xe6, 0x2c, 0x04, 0x1f,
0x03, 0xd8, 0x9c, 0x07, 0xee, 0xd3, 0x29, 0xa7, 0xa1, 0xbe, 0x26, 0x27, 0xee, 0xa3, 0x1b, 0x46,
0xb8, 0x43, 0x5f, 0x9c, 0xda, 0xe3, 0x69, 0x3c, 0xb6, 0x09, 0x00, 0xbc, 0x0f, 0xba, 0x13, 0x78,
0xbe, 0x4f, 0x1d, 0x6b, 0x6e, 0xb5, 0x86, 0xde, 0x94, 0x71, 0x1d, 0xca, 0xa8, 0xba, 0x49, 0xb6,
0x22, 0x7f, 0x63, 0xe6, 0x6e, 0x0a, 0x2f, 0x7e, 0x02, 0x39, 0x7a, 0x4e, 0x19, 0x0f, 0xf5, 0xf5,
0xb7, 0xba, 0xba, 0xa2, 0x53, 0x86, 0x48, 0x20, 0x51, 0x1e, 0xfe, 0x04, 0x8a, 0x31, 0xb7, 0xb2,
0x44, 0xbc, 0x1b, 0x92, 0x17, 0x47, 0x3e, 0x99, 0x13, 0x71, 0x7e, 0x06, 0xd9, 0xb1, 0xcb, 0xce,
0x42, 0x7d, 0x73, 0x49, 0xdd, 0x8b, 0x94, 0x47, 0x2e, 0x3b, 0x23, 0x2a, 0x0b, 0xd7, 0xe0, 0xbd,
0x98, 0x50, 0x1a, 0x22, 0xbe, 0x82, 0xe4, 0xbb, 0x1b, 0xb9, 0x44, 0x42, 0x44, 0x77, 0x00, 0x39,
0x31, 0xb7, 0xd3, 0x50, 0xbf, 0x23, 0x9f, 0x8a, 0x07, 0x37, 0xf0, 0xc9, 0xd8, 0xa8, 0xc9, 0x51,
0xe6, 0xf6, 0x1f, 0x08, 0xb2, 0xb2, 0x04, 0xfc, 0x00, 0x0a, 0x57, 0x8e, 0x18, 0xc9, 0x23, 0xde,
0xe0, 0xc9, 0xf3, 0x8d, 0x47, 0x32, 0x9d, 0x18, 0xc9, 0xc5, 0x33, 0x5f, 0x79, 0x97, 0x67, 0x9e,
0x59, 0x76, 0xe6, 0xdb, 0x6f, 0xd2, 0x90, 0x11, 0xfd, 0xf9, 0x1f, 0x3f, 0x48, 0x8b, 0xbd, 0xce,
0xbc, 0xcb, 0x5e, 0x67, 0x97, 0xde, 0xaf, 0xd9, 0x8b, 0x95, 0x4b, 0xbc, 0x58, 0x95, 0x1f, 0x11,
0xac, 0xc6, 0xef, 0x0d, 0x7e, 0x1f, 0xee, 0x99, 0x27, 0x8d, 0xae, 0xd5, 0x69, 0x77, 0x5b, 0xd6,
0xa0, 0x6b, 0x9e, 0x18, 0xcd, 0xf6, 0x61, 0xdb, 0x68, 0x69, 0x29, 0xbc, 0x05, 0x78, 0xee, 0x6a,
0x77, 0xfb, 0x06, 0xe9, 0x36, 0x8e, 0x34, 0x84, 0x8b, 0xa0, 0xcd, 0xed, 0xa6, 0x41, 0x4e, 0x0d,
0xa2, 0xa5, 0x17, 0xad, 0xcd, 0xa3, 0xb6, 0xd1, 0xed, 0x6b, 0x2b, 0x8b, 0x18, 0x27, 0xa4, 0xd7,
0x1a, 0x34, 0x0d, 0xa2, 0x65, 0x16, 0xed, 0xcd, 0x5e, 0xd7, 0x1c, 0x1c, 0x1b, 0x44, 0xcb, 0x56,
0x7e, 0x47, 0x90, 0x53, 0x77, 0x00, 0xeb, 0x90, 0x9f, 0xd0, 0x30, 0xb4, 0x47, 0xf1, 0x20, 0xc7,
0x5b, 0xdc, 0x84, 0xcc, 0xd0, 0x73, 0x54, 0xe7, 0x0b, 0x7b, 0xf5, 0xb7, 0xb9, 0x51, 0xd1, 0xbf,
0xa6, 0xe7, 0x50, 0x22, 0x93, 0x2b, 0x5d, 0x80, 0xb9, 0x0d, 0xdf, 0x83, 0xbb, 0x66, 0xbf, 0xd1,
0x1f, 0x98, 0x56, 0xb3, 0xd7, 0x32, 0x44, 0x23, 0x8c, 0xbe, 0x96, 0xc2, 0x18, 0x0a, 0x49, 0x73,
0xaf, 0xa3, 0xa1, 0xab, 0xa1, 0x06, 0x21, 0x3d, 0xa2, 0xa5, 0xbf, 0xca, 0xac, 0x22, 0x2d, 0xfd,
0xf0, 0x4b, 0x58, 0x13, 0xad, 0x3d, 0x94, 0x3f, 0x0d, 0x71, 0x6f, 0x0f, 0x8f, 0x1a, 0x5f, 0x98,
0x56, 0xab, 0x67, 0x75, 0x7b, 0x7d, 0x6b, 0x60, 0x1a, 0x5a, 0x0a, 0x97, 0xe1, 0x83, 0x84, 0xab,
0x4f, 0x1a, 0x4d, 0x23, 0x5a, 0x1f, 0x37, 0xcc, 0x8e, 0xf6, 0x06, 0x1d, 0xfc, 0x82, 0x5e, 0x5e,
0x94, 0xd0, 0xab, 0x8b, 0x12, 0xfa, 0xfb, 0xa2, 0x84, 0x7e, 0xb8, 0x2c, 0xa5, 0x5e, 0x5d, 0x96,
0x52, 0x7f, 0x5e, 0x96, 0x52, 0xb0, 0xe3, 0x7a, 0x4b, 0x6b, 0x3e, 0x50, 0xdf, 0x8d, 0x27, 0xc2,
0x78, 0x82, 0xbe, 0x69, 0xde, 0x7a, 0xe2, 0xd5, 0xb7, 0xe9, 0x88, 0xb2, 0xd9, 0x87, 0xf2, 0xcf,
0xe9, 0xfb, 0x3d, 0x9f, 0xb2, 0xfe, 0x0c, 0x42, 0x82, 0xab, 0x6b, 0x57, 0x3b, 0xdd, 0x7d, 0x9a,
0x93, 0x19, 0x8f, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xc0, 0xd4, 0x18, 0xf8, 0x6e, 0x0b, 0x00,
0x00,
}
func (m *TracesData) Marshal() (dAtA []byte, err error) {
@ -993,6 +1085,14 @@ func (m *Span) MarshalToSizedBuffer(dAtA []byte) (int, error) {
_ = i
var l int
_ = l
if m.Flags != 0 {
i -= 4
encoding_binary.LittleEndian.PutUint32(dAtA[i:], uint32(m.Flags))
i--
dAtA[i] = 0x1
i--
dAtA[i] = 0x85
}
{
size, err := m.Status.MarshalToSizedBuffer(dAtA[:i])
if err != nil {
@ -1199,6 +1299,12 @@ func (m *Span_Link) MarshalToSizedBuffer(dAtA []byte) (int, error) {
_ = i
var l int
_ = l
if m.Flags != 0 {
i -= 4
encoding_binary.LittleEndian.PutUint32(dAtA[i:], uint32(m.Flags))
i--
dAtA[i] = 0x35
}
if m.DroppedAttributesCount != 0 {
i = encodeVarintTrace(dAtA, i, uint64(m.DroppedAttributesCount))
i--
@ -1415,6 +1521,9 @@ func (m *Span) Size() (n int) {
}
l = m.Status.Size()
n += 1 + l + sovTrace(uint64(l))
if m.Flags != 0 {
n += 6
}
return n
}
@ -1466,6 +1575,9 @@ func (m *Span_Link) Size() (n int) {
if m.DroppedAttributesCount != 0 {
n += 1 + sovTrace(uint64(m.DroppedAttributesCount))
}
if m.Flags != 0 {
n += 5
}
return n
}
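The hand-written marshaling above emits the fixed32 Flags fields back to front: the key for field number 16 with wire type 5 is (16<<3)|5 = 133, whose varint encoding is the two bytes 0x85 0x01 written above, while field number 6 yields the single byte 0x35. That is also why Size adds 6 bytes for Span.Flags (2 key bytes + 4 data) and 5 for Span_Link.Flags (1 key byte + 4 data). A small sketch that reproduces the key arithmetic:

package main

import "fmt"

// protoKey varint-encodes the protobuf field key (field number << 3 | wire type).
func protoKey(fieldNumber, wireType uint32) []byte {
	key := fieldNumber<<3 | wireType
	var out []byte
	for key >= 0x80 {
		out = append(out, byte(key&0x7F)|0x80)
		key >>= 7
	}
	return append(out, byte(key))
}

func main() {
	fmt.Printf("% x\n", protoKey(16, 5)) // 85 01 -> Span.Flags
	fmt.Printf("% x\n", protoKey(6, 5))  // 35    -> Span_Link.Flags
}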
@ -2330,6 +2442,16 @@ func (m *Span) Unmarshal(dAtA []byte) error {
return err
}
iNdEx = postIndex
case 16:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Flags", wireType)
}
m.Flags = 0
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
m.Flags = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
default:
iNdEx = preIndex
skippy, err := skipTrace(dAtA[iNdEx:])
@ -2676,6 +2798,16 @@ func (m *Span_Link) Unmarshal(dAtA []byte) error {
break
}
}
case 6:
if wireType != 5 {
return fmt.Errorf("proto: wrong wireType = %d for field Flags", wireType)
}
m.Flags = 0
if (iNdEx + 4) > l {
return io.ErrUnexpectedEOF
}
m.Flags = uint32(encoding_binary.LittleEndian.Uint32(dAtA[iNdEx:]))
iNdEx += 4
default:
iNdEx = preIndex
skippy, err := skipTrace(dAtA[iNdEx:])


@ -2911,6 +2911,15 @@ func (rl *clientConnReadLoop) processWindowUpdate(f *WindowUpdateFrame) error {
fl = &cs.flow
}
if !fl.add(int32(f.Increment)) {
// For streams, the sender sends RST_STREAM with an error code of FLOW_CONTROL_ERROR
// (RFC 9113, Section 6.9.1) instead of closing the whole connection.
if cs != nil {
rl.endStreamError(cs, StreamError{
StreamID: f.StreamID,
Code: ErrCodeFlowControl,
})
return nil
}
return ConnectionError(ErrCodeFlowControl)
}
cc.cond.Broadcast()


@ -22,7 +22,7 @@ import (
const (
adcSetupURL = "https://cloud.google.com/docs/authentication/external/set-up-adc"
universeDomainDefault = "googleapis.com"
defaultUniverseDomain = "googleapis.com"
)
// Credentials holds Google credentials, including "Application Default Credentials".
@ -58,7 +58,7 @@ type Credentials struct {
// See also [The attached service account](https://cloud.google.com/docs/authentication/application-default-credentials#attached-sa).
func (c *Credentials) UniverseDomain() string {
if c.universeDomain == "" {
return universeDomainDefault
return defaultUniverseDomain
}
return c.universeDomain
}
@ -89,7 +89,7 @@ func (c *Credentials) GetUniverseDomain() (string, error) {
// computeUniverseDomain that did not set universeDomain, set the default
// universe domain.
if c.universeDomain == "" {
c.universeDomain = universeDomainDefault
c.universeDomain = defaultUniverseDomain
}
return c.universeDomain, nil
}
@ -103,7 +103,7 @@ func (c *Credentials) computeUniverseDomain() error {
if err != nil {
if _, ok := err.(metadata.NotDefinedError); ok {
// http.StatusNotFound (404)
c.universeDomain = universeDomainDefault
c.universeDomain = defaultUniverseDomain
return nil
} else {
return err
@ -287,7 +287,7 @@ func CredentialsFromJSONWithParams(ctx context.Context, jsonData []byte, params
}
// Authorized user credentials are only supported in the googleapis.com universe.
if f.Type == userCredentialsKey {
universeDomain = universeDomainDefault
universeDomain = defaultUniverseDomain
}
ts, err := f.tokenSource(ctx, params)


@ -22,91 +22,9 @@
// the other by JWTConfigFromJSON. The returned Config can be used to obtain a TokenSource or
// create an http.Client.
//
// # Workload Identity Federation
// # Workload and Workforce Identity Federation
//
// Using workload identity federation, your application can access Google Cloud
// resources from Amazon Web Services (AWS), Microsoft Azure or any identity
// provider that supports OpenID Connect (OIDC) or SAML 2.0.
// Traditionally, applications running outside Google Cloud have used service
// account keys to access Google Cloud resources. Using identity federation,
// you can allow your workload to impersonate a service account.
// This lets you access Google Cloud resources directly, eliminating the
// maintenance and security burden associated with service account keys.
//
// Follow the detailed instructions on how to configure Workload Identity Federation
// in various platforms:
//
// Amazon Web Services (AWS): https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#aws
// Microsoft Azure: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#azure
// OIDC identity provider: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#oidc
// SAML 2.0 identity provider: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#saml
//
// For OIDC and SAML providers, the library can retrieve tokens in three ways:
// from a local file location (file-sourced credentials), from a server
// (URL-sourced credentials), or from a local executable (executable-sourced
// credentials).
// For file-sourced credentials, a background process needs to be continuously
// refreshing the file location with a new OIDC/SAML token prior to expiration.
// For tokens with one hour lifetimes, the token needs to be updated in the file
// every hour. The token can be stored directly as plain text or in JSON format.
// For URL-sourced credentials, a local server needs to host a GET endpoint to
// return the OIDC/SAML token. The response can be in plain text or JSON.
// Additional required request headers can also be specified.
// For executable-sourced credentials, an application needs to be available to
// output the OIDC/SAML token and other information in a JSON format.
// For more information on how these work (and how to implement
// executable-sourced credentials), please check out:
// https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#create_a_credential_configuration
//
// Note that this library does not perform any validation on the token_url, token_info_url,
// or service_account_impersonation_url fields of the credential configuration.
// It is not recommended to use a credential configuration that you did not generate with
// the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
//
// # Workforce Identity Federation
//
// Workforce identity federation lets you use an external identity provider (IdP) to
// authenticate and authorize a workforce—a group of users, such as employees, partners,
// and contractors—using IAM, so that the users can access Google Cloud services.
// Workforce identity federation extends Google Cloud's identity capabilities to support
// syncless, attribute-based single sign on.
//
// With workforce identity federation, your workforce can access Google Cloud resources
// using an external identity provider (IdP) that supports OpenID Connect (OIDC) or
// SAML 2.0 such as Azure Active Directory (Azure AD), Active Directory Federation
// Services (AD FS), Okta, and others.
//
// Follow the detailed instructions on how to configure Workload Identity Federation
// in various platforms:
//
// Azure AD: https://cloud.google.com/iam/docs/workforce-sign-in-azure-ad
// Okta: https://cloud.google.com/iam/docs/workforce-sign-in-okta
// OIDC identity provider: https://cloud.google.com/iam/docs/configuring-workforce-identity-federation#oidc
// SAML 2.0 identity provider: https://cloud.google.com/iam/docs/configuring-workforce-identity-federation#saml
//
// For workforce identity federation, the library can retrieve tokens in three ways:
// from a local file location (file-sourced credentials), from a server
// (URL-sourced credentials), or from a local executable (executable-sourced
// credentials).
// For file-sourced credentials, a background process needs to be continuously
// refreshing the file location with a new OIDC/SAML token prior to expiration.
// For tokens with one hour lifetimes, the token needs to be updated in the file
// every hour. The token can be stored directly as plain text or in JSON format.
// For URL-sourced credentials, a local server needs to host a GET endpoint to
// return the OIDC/SAML token. The response can be in plain text or JSON.
// Additional required request headers can also be specified.
// For executable-sourced credentials, an application needs to be available to
// output the OIDC/SAML token and other information in a JSON format.
// For more information on how these work (and how to implement
// executable-sourced credentials), please check out:
// https://cloud.google.com/iam/docs/workforce-obtaining-short-lived-credentials#generate_a_configuration_file_for_non-interactive_sign-in
//
// # Security considerations
//
// Note that this library does not perform any validation on the token_url, token_info_url,
// or service_account_impersonation_url fields of the credential configuration.
// It is not recommended to use a credential configuration that you did not generate with
// the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
// For information on how to use Workload and Workforce Identity Federation, see [golang.org/x/oauth2/google/externalaccount].
//
// # Credentials
//


@ -26,22 +26,28 @@ import (
"golang.org/x/oauth2"
)
type awsSecurityCredentials struct {
// AwsSecurityCredentials models AWS security credentials.
type AwsSecurityCredentials struct {
// AccessKeyId is the AWS Access Key ID - Required.
AccessKeyID string `json:"AccessKeyID"`
// SecretAccessKey is the AWS Secret Access Key - Required.
SecretAccessKey string `json:"SecretAccessKey"`
SecurityToken string `json:"Token"`
// SessionToken is the AWS Session token. This should be provided for temporary AWS security credentials - Optional.
SessionToken string `json:"Token"`
}
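This file now exports AwsSecurityCredentials, and later hunks route region and credential lookups through an AwsSecurityCredentialsSupplier when one is configured. The following is a hedged sketch of a supplier implementation, inferred only from the method calls visible in this diff (AwsRegion and AwsSecurityCredentials, each taking a context and SupplierOptions); the import path, region, and field values are placeholders to verify against the released golang.org/x/oauth2/google/externalaccount API:

package example

import (
	"context"

	"golang.org/x/oauth2/google/externalaccount"
)

// staticAWSSupplier returns fixed credentials; a real supplier would fetch
// or refresh them from wherever the workload keeps its AWS credentials.
type staticAWSSupplier struct{}

func (staticAWSSupplier) AwsRegion(ctx context.Context, opts externalaccount.SupplierOptions) (string, error) {
	return "us-east-1", nil // placeholder region
}

func (staticAWSSupplier) AwsSecurityCredentials(ctx context.Context, opts externalaccount.SupplierOptions) (*externalaccount.AwsSecurityCredentials, error) {
	return &externalaccount.AwsSecurityCredentials{
		AccessKeyID:     "AKID-placeholder",
		SecretAccessKey: "secret-placeholder",
		// SessionToken is optional; set it only for temporary credentials.
	}, nil
}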
// awsRequestSigner is a utility class to sign http requests using a AWS V4 signature.
type awsRequestSigner struct {
RegionName string
AwsSecurityCredentials awsSecurityCredentials
AwsSecurityCredentials *AwsSecurityCredentials
}
// getenv aliases os.Getenv for testing
var getenv = os.Getenv
const (
defaultRegionalCredentialVerificationUrl = "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15"
// AWS Signature Version 4 signing algorithm identifier.
awsAlgorithm = "AWS4-HMAC-SHA256"
@ -197,8 +203,8 @@ func (rs *awsRequestSigner) SignRequest(req *http.Request) error {
signedRequest.Header.Add("host", requestHost(req))
if rs.AwsSecurityCredentials.SecurityToken != "" {
signedRequest.Header.Add(awsSecurityTokenHeader, rs.AwsSecurityCredentials.SecurityToken)
if rs.AwsSecurityCredentials.SessionToken != "" {
signedRequest.Header.Add(awsSecurityTokenHeader, rs.AwsSecurityCredentials.SessionToken)
}
if signedRequest.Header.Get("date") == "" {
@ -251,16 +257,18 @@ func (rs *awsRequestSigner) generateAuthentication(req *http.Request, timestamp
}
type awsCredentialSource struct {
EnvironmentID string
RegionURL string
RegionalCredVerificationURL string
CredVerificationURL string
IMDSv2SessionTokenURL string
TargetResource string
environmentID string
regionURL string
regionalCredVerificationURL string
credVerificationURL string
imdsv2SessionTokenURL string
targetResource string
requestSigner *awsRequestSigner
region string
ctx context.Context
client *http.Client
awsSecurityCredentialsSupplier AwsSecurityCredentialsSupplier
supplierOptions SupplierOptions
}
type awsRequestHeader struct {
@ -292,18 +300,25 @@ func canRetrieveSecurityCredentialFromEnvironment() bool {
return getenv(awsAccessKeyId) != "" && getenv(awsSecretAccessKey) != ""
}
func shouldUseMetadataServer() bool {
return !canRetrieveRegionFromEnvironment() || !canRetrieveSecurityCredentialFromEnvironment()
func (cs awsCredentialSource) shouldUseMetadataServer() bool {
return cs.awsSecurityCredentialsSupplier == nil && (!canRetrieveRegionFromEnvironment() || !canRetrieveSecurityCredentialFromEnvironment())
}
func (cs awsCredentialSource) credentialSourceType() string {
if cs.awsSecurityCredentialsSupplier != nil {
return "programmatic"
}
return "aws"
}
func (cs awsCredentialSource) subjectToken() (string, error) {
// Set Defaults
if cs.regionalCredVerificationURL == "" {
cs.regionalCredVerificationURL = defaultRegionalCredentialVerificationUrl
}
if cs.requestSigner == nil {
headers := make(map[string]string)
if shouldUseMetadataServer() {
if cs.shouldUseMetadataServer() {
awsSessionToken, err := cs.getAWSSessionToken()
if err != nil {
return "", err
@ -318,8 +333,8 @@ func (cs awsCredentialSource) subjectToken() (string, error) {
if err != nil {
return "", err
}
if cs.region, err = cs.getRegion(headers); err != nil {
cs.region, err = cs.getRegion(headers)
if err != nil {
return "", err
}
@ -331,7 +346,7 @@ func (cs awsCredentialSource) subjectToken() (string, error) {
// Generate the signed request to AWS STS GetCallerIdentity API.
// Use the required regional endpoint. Otherwise, the request will fail.
req, err := http.NewRequest("POST", strings.Replace(cs.RegionalCredVerificationURL, "{region}", cs.region, 1), nil)
req, err := http.NewRequest("POST", strings.Replace(cs.regionalCredVerificationURL, "{region}", cs.region, 1), nil)
if err != nil {
return "", err
}
@ -339,8 +354,8 @@ func (cs awsCredentialSource) subjectToken() (string, error) {
// provider, with or without the HTTPS prefix.
// Including this header as part of the signature is recommended to
// ensure data integrity.
if cs.TargetResource != "" {
req.Header.Add("x-goog-cloud-target-resource", cs.TargetResource)
if cs.targetResource != "" {
req.Header.Add("x-goog-cloud-target-resource", cs.targetResource)
}
cs.requestSigner.SignRequest(req)
@ -387,11 +402,11 @@ func (cs awsCredentialSource) subjectToken() (string, error) {
}
func (cs *awsCredentialSource) getAWSSessionToken() (string, error) {
if cs.IMDSv2SessionTokenURL == "" {
if cs.imdsv2SessionTokenURL == "" {
return "", nil
}
req, err := http.NewRequest("PUT", cs.IMDSv2SessionTokenURL, nil)
req, err := http.NewRequest("PUT", cs.imdsv2SessionTokenURL, nil)
if err != nil {
return "", err
}
@ -410,25 +425,29 @@ func (cs *awsCredentialSource) getAWSSessionToken() (string, error) {
}
if resp.StatusCode != 200 {
return "", fmt.Errorf("oauth2/google: unable to retrieve AWS session token - %s", string(respBody))
return "", fmt.Errorf("oauth2/google/externalaccount: unable to retrieve AWS session token - %s", string(respBody))
}
return string(respBody), nil
}
func (cs *awsCredentialSource) getRegion(headers map[string]string) (string, error) {
if cs.awsSecurityCredentialsSupplier != nil {
return cs.awsSecurityCredentialsSupplier.AwsRegion(cs.ctx, cs.supplierOptions)
}
if canRetrieveRegionFromEnvironment() {
if envAwsRegion := getenv(awsRegion); envAwsRegion != "" {
cs.region = envAwsRegion
return envAwsRegion, nil
}
return getenv("AWS_DEFAULT_REGION"), nil
}
if cs.RegionURL == "" {
return "", errors.New("oauth2/google: unable to determine AWS region")
if cs.regionURL == "" {
return "", errors.New("oauth2/google/externalaccount: unable to determine AWS region")
}
req, err := http.NewRequest("GET", cs.RegionURL, nil)
req, err := http.NewRequest("GET", cs.regionURL, nil)
if err != nil {
return "", err
}
@ -449,7 +468,7 @@ func (cs *awsCredentialSource) getRegion(headers map[string]string) (string, err
}
if resp.StatusCode != 200 {
return "", fmt.Errorf("oauth2/google: unable to retrieve AWS region - %s", string(respBody))
return "", fmt.Errorf("oauth2/google/externalaccount: unable to retrieve AWS region - %s", string(respBody))
}
// This endpoint will return the region in format: us-east-2b.
@ -461,12 +480,15 @@ func (cs *awsCredentialSource) getRegion(headers map[string]string) (string, err
return string(respBody[:respBodyEnd]), nil
}
func (cs *awsCredentialSource) getSecurityCredentials(headers map[string]string) (result awsSecurityCredentials, err error) {
func (cs *awsCredentialSource) getSecurityCredentials(headers map[string]string) (result *AwsSecurityCredentials, err error) {
if cs.awsSecurityCredentialsSupplier != nil {
return cs.awsSecurityCredentialsSupplier.AwsSecurityCredentials(cs.ctx, cs.supplierOptions)
}
if canRetrieveSecurityCredentialFromEnvironment() {
return awsSecurityCredentials{
return &AwsSecurityCredentials{
AccessKeyID: getenv(awsAccessKeyId),
SecretAccessKey: getenv(awsSecretAccessKey),
SecurityToken: getenv(awsSessionToken),
SessionToken: getenv(awsSessionToken),
}, nil
}
@ -481,20 +503,20 @@ func (cs *awsCredentialSource) getSecurityCredentials(headers map[string]string)
}
if credentials.AccessKeyID == "" {
return result, errors.New("oauth2/google: missing AccessKeyId credential")
return result, errors.New("oauth2/google/externalaccount: missing AccessKeyId credential")
}
if credentials.SecretAccessKey == "" {
return result, errors.New("oauth2/google: missing SecretAccessKey credential")
return result, errors.New("oauth2/google/externalaccount: missing SecretAccessKey credential")
}
return credentials, nil
return &credentials, nil
}
func (cs *awsCredentialSource) getMetadataSecurityCredentials(roleName string, headers map[string]string) (awsSecurityCredentials, error) {
var result awsSecurityCredentials
func (cs *awsCredentialSource) getMetadataSecurityCredentials(roleName string, headers map[string]string) (AwsSecurityCredentials, error) {
var result AwsSecurityCredentials
req, err := http.NewRequest("GET", fmt.Sprintf("%s/%s", cs.CredVerificationURL, roleName), nil)
req, err := http.NewRequest("GET", fmt.Sprintf("%s/%s", cs.credVerificationURL, roleName), nil)
if err != nil {
return result, err
}
@ -516,7 +538,7 @@ func (cs *awsCredentialSource) getMetadataSecurityCredentials(roleName string, h
}
if resp.StatusCode != 200 {
return result, fmt.Errorf("oauth2/google: unable to retrieve AWS security credentials - %s", string(respBody))
return result, fmt.Errorf("oauth2/google/externalaccount: unable to retrieve AWS security credentials - %s", string(respBody))
}
err = json.Unmarshal(respBody, &result)
@ -524,11 +546,11 @@ func (cs *awsCredentialSource) getMetadataSecurityCredentials(roleName string, h
}
func (cs *awsCredentialSource) getMetadataRoleName(headers map[string]string) (string, error) {
if cs.CredVerificationURL == "" {
return "", errors.New("oauth2/google: unable to determine the AWS metadata server security credentials endpoint")
if cs.credVerificationURL == "" {
return "", errors.New("oauth2/google/externalaccount: unable to determine the AWS metadata server security credentials endpoint")
}
req, err := http.NewRequest("GET", cs.CredVerificationURL, nil)
req, err := http.NewRequest("GET", cs.credVerificationURL, nil)
if err != nil {
return "", err
}
@ -549,7 +571,7 @@ func (cs *awsCredentialSource) getMetadataRoleName(headers map[string]string) (s
}
if resp.StatusCode != 200 {
return "", fmt.Errorf("oauth2/google: unable to retrieve AWS role name - %s", string(respBody))
return "", fmt.Errorf("oauth2/google/externalaccount: unable to retrieve AWS role name - %s", string(respBody))
}
return string(respBody), nil


@ -0,0 +1,484 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package externalaccount provides support for creating workload identity
federation and workforce identity federation token sources that can be
used to access Google Cloud resources from external identity providers.
# Workload Identity Federation
Using workload identity federation, your application can access Google Cloud
resources from Amazon Web Services (AWS), Microsoft Azure or any identity
provider that supports OpenID Connect (OIDC) or SAML 2.0.
Traditionally, applications running outside Google Cloud have used service
account keys to access Google Cloud resources. Using identity federation,
you can allow your workload to impersonate a service account.
This lets you access Google Cloud resources directly, eliminating the
maintenance and security burden associated with service account keys.
Follow the detailed instructions on how to configure Workload Identity Federation
in various platforms:
Amazon Web Services (AWS): https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#aws
Microsoft Azure: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-clouds#azure
OIDC identity provider: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#oidc
SAML 2.0 identity provider: https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#saml
For OIDC and SAML providers, the library can retrieve tokens in four ways:
from a local file location (file-sourced credentials), from a server
(URL-sourced credentials), from a local executable (executable-sourced
credentials), or from a user defined function that returns an OIDC or SAML token.
For file-sourced credentials, a background process needs to be continuously
refreshing the file location with a new OIDC/SAML token prior to expiration.
For tokens with one hour lifetimes, the token needs to be updated in the file
every hour. The token can be stored directly as plain text or in JSON format.
For URL-sourced credentials, a local server needs to host a GET endpoint to
return the OIDC/SAML token. The response can be in plain text or JSON.
Additional required request headers can also be specified.
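As a rough sketch only (the handler path, the port, and the readCurrentToken helper are
hypothetical, and imports are omitted), such a local endpoint could be served with:

	http.HandleFunc("/token", func(w http.ResponseWriter, r *http.Request) {
		// readCurrentToken is a placeholder for however the host refreshes
		// and stores the current OIDC/SAML token.
		io.WriteString(w, readCurrentToken())
	})
	log.Fatal(http.ListenAndServe("localhost:8080", nil))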
For executable-sourced credentials, an application needs to be available to
output the OIDC/SAML token and other information in a JSON format.
For more information on how these work (and how to implement
executable-sourced credentials), please check out:
https://cloud.google.com/iam/docs/workload-identity-federation-with-other-providers#create_a_credential_configuration
To use a custom function to supply the token, define a struct that implements the [SubjectTokenSupplier] interface for OIDC/SAML providers,
or one that implements [AwsSecurityCredentialsSupplier] for AWS providers. This can then be used when building a [Config].
The [golang.org/x/oauth2.TokenSource] created from the config using [NewTokenSource] can then be used to access Google
Cloud resources. For instance, you can create a new client from the
[cloud.google.com/go/storage] package and pass in option.WithTokenSource(yourTokenSource).
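As a minimal sketch (imports omitted; the audience, pool and provider names, and the
static credentials are hypothetical, and a real supplier should add caching), an AWS
supplier and the resulting token source might look like:

	type staticAwsSupplier struct{}

	func (staticAwsSupplier) AwsRegion(ctx context.Context, opts externalaccount.SupplierOptions) (string, error) {
		return "us-east-1", nil
	}

	func (staticAwsSupplier) AwsSecurityCredentials(ctx context.Context, opts externalaccount.SupplierOptions) (*externalaccount.AwsSecurityCredentials, error) {
		return &externalaccount.AwsSecurityCredentials{
			AccessKeyID:     "EXAMPLE_ACCESS_KEY_ID",
			SecretAccessKey: "EXAMPLE_SECRET_ACCESS_KEY",
		}, nil
	}

	conf := externalaccount.Config{
		Audience:                       "//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/example-pool/providers/example-aws",
		SubjectTokenType:               "urn:ietf:params:aws:token-type:aws4_request",
		AwsSecurityCredentialsSupplier: staticAwsSupplier{},
		Scopes:                         []string{"https://www.googleapis.com/auth/cloud-platform"},
	}
	ts, err := externalaccount.NewTokenSource(ctx, conf)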
Note that this library does not perform any validation on the token_url, token_info_url,
or service_account_impersonation_url fields of the credential configuration.
It is not recommended to use a credential configuration that you did not generate with
the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
# Workforce Identity Federation
Workforce identity federation lets you use an external identity provider (IdP) to
authenticate and authorize a workforce (a group of users, such as employees, partners,
and contractors) using IAM, so that the users can access Google Cloud services.
Workforce identity federation extends Google Cloud's identity capabilities to support
syncless, attribute-based single sign on.
With workforce identity federation, your workforce can access Google Cloud resources
using an external identity provider (IdP) that supports OpenID Connect (OIDC) or
SAML 2.0 such as Azure Active Directory (Azure AD), Active Directory Federation
Services (AD FS), Okta, and others.
Follow the detailed instructions on how to configure Workforce Identity Federation
in various platforms:
Azure AD: https://cloud.google.com/iam/docs/workforce-sign-in-azure-ad
Okta: https://cloud.google.com/iam/docs/workforce-sign-in-okta
OIDC identity provider: https://cloud.google.com/iam/docs/configuring-workforce-identity-federation#oidc
SAML 2.0 identity provider: https://cloud.google.com/iam/docs/configuring-workforce-identity-federation#saml
For workforce identity federation, the library can retrieve tokens in four ways:
from a local file location (file-sourced credentials), from a server
(URL-sourced credentials), from a local executable (executable-sourced
credentials), or from a user supplied function that returns an OIDC or SAML token.
For file-sourced credentials, a background process needs to be continuously
refreshing the file location with a new OIDC/SAML token prior to expiration.
For tokens with one hour lifetimes, the token needs to be updated in the file
every hour. The token can be stored directly as plain text or in JSON format.
For URL-sourced credentials, a local server needs to host a GET endpoint to
return the OIDC/SAML token. The response can be in plain text or JSON.
Additional required request headers can also be specified.
For executable-sourced credentials, an application needs to be available to
output the OIDC/SAML token and other information in a JSON format.
For more information on how these work (and how to implement
executable-sourced credentials), please check out:
https://cloud.google.com/iam/docs/workforce-obtaining-short-lived-credentials#generate_a_configuration_file_for_non-interactive_sign-in
To use a custom function to supply the token, define a struct that implements the [SubjectTokenSupplier] interface for OIDC/SAML providers.
This can then be used when building a [Config].
The [golang.org/x/oauth2.TokenSource] created from the config using [NewTokenSource] can then be used to access Google
Cloud resources. For instance, you can create a new client from the
[cloud.google.com/go/storage] package and pass in option.WithTokenSource(yourTokenSource).
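As a minimal sketch (imports omitted; the token file path, pool and provider names, and
the user project are hypothetical, and a real supplier should add caching), a workforce
supplier and token source might look like:

	type fileTokenSupplier struct{ path string }

	func (s fileTokenSupplier) SubjectToken(ctx context.Context, opts externalaccount.SupplierOptions) (string, error) {
		b, err := os.ReadFile(s.path)
		if err != nil {
			return "", err
		}
		return strings.TrimSpace(string(b)), nil
	}

	conf := externalaccount.Config{
		Audience:                 "//iam.googleapis.com/locations/global/workforcePools/example-pool/providers/example-provider",
		SubjectTokenType:         "urn:ietf:params:oauth:token-type:id-token",
		SubjectTokenSupplier:     fileTokenSupplier{path: "/var/run/oidc/token"},
		WorkforcePoolUserProject: "123456",
	}
	ts, err := externalaccount.NewTokenSource(ctx, conf)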
# Security considerations
Note that this library does not perform any validation on the token_url, token_info_url,
or service_account_impersonation_url fields of the credential configuration.
It is not recommended to use a credential configuration that you did not generate with
the gcloud CLI unless you verify that the URL fields point to a googleapis.com domain.
*/
package externalaccount
import (
"context"
"fmt"
"net/http"
"regexp"
"strconv"
"strings"
"time"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google/internal/impersonate"
"golang.org/x/oauth2/google/internal/stsexchange"
)
const (
universeDomainPlaceholder = "UNIVERSE_DOMAIN"
defaultTokenURL = "https://sts.UNIVERSE_DOMAIN/v1/token"
defaultUniverseDomain = "googleapis.com"
)
// now aliases time.Now for testing
var now = func() time.Time {
return time.Now().UTC()
}
// Config stores the configuration for fetching tokens with external credentials.
type Config struct {
// Audience is the Secure Token Service (STS) audience which contains the resource name for the workload
// identity pool or the workforce pool and the provider identifier in that pool. Required.
Audience string
// SubjectTokenType is the STS token type based on the Oauth2.0 token exchange spec.
// Expected values include:
// “urn:ietf:params:oauth:token-type:jwt”
// “urn:ietf:params:oauth:token-type:id-token”
// “urn:ietf:params:oauth:token-type:saml2”
// “urn:ietf:params:aws:token-type:aws4_request”
// Required.
SubjectTokenType string
// TokenURL is the STS token exchange endpoint. If not provided, will default to
// https://sts.UNIVERSE_DOMAIN/v1/token, with UNIVERSE_DOMAIN set to the
// default service domain googleapis.com unless UniverseDomain is set.
// Optional.
TokenURL string
// TokenInfoURL is the token_info endpoint used to retrieve the account related information (
// user attributes like account identifier, eg. email, username, uid, etc). This is
// needed for gCloud session account identification. Optional.
TokenInfoURL string
// ServiceAccountImpersonationURL is the URL for the service account impersonation request. This is only
// required for workload identity pools when APIs to be accessed have not integrated with UberMint. Optional.
ServiceAccountImpersonationURL string
// ServiceAccountImpersonationLifetimeSeconds is the number of seconds the service account impersonation
// token will be valid for. If not provided, it will default to 3600. Optional.
ServiceAccountImpersonationLifetimeSeconds int
// ClientSecret is currently only required if token_info endpoint also
// needs to be called with the generated GCP access token. When provided, STS will be
// called with additional basic authentication using ClientId as username and ClientSecret as password. Optional.
ClientSecret string
// ClientID is only required in conjunction with ClientSecret, as described above. Optional.
ClientID string
// CredentialSource contains the necessary information to retrieve the token itself, as well
// as some environmental information. One of SubjectTokenSupplier, AWSSecurityCredentialSupplier or
// CredentialSource must be provided. Optional.
CredentialSource *CredentialSource
// QuotaProjectID is injected by gCloud. If the value is non-empty, the Auth libraries
// will set the x-goog-user-project header which overrides the project associated with the credentials. Optional.
QuotaProjectID string
// Scopes contains the desired scopes for the returned access token. Optional.
Scopes []string
// WorkforcePoolUserProject is the workforce pool user project number when the credential
// corresponds to a workforce pool and not a workload identity pool.
// The underlying principal must still have serviceusage.services.use IAM
// permission to use the project for billing/quota. Optional.
WorkforcePoolUserProject string
// SubjectTokenSupplier is an optional token supplier for OIDC/SAML credentials.
// One of SubjectTokenSupplier, AWSSecurityCredentialSupplier or CredentialSource must be provided. Optional.
SubjectTokenSupplier SubjectTokenSupplier
// AwsSecurityCredentialsSupplier is an AWS Security Credential supplier for AWS credentials.
// One of SubjectTokenSupplier, AWSSecurityCredentialSupplier or CredentialSource must be provided. Optional.
AwsSecurityCredentialsSupplier AwsSecurityCredentialsSupplier
// UniverseDomain is the default service domain for a given Cloud universe.
// This value will be used in the default STS token URL. The default value
// is "googleapis.com". It will not be used if TokenURL is set. Optional.
UniverseDomain string
}
var (
validWorkforceAudiencePattern *regexp.Regexp = regexp.MustCompile(`//iam\.googleapis\.com/locations/[^/]+/workforcePools/`)
)
func validateWorkforceAudience(input string) bool {
return validWorkforceAudiencePattern.MatchString(input)
}
// NewTokenSource returns an external account TokenSource using the provided external account config.
func NewTokenSource(ctx context.Context, conf Config) (oauth2.TokenSource, error) {
if conf.Audience == "" {
return nil, fmt.Errorf("oauth2/google/externalaccount: Audience must be set")
}
if conf.SubjectTokenType == "" {
return nil, fmt.Errorf("oauth2/google/externalaccount: Subject token type must be set")
}
if conf.WorkforcePoolUserProject != "" {
valid := validateWorkforceAudience(conf.Audience)
if !valid {
return nil, fmt.Errorf("oauth2/google/externalaccount: Workforce pool user project should not be set for non-workforce pool credentials")
}
}
count := 0
if conf.CredentialSource != nil {
count++
}
if conf.SubjectTokenSupplier != nil {
count++
}
if conf.AwsSecurityCredentialsSupplier != nil {
count++
}
if count == 0 {
return nil, fmt.Errorf("oauth2/google/externalaccount: One of CredentialSource, SubjectTokenSupplier, or AwsSecurityCredentialsSupplier must be set")
}
if count > 1 {
return nil, fmt.Errorf("oauth2/google/externalaccount: Only one of CredentialSource, SubjectTokenSupplier, or AwsSecurityCredentialsSupplier must be set")
}
return conf.tokenSource(ctx, "https")
}
// tokenSource is a private function that's directly called by some of the tests,
// because the unit test URLs are mocked, and would otherwise fail the
// validity check.
func (c *Config) tokenSource(ctx context.Context, scheme string) (oauth2.TokenSource, error) {
ts := tokenSource{
ctx: ctx,
conf: c,
}
if c.ServiceAccountImpersonationURL == "" {
return oauth2.ReuseTokenSource(nil, ts), nil
}
scopes := c.Scopes
ts.conf.Scopes = []string{"https://www.googleapis.com/auth/cloud-platform"}
imp := impersonate.ImpersonateTokenSource{
Ctx: ctx,
URL: c.ServiceAccountImpersonationURL,
Scopes: scopes,
Ts: oauth2.ReuseTokenSource(nil, ts),
TokenLifetimeSeconds: c.ServiceAccountImpersonationLifetimeSeconds,
}
return oauth2.ReuseTokenSource(nil, imp), nil
}
// Subject token file types.
const (
fileTypeText = "text"
fileTypeJSON = "json"
)
// Format contains information needed to retrieve a subject token for URL or File sourced credentials.
type Format struct {
// Type should be either "text" or "json". This determines whether the file or URL sourced credentials
// expect a simple text subject token or if the subject token will be contained in a JSON object.
// When not provided "text" type is assumed.
Type string `json:"type"`
// SubjectTokenFieldName is only required for JSON format. This is the field name that the credentials will check
// for the subject token in the file or URL response. This would be "access_token" for azure.
SubjectTokenFieldName string `json:"subject_token_field_name"`
}
// CredentialSource stores the information necessary to retrieve the credentials for the STS exchange.
type CredentialSource struct {
// File is the location for file sourced credentials.
// One field amongst File, URL, Executable, or EnvironmentID should be provided, depending on the kind of credential in question.
File string `json:"file"`
// URL is the URL to call for URL sourced credentials.
// One field amongst File, URL, Executable, or EnvironmentID should be provided, depending on the kind of credential in question.
URL string `json:"url"`
// Headers are the headers to attach to the request for URL sourced credentials.
Headers map[string]string `json:"headers"`
// Executable is the configuration object for executable sourced credentials.
// One field amongst File, URL, Executable, or EnvironmentID should be provided, depending on the kind of credential in question.
Executable *ExecutableConfig `json:"executable"`
// EnvironmentID is the EnvironmentID used for AWS sourced credentials. This should start with "AWS".
// One field amongst File, URL, Executable, or EnvironmentID should be provided, depending on the kind of credential in question.
EnvironmentID string `json:"environment_id"`
// RegionURL is the metadata URL to retrieve the region from for EC2 AWS credentials.
RegionURL string `json:"region_url"`
// RegionalCredVerificationURL is the AWS regional credential verification URL, which defaults to
// "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15" if not provided.
RegionalCredVerificationURL string `json:"regional_cred_verification_url"`
// IMDSv2SessionTokenURL is the URL to retrieve the session token when using IMDSv2 in AWS.
IMDSv2SessionTokenURL string `json:"imdsv2_session_token_url"`
// Format is the format type for the subject token. Used for File and URL sourced credentials. Expected values are "text" or "json".
Format Format `json:"format"`
}
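// As a minimal sketch (the audience and token file path are hypothetical), a
// file-sourced credential configuration built in code would combine
// CredentialSource and Format like this:
//
//	conf := externalaccount.Config{
//		Audience:         "//iam.googleapis.com/projects/123/locations/global/workloadIdentityPools/example-pool/providers/example-oidc",
//		SubjectTokenType: "urn:ietf:params:oauth:token-type:jwt",
//		CredentialSource: &externalaccount.CredentialSource{
//			File:   "/var/run/secrets/oidc/token",
//			Format: externalaccount.Format{Type: "text"},
//		},
//	}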
// ExecutableConfig contains information needed for executable sourced credentials.
type ExecutableConfig struct {
// Command is the full command to run to retrieve the subject token.
// This can include arguments. Must be an absolute path for the program. Required.
Command string `json:"command"`
// TimeoutMillis is the timeout duration, in milliseconds. Defaults to 30000 milliseconds when not provided. Optional.
TimeoutMillis *int `json:"timeout_millis"`
// OutputFile is the absolute path to the output file where the executable will cache the response.
// If specified the auth libraries will first check this location before running the executable. Optional.
OutputFile string `json:"output_file"`
}
// SubjectTokenSupplier can be used to supply a subject token to exchange for a GCP access token.
type SubjectTokenSupplier interface {
// SubjectToken should return a valid subject token or an error.
// The external account token source does not cache the returned subject token, so caching
// logic should be implemented in the supplier to prevent multiple requests for the same subject token.
SubjectToken(ctx context.Context, options SupplierOptions) (string, error)
}
// AwsSecurityCredentialsSupplier can be used to supply AwsSecurityCredentials and an AWS Region to
// exchange for a GCP access token.
type AwsSecurityCredentialsSupplier interface {
// AwsRegion should return the AWS region or an error.
AwsRegion(ctx context.Context, options SupplierOptions) (string, error)
// AwsSecurityCredentials should return a valid set of AwsSecurityCredentials or an error.
// The external account token source does not cache the returned security credentials, so caching
// logic should be implemented in the supplier to prevent multiple requests for the same security credentials.
AwsSecurityCredentials(ctx context.Context, options SupplierOptions) (*AwsSecurityCredentials, error)
}
// SupplierOptions contains information about the requested subject token or AWS security credentials from the
// Google external account credential.
type SupplierOptions struct {
// Audience is the requested audience for the external account credential.
Audience string
// Subject token type is the requested subject token type for the external account credential. Expected values include:
// “urn:ietf:params:oauth:token-type:jwt”
// “urn:ietf:params:oauth:token-type:id-token”
// “urn:ietf:params:oauth:token-type:saml2”
// “urn:ietf:params:aws:token-type:aws4_request”
SubjectTokenType string
}
// tokenURL returns the default STS token endpoint with the configured universe
// domain.
func (c *Config) tokenURL() string {
if c.UniverseDomain == "" {
return strings.Replace(defaultTokenURL, universeDomainPlaceholder, defaultUniverseDomain, 1)
}
return strings.Replace(defaultTokenURL, universeDomainPlaceholder, c.UniverseDomain, 1)
}
// parse determines the type of CredentialSource needed.
func (c *Config) parse(ctx context.Context) (baseCredentialSource, error) {
//set Defaults
if c.TokenURL == "" {
c.TokenURL = c.tokenURL()
}
supplierOptions := SupplierOptions{Audience: c.Audience, SubjectTokenType: c.SubjectTokenType}
if c.AwsSecurityCredentialsSupplier != nil {
awsCredSource := awsCredentialSource{
awsSecurityCredentialsSupplier: c.AwsSecurityCredentialsSupplier,
targetResource: c.Audience,
supplierOptions: supplierOptions,
ctx: ctx,
}
return awsCredSource, nil
} else if c.SubjectTokenSupplier != nil {
return programmaticRefreshCredentialSource{subjectTokenSupplier: c.SubjectTokenSupplier, supplierOptions: supplierOptions, ctx: ctx}, nil
} else if len(c.CredentialSource.EnvironmentID) > 3 && c.CredentialSource.EnvironmentID[:3] == "aws" {
if awsVersion, err := strconv.Atoi(c.CredentialSource.EnvironmentID[3:]); err == nil {
if awsVersion != 1 {
return nil, fmt.Errorf("oauth2/google/externalaccount: aws version '%d' is not supported in the current build", awsVersion)
}
awsCredSource := awsCredentialSource{
environmentID: c.CredentialSource.EnvironmentID,
regionURL: c.CredentialSource.RegionURL,
regionalCredVerificationURL: c.CredentialSource.RegionalCredVerificationURL,
credVerificationURL: c.CredentialSource.URL,
targetResource: c.Audience,
ctx: ctx,
}
if c.CredentialSource.IMDSv2SessionTokenURL != "" {
awsCredSource.imdsv2SessionTokenURL = c.CredentialSource.IMDSv2SessionTokenURL
}
return awsCredSource, nil
}
} else if c.CredentialSource.File != "" {
return fileCredentialSource{File: c.CredentialSource.File, Format: c.CredentialSource.Format}, nil
} else if c.CredentialSource.URL != "" {
return urlCredentialSource{URL: c.CredentialSource.URL, Headers: c.CredentialSource.Headers, Format: c.CredentialSource.Format, ctx: ctx}, nil
} else if c.CredentialSource.Executable != nil {
return createExecutableCredential(ctx, c.CredentialSource.Executable, c)
}
return nil, fmt.Errorf("oauth2/google/externalaccount: unable to parse credential source")
}
type baseCredentialSource interface {
credentialSourceType() string
subjectToken() (string, error)
}
// tokenSource is the source that handles external credentials. It is used to retrieve Tokens.
type tokenSource struct {
ctx context.Context
conf *Config
}
func getMetricsHeaderValue(conf *Config, credSource baseCredentialSource) string {
return fmt.Sprintf("gl-go/%s auth/%s google-byoid-sdk source/%s sa-impersonation/%t config-lifetime/%t",
goVersion(),
"unknown",
credSource.credentialSourceType(),
conf.ServiceAccountImpersonationURL != "",
conf.ServiceAccountImpersonationLifetimeSeconds != 0)
}
// Token allows tokenSource to conform to the oauth2.TokenSource interface.
func (ts tokenSource) Token() (*oauth2.Token, error) {
conf := ts.conf
credSource, err := conf.parse(ts.ctx)
if err != nil {
return nil, err
}
subjectToken, err := credSource.subjectToken()
if err != nil {
return nil, err
}
stsRequest := stsexchange.TokenExchangeRequest{
GrantType: "urn:ietf:params:oauth:grant-type:token-exchange",
Audience: conf.Audience,
Scope: conf.Scopes,
RequestedTokenType: "urn:ietf:params:oauth:token-type:access_token",
SubjectToken: subjectToken,
SubjectTokenType: conf.SubjectTokenType,
}
header := make(http.Header)
header.Add("Content-Type", "application/x-www-form-urlencoded")
header.Add("x-goog-api-client", getMetricsHeaderValue(conf, credSource))
clientAuth := stsexchange.ClientAuthentication{
AuthStyle: oauth2.AuthStyleInHeader,
ClientID: conf.ClientID,
ClientSecret: conf.ClientSecret,
}
var options map[string]interface{}
// Do not pass workforce_pool_user_project when client authentication is used.
// The client ID is sufficient for determining the user project.
if conf.WorkforcePoolUserProject != "" && conf.ClientID == "" {
options = map[string]interface{}{
"userProject": conf.WorkforcePoolUserProject,
}
}
stsResp, err := stsexchange.ExchangeToken(ts.ctx, conf.TokenURL, &stsRequest, clientAuth, header, options)
if err != nil {
return nil, err
}
accessToken := &oauth2.Token{
AccessToken: stsResp.AccessToken,
TokenType: stsResp.TokenType,
}
if stsResp.ExpiresIn < 0 {
return nil, fmt.Errorf("oauth2/google/externalaccount: got invalid expiry from security token service")
} else if stsResp.ExpiresIn >= 0 {
accessToken.Expiry = now().Add(time.Duration(stsResp.ExpiresIn) * time.Second)
}
if stsResp.RefreshToken != "" {
accessToken.RefreshToken = stsResp.RefreshToken
}
return accessToken, nil
}


@ -39,51 +39,51 @@ func (nce nonCacheableError) Error() string {
}
func missingFieldError(source, field string) error {
return fmt.Errorf("oauth2/google: %v missing `%q` field", source, field)
return fmt.Errorf("oauth2/google/externalaccount: %v missing `%q` field", source, field)
}
func jsonParsingError(source, data string) error {
return fmt.Errorf("oauth2/google: unable to parse %v\nResponse: %v", source, data)
return fmt.Errorf("oauth2/google/externalaccount: unable to parse %v\nResponse: %v", source, data)
}
func malformedFailureError() error {
return nonCacheableError{"oauth2/google: response must include `error` and `message` fields when unsuccessful"}
return nonCacheableError{"oauth2/google/externalaccount: response must include `error` and `message` fields when unsuccessful"}
}
func userDefinedError(code, message string) error {
return nonCacheableError{fmt.Sprintf("oauth2/google: response contains unsuccessful response: (%v) %v", code, message)}
return nonCacheableError{fmt.Sprintf("oauth2/google/externalaccount: response contains unsuccessful response: (%v) %v", code, message)}
}
func unsupportedVersionError(source string, version int) error {
return fmt.Errorf("oauth2/google: %v contains unsupported version: %v", source, version)
return fmt.Errorf("oauth2/google/externalaccount: %v contains unsupported version: %v", source, version)
}
func tokenExpiredError() error {
return nonCacheableError{"oauth2/google: the token returned by the executable is expired"}
return nonCacheableError{"oauth2/google/externalaccount: the token returned by the executable is expired"}
}
func tokenTypeError(source string) error {
return fmt.Errorf("oauth2/google: %v contains unsupported token type", source)
return fmt.Errorf("oauth2/google/externalaccount: %v contains unsupported token type", source)
}
func exitCodeError(exitCode int) error {
return fmt.Errorf("oauth2/google: executable command failed with exit code %v", exitCode)
return fmt.Errorf("oauth2/google/externalaccount: executable command failed with exit code %v", exitCode)
}
func executableError(err error) error {
return fmt.Errorf("oauth2/google: executable command failed: %v", err)
return fmt.Errorf("oauth2/google/externalaccount: executable command failed: %v", err)
}
func executablesDisallowedError() error {
return errors.New("oauth2/google: executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run")
return errors.New("oauth2/google/externalaccount: executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run")
}
func timeoutRangeError() error {
return errors.New("oauth2/google: invalid `timeout_millis` field — executable timeout must be between 5 and 120 seconds")
return errors.New("oauth2/google/externalaccount: invalid `timeout_millis` field — executable timeout must be between 5 and 120 seconds")
}
func commandMissingError() error {
return errors.New("oauth2/google: missing `command` field — executable command must be provided")
return errors.New("oauth2/google/externalaccount: missing `command` field — executable command must be provided")
}
type environment interface {
@ -146,7 +146,7 @@ type executableCredentialSource struct {
// CreateExecutableCredential creates an executableCredentialSource given an ExecutableConfig.
// It also performs defaulting and type conversions.
func CreateExecutableCredential(ctx context.Context, ec *ExecutableConfig, config *Config) (executableCredentialSource, error) {
func createExecutableCredential(ctx context.Context, ec *ExecutableConfig, config *Config) (executableCredentialSource, error) {
if ec.Command == "" {
return executableCredentialSource{}, commandMissingError()
}


@ -16,7 +16,7 @@ import (
type fileCredentialSource struct {
File string
Format format
Format Format
}
func (cs fileCredentialSource) credentialSourceType() string {
@ -26,12 +26,12 @@ func (cs fileCredentialSource) credentialSourceType() string {
func (cs fileCredentialSource) subjectToken() (string, error) {
tokenFile, err := os.Open(cs.File)
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to open credential file %q", cs.File)
return "", fmt.Errorf("oauth2/google/externalaccount: failed to open credential file %q", cs.File)
}
defer tokenFile.Close()
tokenBytes, err := ioutil.ReadAll(io.LimitReader(tokenFile, 1<<20))
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to read credential file: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: failed to read credential file: %v", err)
}
tokenBytes = bytes.TrimSpace(tokenBytes)
switch cs.Format.Type {
@ -39,15 +39,15 @@ func (cs fileCredentialSource) subjectToken() (string, error) {
jsonData := make(map[string]interface{})
err = json.Unmarshal(tokenBytes, &jsonData)
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to unmarshal subject token file: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: failed to unmarshal subject token file: %v", err)
}
val, ok := jsonData[cs.Format.SubjectTokenFieldName]
if !ok {
return "", errors.New("oauth2/google: provided subject_token_field_name not found in credentials")
return "", errors.New("oauth2/google/externalaccount: provided subject_token_field_name not found in credentials")
}
token, ok := val.(string)
if !ok {
return "", errors.New("oauth2/google: improperly formatted subject token")
return "", errors.New("oauth2/google/externalaccount: improperly formatted subject token")
}
return token, nil
case "text":
@ -55,7 +55,7 @@ func (cs fileCredentialSource) subjectToken() (string, error) {
case "":
return string(tokenBytes), nil
default:
return "", errors.New("oauth2/google: invalid credential_source file format type")
return "", errors.New("oauth2/google/externalaccount: invalid credential_source file format type")
}
}


@ -0,0 +1,21 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package externalaccount
import "context"
type programmaticRefreshCredentialSource struct {
supplierOptions SupplierOptions
subjectTokenSupplier SubjectTokenSupplier
ctx context.Context
}
func (cs programmaticRefreshCredentialSource) credentialSourceType() string {
return "programmatic"
}
func (cs programmaticRefreshCredentialSource) subjectToken() (string, error) {
return cs.subjectTokenSupplier.SubjectToken(cs.ctx, cs.supplierOptions)
}


@ -19,7 +19,7 @@ import (
type urlCredentialSource struct {
URL string
Headers map[string]string
Format format
Format Format
ctx context.Context
}
@ -31,7 +31,7 @@ func (cs urlCredentialSource) subjectToken() (string, error) {
client := oauth2.NewClient(cs.ctx, nil)
req, err := http.NewRequest("GET", cs.URL, nil)
if err != nil {
return "", fmt.Errorf("oauth2/google: HTTP request for URL-sourced credential failed: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: HTTP request for URL-sourced credential failed: %v", err)
}
req = req.WithContext(cs.ctx)
@ -40,16 +40,16 @@ func (cs urlCredentialSource) subjectToken() (string, error) {
}
resp, err := client.Do(req)
if err != nil {
return "", fmt.Errorf("oauth2/google: invalid response when retrieving subject token: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: invalid response when retrieving subject token: %v", err)
}
defer resp.Body.Close()
respBody, err := ioutil.ReadAll(io.LimitReader(resp.Body, 1<<20))
if err != nil {
return "", fmt.Errorf("oauth2/google: invalid body in subject token URL query: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: invalid body in subject token URL query: %v", err)
}
if c := resp.StatusCode; c < 200 || c > 299 {
return "", fmt.Errorf("oauth2/google: status code %d: %s", c, respBody)
return "", fmt.Errorf("oauth2/google/externalaccount: status code %d: %s", c, respBody)
}
switch cs.Format.Type {
@ -57,15 +57,15 @@ func (cs urlCredentialSource) subjectToken() (string, error) {
jsonData := make(map[string]interface{})
err = json.Unmarshal(respBody, &jsonData)
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to unmarshal subject token file: %v", err)
return "", fmt.Errorf("oauth2/google/externalaccount: failed to unmarshal subject token file: %v", err)
}
val, ok := jsonData[cs.Format.SubjectTokenFieldName]
if !ok {
return "", errors.New("oauth2/google: provided subject_token_field_name not found in credentials")
return "", errors.New("oauth2/google/externalaccount: provided subject_token_field_name not found in credentials")
}
token, ok := val.(string)
if !ok {
return "", errors.New("oauth2/google: improperly formatted subject token")
return "", errors.New("oauth2/google/externalaccount: improperly formatted subject token")
}
return token, nil
case "text":
@ -73,7 +73,7 @@ func (cs urlCredentialSource) subjectToken() (string, error) {
case "":
return string(respBody), nil
default:
return "", errors.New("oauth2/google: invalid credential_source file format type")
return "", errors.New("oauth2/google/externalaccount: invalid credential_source file format type")
}
}


@ -15,8 +15,9 @@ import (
"cloud.google.com/go/compute/metadata"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google/internal/externalaccount"
"golang.org/x/oauth2/google/externalaccount"
"golang.org/x/oauth2/google/internal/externalaccountauthorizeduser"
"golang.org/x/oauth2/google/internal/impersonate"
"golang.org/x/oauth2/jwt"
)
@ -200,12 +201,12 @@ func (f *credentialsFile) tokenSource(ctx context.Context, params CredentialsPar
ServiceAccountImpersonationLifetimeSeconds: f.ServiceAccountImpersonation.TokenLifetimeSeconds,
ClientSecret: f.ClientSecret,
ClientID: f.ClientID,
CredentialSource: f.CredentialSource,
CredentialSource: &f.CredentialSource,
QuotaProjectID: f.QuotaProjectID,
Scopes: params.Scopes,
WorkforcePoolUserProject: f.WorkforcePoolUserProject,
}
return cfg.TokenSource(ctx)
return externalaccount.NewTokenSource(ctx, *cfg)
case externalAccountAuthorizedUserKey:
cfg := &externalaccountauthorizeduser.Config{
Audience: f.Audience,
@ -228,7 +229,7 @@ func (f *credentialsFile) tokenSource(ctx context.Context, params CredentialsPar
if err != nil {
return nil, err
}
imp := externalaccount.ImpersonateTokenSource{
imp := impersonate.ImpersonateTokenSource{
Ctx: ctx,
URL: f.ServiceAccountImpersonationURL,
Scopes: params.Scopes,


@ -1,254 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package externalaccount
import (
"context"
"fmt"
"net/http"
"regexp"
"strconv"
"time"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google/internal/stsexchange"
)
// now aliases time.Now for testing
var now = func() time.Time {
return time.Now().UTC()
}
// Config stores the configuration for fetching tokens with external credentials.
type Config struct {
// Audience is the Secure Token Service (STS) audience which contains the resource name for the workload
// identity pool or the workforce pool and the provider identifier in that pool.
Audience string
// SubjectTokenType is the STS token type based on the Oauth2.0 token exchange spec
// e.g. `urn:ietf:params:oauth:token-type:jwt`.
SubjectTokenType string
// TokenURL is the STS token exchange endpoint.
TokenURL string
// TokenInfoURL is the token_info endpoint used to retrieve the account related information (
// user attributes like account identifier, eg. email, username, uid, etc). This is
// needed for gCloud session account identification.
TokenInfoURL string
// ServiceAccountImpersonationURL is the URL for the service account impersonation request. This is only
// required for workload identity pools when APIs to be accessed have not integrated with UberMint.
ServiceAccountImpersonationURL string
// ServiceAccountImpersonationLifetimeSeconds is the number of seconds the service account impersonation
// token will be valid for.
ServiceAccountImpersonationLifetimeSeconds int
// ClientSecret is currently only required if token_info endpoint also
// needs to be called with the generated GCP access token. When provided, STS will be
// called with additional basic authentication using client_id as username and client_secret as password.
ClientSecret string
// ClientID is only required in conjunction with ClientSecret, as described above.
ClientID string
// CredentialSource contains the necessary information to retrieve the token itself, as well
// as some environmental information.
CredentialSource CredentialSource
// QuotaProjectID is injected by gCloud. If the value is non-empty, the Auth libraries
// will set the x-goog-user-project which overrides the project associated with the credentials.
QuotaProjectID string
// Scopes contains the desired scopes for the returned access token.
Scopes []string
// The optional workforce pool user project number when the credential
// corresponds to a workforce pool and not a workload identity pool.
// The underlying principal must still have serviceusage.services.use IAM
// permission to use the project for billing/quota.
WorkforcePoolUserProject string
}
var (
validWorkforceAudiencePattern *regexp.Regexp = regexp.MustCompile(`//iam\.googleapis\.com/locations/[^/]+/workforcePools/`)
)
func validateWorkforceAudience(input string) bool {
return validWorkforceAudiencePattern.MatchString(input)
}
// TokenSource Returns an external account TokenSource struct. This is to be called by package google to construct a google.Credentials.
func (c *Config) TokenSource(ctx context.Context) (oauth2.TokenSource, error) {
return c.tokenSource(ctx, "https")
}
// tokenSource is a private function that's directly called by some of the tests,
// because the unit test URLs are mocked, and would otherwise fail the
// validity check.
func (c *Config) tokenSource(ctx context.Context, scheme string) (oauth2.TokenSource, error) {
if c.WorkforcePoolUserProject != "" {
valid := validateWorkforceAudience(c.Audience)
if !valid {
return nil, fmt.Errorf("oauth2/google: workforce_pool_user_project should not be set for non-workforce pool credentials")
}
}
ts := tokenSource{
ctx: ctx,
conf: c,
}
if c.ServiceAccountImpersonationURL == "" {
return oauth2.ReuseTokenSource(nil, ts), nil
}
scopes := c.Scopes
ts.conf.Scopes = []string{"https://www.googleapis.com/auth/cloud-platform"}
imp := ImpersonateTokenSource{
Ctx: ctx,
URL: c.ServiceAccountImpersonationURL,
Scopes: scopes,
Ts: oauth2.ReuseTokenSource(nil, ts),
TokenLifetimeSeconds: c.ServiceAccountImpersonationLifetimeSeconds,
}
return oauth2.ReuseTokenSource(nil, imp), nil
}
// Subject token file types.
const (
fileTypeText = "text"
fileTypeJSON = "json"
)
type format struct {
// Type is either "text" or "json". When not provided "text" type is assumed.
Type string `json:"type"`
// SubjectTokenFieldName is only required for JSON format. This would be "access_token" for azure.
SubjectTokenFieldName string `json:"subject_token_field_name"`
}
// CredentialSource stores the information necessary to retrieve the credentials for the STS exchange.
// One field amongst File, URL, and Executable should be filled, depending on the kind of credential in question.
// The EnvironmentID should start with AWS if being used for an AWS credential.
type CredentialSource struct {
File string `json:"file"`
URL string `json:"url"`
Headers map[string]string `json:"headers"`
Executable *ExecutableConfig `json:"executable"`
EnvironmentID string `json:"environment_id"`
RegionURL string `json:"region_url"`
RegionalCredVerificationURL string `json:"regional_cred_verification_url"`
CredVerificationURL string `json:"cred_verification_url"`
IMDSv2SessionTokenURL string `json:"imdsv2_session_token_url"`
Format format `json:"format"`
}
type ExecutableConfig struct {
Command string `json:"command"`
TimeoutMillis *int `json:"timeout_millis"`
OutputFile string `json:"output_file"`
}
// parse determines the type of CredentialSource needed.
func (c *Config) parse(ctx context.Context) (baseCredentialSource, error) {
if len(c.CredentialSource.EnvironmentID) > 3 && c.CredentialSource.EnvironmentID[:3] == "aws" {
if awsVersion, err := strconv.Atoi(c.CredentialSource.EnvironmentID[3:]); err == nil {
if awsVersion != 1 {
return nil, fmt.Errorf("oauth2/google: aws version '%d' is not supported in the current build", awsVersion)
}
awsCredSource := awsCredentialSource{
EnvironmentID: c.CredentialSource.EnvironmentID,
RegionURL: c.CredentialSource.RegionURL,
RegionalCredVerificationURL: c.CredentialSource.RegionalCredVerificationURL,
CredVerificationURL: c.CredentialSource.URL,
TargetResource: c.Audience,
ctx: ctx,
}
if c.CredentialSource.IMDSv2SessionTokenURL != "" {
awsCredSource.IMDSv2SessionTokenURL = c.CredentialSource.IMDSv2SessionTokenURL
}
return awsCredSource, nil
}
} else if c.CredentialSource.File != "" {
return fileCredentialSource{File: c.CredentialSource.File, Format: c.CredentialSource.Format}, nil
} else if c.CredentialSource.URL != "" {
return urlCredentialSource{URL: c.CredentialSource.URL, Headers: c.CredentialSource.Headers, Format: c.CredentialSource.Format, ctx: ctx}, nil
} else if c.CredentialSource.Executable != nil {
return CreateExecutableCredential(ctx, c.CredentialSource.Executable, c)
}
return nil, fmt.Errorf("oauth2/google: unable to parse credential source")
}
type baseCredentialSource interface {
credentialSourceType() string
subjectToken() (string, error)
}
// tokenSource is the source that handles external credentials. It is used to retrieve Tokens.
type tokenSource struct {
ctx context.Context
conf *Config
}
func getMetricsHeaderValue(conf *Config, credSource baseCredentialSource) string {
return fmt.Sprintf("gl-go/%s auth/%s google-byoid-sdk source/%s sa-impersonation/%t config-lifetime/%t",
goVersion(),
"unknown",
credSource.credentialSourceType(),
conf.ServiceAccountImpersonationURL != "",
conf.ServiceAccountImpersonationLifetimeSeconds != 0)
}
// Token allows tokenSource to conform to the oauth2.TokenSource interface.
func (ts tokenSource) Token() (*oauth2.Token, error) {
conf := ts.conf
credSource, err := conf.parse(ts.ctx)
if err != nil {
return nil, err
}
subjectToken, err := credSource.subjectToken()
if err != nil {
return nil, err
}
stsRequest := stsexchange.TokenExchangeRequest{
GrantType: "urn:ietf:params:oauth:grant-type:token-exchange",
Audience: conf.Audience,
Scope: conf.Scopes,
RequestedTokenType: "urn:ietf:params:oauth:token-type:access_token",
SubjectToken: subjectToken,
SubjectTokenType: conf.SubjectTokenType,
}
header := make(http.Header)
header.Add("Content-Type", "application/x-www-form-urlencoded")
header.Add("x-goog-api-client", getMetricsHeaderValue(conf, credSource))
clientAuth := stsexchange.ClientAuthentication{
AuthStyle: oauth2.AuthStyleInHeader,
ClientID: conf.ClientID,
ClientSecret: conf.ClientSecret,
}
var options map[string]interface{}
// Do not pass workforce_pool_user_project when client authentication is used.
// The client ID is sufficient for determining the user project.
if conf.WorkforcePoolUserProject != "" && conf.ClientID == "" {
options = map[string]interface{}{
"userProject": conf.WorkforcePoolUserProject,
}
}
stsResp, err := stsexchange.ExchangeToken(ts.ctx, conf.TokenURL, &stsRequest, clientAuth, header, options)
if err != nil {
return nil, err
}
accessToken := &oauth2.Token{
AccessToken: stsResp.AccessToken,
TokenType: stsResp.TokenType,
}
if stsResp.ExpiresIn < 0 {
return nil, fmt.Errorf("oauth2/google: got invalid expiry from security token service")
} else if stsResp.ExpiresIn >= 0 {
accessToken.Expiry = now().Add(time.Duration(stsResp.ExpiresIn) * time.Second)
}
if stsResp.RefreshToken != "" {
accessToken.RefreshToken = stsResp.RefreshToken
}
return accessToken, nil
}


@ -1,18 +0,0 @@
// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package externalaccount
import "fmt"
// Error for handling OAuth related error responses as stated in rfc6749#5.2.
type Error struct {
Code string
URI string
Description string
}
func (err *Error) Error() string {
return fmt.Sprintf("got error code %s from %s: %s", err.Code, err.URI, err.Description)
}


@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package externalaccount
package impersonate
import (
"bytes"


@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build (aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos) && go1.9
//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
package unix


@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build darwin && go1.12
//go:build darwin
package unix


@ -13,6 +13,7 @@
package unix
import (
"errors"
"sync"
"unsafe"
)
@ -169,25 +170,26 @@ func Getfsstat(buf []Statfs_t, flags int) (n int, err error) {
func Uname(uname *Utsname) error {
mib := []_C_int{CTL_KERN, KERN_OSTYPE}
n := unsafe.Sizeof(uname.Sysname)
if err := sysctl(mib, &uname.Sysname[0], &n, nil, 0); err != nil {
// Suppress ENOMEM errors to be compatible with the C library __xuname() implementation.
if err := sysctl(mib, &uname.Sysname[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) {
return err
}
mib = []_C_int{CTL_KERN, KERN_HOSTNAME}
n = unsafe.Sizeof(uname.Nodename)
if err := sysctl(mib, &uname.Nodename[0], &n, nil, 0); err != nil {
if err := sysctl(mib, &uname.Nodename[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) {
return err
}
mib = []_C_int{CTL_KERN, KERN_OSRELEASE}
n = unsafe.Sizeof(uname.Release)
if err := sysctl(mib, &uname.Release[0], &n, nil, 0); err != nil {
if err := sysctl(mib, &uname.Release[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) {
return err
}
mib = []_C_int{CTL_KERN, KERN_VERSION}
n = unsafe.Sizeof(uname.Version)
if err := sysctl(mib, &uname.Version[0], &n, nil, 0); err != nil {
if err := sysctl(mib, &uname.Version[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) {
return err
}
@ -205,7 +207,7 @@ func Uname(uname *Utsname) error {
mib = []_C_int{CTL_HW, HW_MACHINE}
n = unsafe.Sizeof(uname.Machine)
if err := sysctl(mib, &uname.Machine[0], &n, nil, 0); err != nil {
if err := sysctl(mib, &uname.Machine[0], &n, nil, 0); err != nil && !errors.Is(err, ENOMEM) {
return err
}


@ -1849,6 +1849,105 @@ func Dup2(oldfd, newfd int) error {
//sys Fsmount(fd int, flags int, mountAttrs int) (fsfd int, err error)
//sys Fsopen(fsName string, flags int) (fd int, err error)
//sys Fspick(dirfd int, pathName string, flags int) (fd int, err error)
//sys fsconfig(fd int, cmd uint, key *byte, value *byte, aux int) (err error)
func fsconfigCommon(fd int, cmd uint, key string, value *byte, aux int) (err error) {
var keyp *byte
if keyp, err = BytePtrFromString(key); err != nil {
return
}
return fsconfig(fd, cmd, keyp, value, aux)
}
// FsconfigSetFlag is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_FLAG.
//
// fd is the filesystem context to act upon.
// key is the parameter key to set.
func FsconfigSetFlag(fd int, key string) (err error) {
return fsconfigCommon(fd, FSCONFIG_SET_FLAG, key, nil, 0)
}
// FsconfigSetString is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_STRING.
//
// fd is the filesystem context to act upon.
// key is the parameter key to set.
// value is the parameter value to set.
func FsconfigSetString(fd int, key string, value string) (err error) {
var valuep *byte
if valuep, err = BytePtrFromString(value); err != nil {
return
}
return fsconfigCommon(fd, FSCONFIG_SET_STRING, key, valuep, 0)
}
// FsconfigSetBinary is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_BINARY.
//
// fd is the filesystem context to act upon.
// key is the parameter key to set.
// value is the parameter value to set.
func FsconfigSetBinary(fd int, key string, value []byte) (err error) {
if len(value) == 0 {
return EINVAL
}
return fsconfigCommon(fd, FSCONFIG_SET_BINARY, key, &value[0], len(value))
}
// FsconfigSetPath is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_PATH.
//
// fd is the filesystem context to act upon.
// key is the parameter key to set.
// path is a non-empty path for specified key.
// atfd is a file descriptor at which to start lookup from or AT_FDCWD.
func FsconfigSetPath(fd int, key string, path string, atfd int) (err error) {
var valuep *byte
if valuep, err = BytePtrFromString(path); err != nil {
return
}
return fsconfigCommon(fd, FSCONFIG_SET_PATH, key, valuep, atfd)
}
// FsconfigSetPathEmpty is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_PATH_EMPTY. The same as
// FsconfigSetPath but with AT_PATH_EMPTY implied.
func FsconfigSetPathEmpty(fd int, key string, path string, atfd int) (err error) {
var valuep *byte
if valuep, err = BytePtrFromString(path); err != nil {
return
}
return fsconfigCommon(fd, FSCONFIG_SET_PATH_EMPTY, key, valuep, atfd)
}
// FsconfigSetFd is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_SET_FD.
//
// fd is the filesystem context to act upon.
// key is the parameter key to set.
// value is a file descriptor to be assigned to specified key.
func FsconfigSetFd(fd int, key string, value int) (err error) {
return fsconfigCommon(fd, FSCONFIG_SET_FD, key, nil, value)
}
// FsconfigCreate is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_CMD_CREATE.
//
// fd is the filesystem context to act upon.
func FsconfigCreate(fd int) (err error) {
return fsconfig(fd, FSCONFIG_CMD_CREATE, nil, nil, 0)
}
// FsconfigReconfigure is equivalent to fsconfig(2) called
// with cmd == FSCONFIG_CMD_RECONFIGURE.
//
// fd is the filesystem context to act upon.
func FsconfigReconfigure(fd int) (err error) {
return fsconfig(fd, FSCONFIG_CMD_RECONFIGURE, nil, nil, 0)
}
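// A minimal sketch (the exampleTmpfsMount name, the tmpfs parameters, and the
// /mnt/scratch target are hypothetical) showing how the Fsconfig* wrappers
// combine with Fsopen, Fsmount and MoveMount to drive the new mount API:
func exampleTmpfsMount() error {
	fsfd, err := Fsopen("tmpfs", FSOPEN_CLOEXEC)
	if err != nil {
		return err
	}
	defer Close(fsfd)
	// Configure the filesystem context, then create the superblock.
	if err := FsconfigSetString(fsfd, "size", "64M"); err != nil {
		return err
	}
	if err := FsconfigCreate(fsfd); err != nil {
		return err
	}
	// Turn the configured context into a detached mount and attach it at the target path.
	mfd, err := Fsmount(fsfd, FSMOUNT_CLOEXEC, 0)
	if err != nil {
		return err
	}
	defer Close(mfd)
	return MoveMount(mfd, "", AT_FDCWD, "/mnt/scratch", MOVE_MOUNT_F_EMPTY_PATH)
}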
//sys Getdents(fd int, buf []byte) (n int, err error) = SYS_GETDENTS64
//sysnb Getpgid(pid int) (pgid int, err error)


@ -906,6 +906,16 @@ func Fspick(dirfd int, pathName string, flags int) (fd int, err error) {
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fsconfig(fd int, cmd uint, key *byte, value *byte, aux int) (err error) {
_, _, e1 := Syscall6(SYS_FSCONFIG, uintptr(fd), uintptr(cmd), uintptr(unsafe.Pointer(key)), uintptr(unsafe.Pointer(value)), uintptr(aux), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getdents(fd int, buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {


@ -836,6 +836,15 @@ const (
FSPICK_EMPTY_PATH = 0x8
FSMOUNT_CLOEXEC = 0x1
FSCONFIG_SET_FLAG = 0x0
FSCONFIG_SET_STRING = 0x1
FSCONFIG_SET_BINARY = 0x2
FSCONFIG_SET_PATH = 0x3
FSCONFIG_SET_PATH_EMPTY = 0x4
FSCONFIG_SET_FD = 0x5
FSCONFIG_CMD_CREATE = 0x6
FSCONFIG_CMD_RECONFIGURE = 0x7
)
type OpenHow struct {
@ -1550,6 +1559,7 @@ const (
IFLA_DEVLINK_PORT = 0x3e
IFLA_GSO_IPV4_MAX_SIZE = 0x3f
IFLA_GRO_IPV4_MAX_SIZE = 0x40
IFLA_DPLL_PIN = 0x41
IFLA_PROTO_DOWN_REASON_UNSPEC = 0x0
IFLA_PROTO_DOWN_REASON_MASK = 0x1
IFLA_PROTO_DOWN_REASON_VALUE = 0x2
@ -1565,6 +1575,7 @@ const (
IFLA_INET6_ICMP6STATS = 0x6
IFLA_INET6_TOKEN = 0x7
IFLA_INET6_ADDR_GEN_MODE = 0x8
IFLA_INET6_RA_MTU = 0x9
IFLA_BR_UNSPEC = 0x0
IFLA_BR_FORWARD_DELAY = 0x1
IFLA_BR_HELLO_TIME = 0x2
@ -1612,6 +1623,9 @@ const (
IFLA_BR_MCAST_MLD_VERSION = 0x2c
IFLA_BR_VLAN_STATS_PER_PORT = 0x2d
IFLA_BR_MULTI_BOOLOPT = 0x2e
IFLA_BR_MCAST_QUERIER_STATE = 0x2f
IFLA_BR_FDB_N_LEARNED = 0x30
IFLA_BR_FDB_MAX_LEARNED = 0x31
IFLA_BRPORT_UNSPEC = 0x0
IFLA_BRPORT_STATE = 0x1
IFLA_BRPORT_PRIORITY = 0x2
@ -1649,6 +1663,14 @@ const (
IFLA_BRPORT_BACKUP_PORT = 0x22
IFLA_BRPORT_MRP_RING_OPEN = 0x23
IFLA_BRPORT_MRP_IN_OPEN = 0x24
IFLA_BRPORT_MCAST_EHT_HOSTS_LIMIT = 0x25
IFLA_BRPORT_MCAST_EHT_HOSTS_CNT = 0x26
IFLA_BRPORT_LOCKED = 0x27
IFLA_BRPORT_MAB = 0x28
IFLA_BRPORT_MCAST_N_GROUPS = 0x29
IFLA_BRPORT_MCAST_MAX_GROUPS = 0x2a
IFLA_BRPORT_NEIGH_VLAN_SUPPRESS = 0x2b
IFLA_BRPORT_BACKUP_NHID = 0x2c
IFLA_INFO_UNSPEC = 0x0
IFLA_INFO_KIND = 0x1
IFLA_INFO_DATA = 0x2
@ -1670,6 +1692,9 @@ const (
IFLA_MACVLAN_MACADDR = 0x4
IFLA_MACVLAN_MACADDR_DATA = 0x5
IFLA_MACVLAN_MACADDR_COUNT = 0x6
IFLA_MACVLAN_BC_QUEUE_LEN = 0x7
IFLA_MACVLAN_BC_QUEUE_LEN_USED = 0x8
IFLA_MACVLAN_BC_CUTOFF = 0x9
IFLA_VRF_UNSPEC = 0x0
IFLA_VRF_TABLE = 0x1
IFLA_VRF_PORT_UNSPEC = 0x0
@ -1693,9 +1718,22 @@ const (
IFLA_XFRM_UNSPEC = 0x0
IFLA_XFRM_LINK = 0x1
IFLA_XFRM_IF_ID = 0x2
IFLA_XFRM_COLLECT_METADATA = 0x3
IFLA_IPVLAN_UNSPEC = 0x0
IFLA_IPVLAN_MODE = 0x1
IFLA_IPVLAN_FLAGS = 0x2
NETKIT_NEXT = -0x1
NETKIT_PASS = 0x0
NETKIT_DROP = 0x2
NETKIT_REDIRECT = 0x7
NETKIT_L2 = 0x0
NETKIT_L3 = 0x1
IFLA_NETKIT_UNSPEC = 0x0
IFLA_NETKIT_PEER_INFO = 0x1
IFLA_NETKIT_PRIMARY = 0x2
IFLA_NETKIT_POLICY = 0x3
IFLA_NETKIT_PEER_POLICY = 0x4
IFLA_NETKIT_MODE = 0x5
IFLA_VXLAN_UNSPEC = 0x0
IFLA_VXLAN_ID = 0x1
IFLA_VXLAN_GROUP = 0x2
@ -1726,6 +1764,8 @@ const (
IFLA_VXLAN_GPE = 0x1b
IFLA_VXLAN_TTL_INHERIT = 0x1c
IFLA_VXLAN_DF = 0x1d
IFLA_VXLAN_VNIFILTER = 0x1e
IFLA_VXLAN_LOCALBYPASS = 0x1f
IFLA_GENEVE_UNSPEC = 0x0
IFLA_GENEVE_ID = 0x1
IFLA_GENEVE_REMOTE = 0x2
@ -1740,6 +1780,7 @@ const (
IFLA_GENEVE_LABEL = 0xb
IFLA_GENEVE_TTL_INHERIT = 0xc
IFLA_GENEVE_DF = 0xd
IFLA_GENEVE_INNER_PROTO_INHERIT = 0xe
IFLA_BAREUDP_UNSPEC = 0x0
IFLA_BAREUDP_PORT = 0x1
IFLA_BAREUDP_ETHERTYPE = 0x2
@ -1752,6 +1793,8 @@ const (
IFLA_GTP_FD1 = 0x2
IFLA_GTP_PDP_HASHSIZE = 0x3
IFLA_GTP_ROLE = 0x4
IFLA_GTP_CREATE_SOCKETS = 0x5
IFLA_GTP_RESTART_COUNT = 0x6
IFLA_BOND_UNSPEC = 0x0
IFLA_BOND_MODE = 0x1
IFLA_BOND_ACTIVE_SLAVE = 0x2
@ -1781,6 +1824,9 @@ const (
IFLA_BOND_AD_ACTOR_SYSTEM = 0x1a
IFLA_BOND_TLB_DYNAMIC_LB = 0x1b
IFLA_BOND_PEER_NOTIF_DELAY = 0x1c
IFLA_BOND_AD_LACP_ACTIVE = 0x1d
IFLA_BOND_MISSED_MAX = 0x1e
IFLA_BOND_NS_IP6_TARGET = 0x1f
IFLA_BOND_AD_INFO_UNSPEC = 0x0
IFLA_BOND_AD_INFO_AGGREGATOR = 0x1
IFLA_BOND_AD_INFO_NUM_PORTS = 0x2
@ -1796,6 +1842,7 @@ const (
IFLA_BOND_SLAVE_AD_AGGREGATOR_ID = 0x6
IFLA_BOND_SLAVE_AD_ACTOR_OPER_PORT_STATE = 0x7
IFLA_BOND_SLAVE_AD_PARTNER_OPER_PORT_STATE = 0x8
IFLA_BOND_SLAVE_PRIO = 0x9
IFLA_VF_INFO_UNSPEC = 0x0
IFLA_VF_INFO = 0x1
IFLA_VF_UNSPEC = 0x0
@ -1854,8 +1901,16 @@ const (
IFLA_STATS_LINK_XSTATS_SLAVE = 0x3
IFLA_STATS_LINK_OFFLOAD_XSTATS = 0x4
IFLA_STATS_AF_SPEC = 0x5
IFLA_STATS_GETSET_UNSPEC = 0x0
IFLA_STATS_GET_FILTERS = 0x1
IFLA_STATS_SET_OFFLOAD_XSTATS_L3_STATS = 0x2
IFLA_OFFLOAD_XSTATS_UNSPEC = 0x0
IFLA_OFFLOAD_XSTATS_CPU_HIT = 0x1
IFLA_OFFLOAD_XSTATS_HW_S_INFO = 0x2
IFLA_OFFLOAD_XSTATS_L3_STATS = 0x3
IFLA_OFFLOAD_XSTATS_HW_S_INFO_UNSPEC = 0x0
IFLA_OFFLOAD_XSTATS_HW_S_INFO_REQUEST = 0x1
IFLA_OFFLOAD_XSTATS_HW_S_INFO_USED = 0x2
IFLA_XDP_UNSPEC = 0x0
IFLA_XDP_FD = 0x1
IFLA_XDP_ATTACHED = 0x2
@ -1885,6 +1940,11 @@ const (
IFLA_RMNET_UNSPEC = 0x0
IFLA_RMNET_MUX_ID = 0x1
IFLA_RMNET_FLAGS = 0x2
IFLA_MCTP_UNSPEC = 0x0
IFLA_MCTP_NET = 0x1
IFLA_DSA_UNSPEC = 0x0
IFLA_DSA_CONDUIT = 0x1
IFLA_DSA_MASTER = 0x1
)
const (

View file

@ -89,6 +89,7 @@ func (ds *DialSettings) HasCustomAudience() bool {
return len(ds.Audiences) > 0
}
// IsNewAuthLibraryEnabled returns true if the new auth library should be used.
func (ds *DialSettings) IsNewAuthLibraryEnabled() bool {
if ds.EnableNewAuthLibrary {
return true
@ -185,6 +186,8 @@ func (ds *DialSettings) GetUniverseDomain() string {
return ds.UniverseDomain
}
// IsUniverseDomainGDU returns true if the universe domain is the default Google
// universe.
func (ds *DialSettings) IsUniverseDomainGDU() bool {
return ds.GetUniverseDomain() == ds.GetDefaultUniverseDomain()
}

View file

@ -5,4 +5,4 @@
package internal
// Version is the current tagged release of the library.
const Version = "0.167.0"
const Version = "0.168.0"

View file

@ -1772,6 +1772,8 @@ func parseTarget(target string) (resolver.Target, error) {
return resolver.Target{URL: *u}, nil
}
// encodeAuthority escapes the authority string based on valid chars defined in
// https://datatracker.ietf.org/doc/html/rfc3986#section-3.2.
func encodeAuthority(authority string) string {
const upperhex = "0123456789ABCDEF"

View file
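
The new encodeAuthority escapes the dial-target authority according to RFC 3986 §3.2. A minimal sketch of percent-encoding, using a hypothetical percentEncode helper that escapes every byte outside the unreserved set (the real gRPC function leaves more characters, such as sub-delims, ':' and '@', literal):

package main

import (
	"fmt"
	"strings"
)

// percentEncode is a hypothetical helper that escapes every byte outside the
// RFC 3986 unreserved set; it only illustrates the idea behind encodeAuthority.
func percentEncode(s string) string {
	const upperhex = "0123456789ABCDEF"
	var b strings.Builder
	for i := 0; i < len(s); i++ {
		c := s[i]
		switch {
		case 'a' <= c && c <= 'z', 'A' <= c && c <= 'Z', '0' <= c && c <= '9',
			c == '-', c == '.', c == '_', c == '~':
			b.WriteByte(c)
		default:
			b.WriteByte('%')
			b.WriteByte(upperhex[c>>4])
			b.WriteByte(upperhex[c&0x0F])
		}
	}
	return b.String()
}

func main() {
	fmt.Println(percentEncode("user name@host:50051"))
}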

@ -28,6 +28,7 @@ import (
"fmt"
"io"
"net"
"strings"
"sync"
"sync/atomic"
"time"
@ -362,8 +363,12 @@ func (s *Stream) SendCompress() string {
// ClientAdvertisedCompressors returns the compressor names advertised by the
// client via grpc-accept-encoding header.
func (s *Stream) ClientAdvertisedCompressors() string {
return s.clientAdvertisedCompressors
func (s *Stream) ClientAdvertisedCompressors() []string {
values := strings.Split(s.clientAdvertisedCompressors, ",")
for i, v := range values {
values[i] = strings.TrimSpace(v)
}
return values
}
// Done returns a channel which is closed when it receives the final status

View file
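
ClientAdvertisedCompressors now returns a []string instead of the raw grpc-accept-encoding header value, splitting on commas and trimming whitespace, so server code such as ClientSupportedCompressors no longer re-splits the header. A small self-contained sketch of the same parsing, with a hypothetical header value:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical grpc-accept-encoding value as sent by a client.
	const advertised = "gzip, deflate , snappy"
	names := strings.Split(advertised, ",")
	for i, n := range names {
		names[i] = strings.TrimSpace(n)
	}
	fmt.Println(names) // [gzip deflate snappy]
}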

@ -168,6 +168,9 @@ type BuildOptions struct {
// field. In most cases though, it is not appropriate, and this field may
// be ignored.
Dialer func(context.Context, string) (net.Conn, error)
// Authority is the effective authority of the clientconn for which the
// resolver is built.
Authority string
}
// An Endpoint is one network endpoint, or server, which may have multiple

View file
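
BuildOptions now carries the effective authority of the ClientConn, so custom resolvers can see it at Build time. A hedged sketch of a no-op resolver that only logs the new field (the scheme name is hypothetical, and a real resolver must also push addresses via cc.UpdateState):

package main

import (
	"log"

	"google.golang.org/grpc/resolver"
)

// authorityLogger is a hypothetical resolver.Builder that only demonstrates
// reading the Authority field now passed in BuildOptions.
type authorityLogger struct{}

func (authorityLogger) Scheme() string { return "authlog" }

func (authorityLogger) Build(t resolver.Target, cc resolver.ClientConn, opts resolver.BuildOptions) (resolver.Resolver, error) {
	log.Printf("building resolver for %s, effective authority %q", t.URL.String(), opts.Authority)
	return nopResolver{}, nil
}

type nopResolver struct{}

func (nopResolver) ResolveNow(resolver.ResolveNowOptions) {}
func (nopResolver) Close()                                {}

func main() {
	resolver.Register(authorityLogger{})
}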

@ -75,6 +75,7 @@ func (ccr *ccResolverWrapper) start() error {
DialCreds: ccr.cc.dopts.copts.TransportCredentials,
CredsBundle: ccr.cc.dopts.copts.CredsBundle,
Dialer: ccr.cc.dopts.copts.Dialer,
Authority: ccr.cc.authority,
}
var err error
ccr.resolver, err = ccr.cc.resolverBuilder.Build(ccr.cc.parsedTarget, ccr, opts)

View file

@ -744,17 +744,19 @@ type payloadInfo struct {
uncompressedBytes []byte
}
func recvAndDecompress(p *parser, s *transport.Stream, dc Decompressor, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) ([]byte, error) {
pf, buf, err := p.recvMsg(maxReceiveMessageSize)
// recvAndDecompress reads a message from the stream, decompressing it if necessary.
//
// Calling the returned cancel function releases the buffer back to the pool, so the caller should call it as soon
// as the buffer is no longer needed.
func recvAndDecompress(p *parser, s *transport.Stream, dc Decompressor, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor,
) (uncompressedBuf []byte, cancel func(), err error) {
pf, compressedBuf, err := p.recvMsg(maxReceiveMessageSize)
if err != nil {
return nil, err
}
if payInfo != nil {
payInfo.compressedLength = len(buf)
return nil, nil, err
}
if st := checkRecvPayload(pf, s.RecvCompress(), compressor != nil || dc != nil); st != nil {
return nil, st.Err()
return nil, nil, st.Err()
}
var size int
@ -762,21 +764,35 @@ func recvAndDecompress(p *parser, s *transport.Stream, dc Decompressor, maxRecei
// To match legacy behavior, if the decompressor is set by WithDecompressor or RPCDecompressor,
// use this decompressor as the default.
if dc != nil {
buf, err = dc.Do(bytes.NewReader(buf))
size = len(buf)
uncompressedBuf, err = dc.Do(bytes.NewReader(compressedBuf))
size = len(uncompressedBuf)
} else {
buf, size, err = decompress(compressor, buf, maxReceiveMessageSize)
uncompressedBuf, size, err = decompress(compressor, compressedBuf, maxReceiveMessageSize)
}
if err != nil {
return nil, status.Errorf(codes.Internal, "grpc: failed to decompress the received message: %v", err)
return nil, nil, status.Errorf(codes.Internal, "grpc: failed to decompress the received message: %v", err)
}
if size > maxReceiveMessageSize {
// TODO: Revisit the error code. Currently keep it consistent with java
// implementation.
return nil, status.Errorf(codes.ResourceExhausted, "grpc: received message after decompression larger than max (%d vs. %d)", size, maxReceiveMessageSize)
return nil, nil, status.Errorf(codes.ResourceExhausted, "grpc: received message after decompression larger than max (%d vs. %d)", size, maxReceiveMessageSize)
}
} else {
uncompressedBuf = compressedBuf
}
if payInfo != nil {
payInfo.compressedLength = len(compressedBuf)
payInfo.uncompressedBytes = uncompressedBuf
cancel = func() {}
} else {
cancel = func() {
p.recvBufferPool.Put(&compressedBuf)
}
}
return buf, nil
return uncompressedBuf, cancel, nil
}
// Using compressor, decompress d, returning data and size.
@ -796,6 +812,9 @@ func decompress(compressor encoding.Compressor, d []byte, maxReceiveMessageSize
// size is used as an estimate to size the buffer, but we
// will read more data if available.
// +MinRead so ReadFrom will not reallocate if size is correct.
//
// TODO: If we ensure that the buffer size is the same as the DecompressedSize,
// we can also utilize the recv buffer pool here.
buf := bytes.NewBuffer(make([]byte, 0, size+bytes.MinRead))
bytesRead, err := buf.ReadFrom(io.LimitReader(dcReader, int64(maxReceiveMessageSize)+1))
return buf.Bytes(), int(bytesRead), err
@ -811,18 +830,15 @@ func decompress(compressor encoding.Compressor, d []byte, maxReceiveMessageSize
// dc takes precedence over compressor.
// TODO(dfawley): wrap the old compressor/decompressor using the new API?
func recv(p *parser, c baseCodec, s *transport.Stream, dc Decompressor, m any, maxReceiveMessageSize int, payInfo *payloadInfo, compressor encoding.Compressor) error {
buf, err := recvAndDecompress(p, s, dc, maxReceiveMessageSize, payInfo, compressor)
buf, cancel, err := recvAndDecompress(p, s, dc, maxReceiveMessageSize, payInfo, compressor)
if err != nil {
return err
}
defer cancel()
if err := c.Unmarshal(buf, m); err != nil {
return status.Errorf(codes.Internal, "grpc: failed to unmarshal the received message: %v", err)
}
if payInfo != nil {
payInfo.uncompressedBytes = buf
} else {
p.recvBufferPool.Put(&buf)
}
return nil
}

View file
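
recvAndDecompress now hands back a cancel function that returns the receive buffer to the pool, and recv defers it until Unmarshal has finished. A generic sketch of the same ownership pattern with sync.Pool (the recv helper here is hypothetical, not the gRPC internals):

package main

import (
	"fmt"
	"sync"
)

var bufPool = sync.Pool{New: func() any { return new([]byte) }}

// recv is a hypothetical receive function following the same pattern: the
// returned cancel puts the buffer back into the pool, so callers defer it
// until they have finished decoding the message.
func recv() (buf []byte, cancel func()) {
	p := bufPool.Get().(*[]byte)
	*p = append((*p)[:0], "payload"...)
	return *p, func() { bufPool.Put(p) }
}

func main() {
	buf, cancel := recv()
	defer cancel()
	fmt.Println(string(buf))
}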

@ -1342,7 +1342,8 @@ func (s *Server) processUnaryRPC(ctx context.Context, t transport.ServerTranspor
if len(shs) != 0 || len(binlogs) != 0 {
payInfo = &payloadInfo{}
}
d, err := recvAndDecompress(&parser{r: stream, recvBufferPool: s.opts.recvBufferPool}, stream, dc, s.opts.maxReceiveMessageSize, payInfo, decomp)
d, cancel, err := recvAndDecompress(&parser{r: stream, recvBufferPool: s.opts.recvBufferPool}, stream, dc, s.opts.maxReceiveMessageSize, payInfo, decomp)
if err != nil {
if e := t.WriteStatus(stream, status.Convert(err)); e != nil {
channelz.Warningf(logger, s.channelzID, "grpc: Server.processUnaryRPC failed to write status: %v", e)
@ -1353,6 +1354,8 @@ func (s *Server) processUnaryRPC(ctx context.Context, t transport.ServerTranspor
t.IncrMsgRecv()
}
df := func(v any) error {
defer cancel()
if err := s.getCodec(stream.ContentSubtype()).Unmarshal(d, v); err != nil {
return status.Errorf(codes.Internal, "grpc: error unmarshalling request: %v", err)
}
@ -2117,7 +2120,7 @@ func ClientSupportedCompressors(ctx context.Context) ([]string, error) {
return nil, fmt.Errorf("failed to fetch the stream from the given context %v", ctx)
}
return strings.Split(stream.ClientAdvertisedCompressors(), ","), nil
return stream.ClientAdvertisedCompressors(), nil
}
// SetTrailer sets the trailer metadata that will be sent when an RPC returns.
@ -2157,7 +2160,7 @@ func (c *channelzServer) ChannelzMetric() *channelz.ServerInternalMetric {
// validateSendCompressor returns an error when given compressor name cannot be
// handled by the server or the client based on the advertised compressors.
func validateSendCompressor(name, clientCompressors string) error {
func validateSendCompressor(name string, clientCompressors []string) error {
if name == encoding.Identity {
return nil
}
@ -2166,7 +2169,7 @@ func validateSendCompressor(name, clientCompressors string) error {
return fmt.Errorf("compressor not registered %q", name)
}
for _, c := range strings.Split(clientCompressors, ",") {
for _, c := range clientCompressors {
if c == name {
return nil // found match
}

View file
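
With ClientAdvertisedCompressors returning []string, ClientSupportedCompressors and validateSendCompressor simply range over the slice. A hedged usage sketch inside a handler, assuming the gzip compressor is registered and that the experimental SetSendCompressor API is available:

package main

import (
	"context"

	"google.golang.org/grpc"
	_ "google.golang.org/grpc/encoding/gzip" // registers the "gzip" compressor
)

// pickGzip is a hypothetical handler helper: it switches the response
// compressor to gzip only when the client advertised it.
func pickGzip(ctx context.Context) error {
	names, err := grpc.ClientSupportedCompressors(ctx)
	if err != nil {
		return err
	}
	for _, name := range names {
		if name == "gzip" {
			return grpc.SetSendCompressor(ctx, "gzip")
		}
	}
	return nil
}

func main() {}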

@ -19,4 +19,4 @@
package grpc
// Version is the current grpc version.
const Version = "1.62.0"
const Version = "1.62.1"

View file

@ -322,6 +322,10 @@ func (d decoder) skipJSONValue() error {
if open > d.opts.RecursionLimit {
return errors.New("exceeded max recursion depth")
}
case json.EOF:
// This can only happen if there's a bug in Decoder.Read.
// Avoid an infinite loop if this does happen.
return errors.New("unexpected EOF")
}
if open == 0 {
return nil

View file
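
The new json.EOF case stops skipJSONValue from looping forever on truncated input. A toy sketch of why the guard matters, using hypothetical token kinds rather than the internal decoder:

package main

import (
	"errors"
	"fmt"
)

type kind int

const (
	eof kind = iota
	objectOpen
	objectClose
	scalar
)

// skipValue is a hypothetical skipper over a token stream: without the eof
// case the loop would spin forever on truncated input.
func skipValue(next func() kind) error {
	open := 0
	for {
		switch next() {
		case objectOpen:
			open++
		case objectClose:
			open--
		case eof:
			return errors.New("unexpected EOF")
		}
		if open == 0 {
			return nil
		}
	}
}

func main() {
	toks := []kind{objectOpen, scalar, eof} // truncated: missing objectClose
	i := 0
	next := func() kind { k := toks[i]; i++; return k }
	fmt.Println(skipValue(next))
}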

@ -0,0 +1,12 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package editiondefaults contains the binary representation of the editions
// defaults.
package editiondefaults
import _ "embed"
//go:embed editions_defaults.binpb
var Defaults []byte

View file
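
The editiondefaults package embeds editions_defaults.binpb into Defaults at build time via go:embed. The same pattern in isolation, with a hypothetical file name:

package assets

import _ "embed"

// defaults.binpb is a hypothetical binary file next to this source file;
// go:embed loads its raw bytes into the variable at build time.
//
//go:embed defaults.binpb
var Defaults []byte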

@ -0,0 +1,4 @@
  (0ж
  (0з
  (0и ж

View file

@ -121,7 +121,7 @@ func (d *Decoder) Read() (Token, error) {
case ObjectClose:
if len(d.openStack) == 0 ||
d.lastToken.kind == comma ||
d.lastToken.kind&(Name|comma) != 0 ||
d.openStack[len(d.openStack)-1] != ObjectOpen {
return Token{}, d.newSyntaxError(tok.pos, unexpectedFmt, tok.RawString())
}

View file

@ -68,7 +68,7 @@ type (
Extensions Extensions
Services Services
EditionFeatures FileEditionFeatures
EditionFeatures EditionFeatures
}
FileL2 struct {
Options func() protoreflect.ProtoMessage
@ -76,10 +76,13 @@ type (
Locations SourceLocations
}
FileEditionFeatures struct {
EditionFeatures struct {
// IsFieldPresence is true if field_presence is EXPLICIT
// https://protobuf.dev/editions/features/#field_presence
IsFieldPresence bool
// IsLegacyRequired is true if field_presence is LEGACY_REQUIRED
// https://protobuf.dev/editions/features/#field_presence
IsLegacyRequired bool
// IsOpenEnum is true if enum_type is OPEN
// https://protobuf.dev/editions/features/#enum_type
IsOpenEnum bool
@ -95,6 +98,9 @@ type (
// IsJSONCompliant is true if json_format is ALLOW
// https://protobuf.dev/editions/features/#json_format
IsJSONCompliant bool
// GenerateLegacyUnmarshalJSON determines if the plugin generates the
// UnmarshalJSON([]byte) error method for enums.
GenerateLegacyUnmarshalJSON bool
}
)
@ -156,6 +162,8 @@ type (
}
EnumL1 struct {
eagerValues bool // controls whether EnumL2.Values is already populated
EditionFeatures EditionFeatures
}
EnumL2 struct {
Options func() protoreflect.ProtoMessage
@ -217,6 +225,8 @@ type (
Extensions Extensions
IsMapEntry bool // promoted from google.protobuf.MessageOptions
IsMessageSet bool // promoted from google.protobuf.MessageOptions
EditionFeatures EditionFeatures
}
MessageL2 struct {
Options func() protoreflect.ProtoMessage
@ -250,8 +260,7 @@ type (
Enum protoreflect.EnumDescriptor
Message protoreflect.MessageDescriptor
// Edition features.
Presence bool
EditionFeatures EditionFeatures
}
Oneof struct {
@ -261,6 +270,8 @@ type (
OneofL1 struct {
Options func() protoreflect.ProtoMessage
Fields OneofFields // must be consistent with Message.Fields.ContainingOneof
EditionFeatures EditionFeatures
}
)
@ -310,26 +321,36 @@ func (fd *Field) Options() protoreflect.ProtoMessage {
}
func (fd *Field) Number() protoreflect.FieldNumber { return fd.L1.Number }
func (fd *Field) Cardinality() protoreflect.Cardinality { return fd.L1.Cardinality }
func (fd *Field) Kind() protoreflect.Kind { return fd.L1.Kind }
func (fd *Field) Kind() protoreflect.Kind {
return fd.L1.Kind
}
func (fd *Field) HasJSONName() bool { return fd.L1.StringName.hasJSON }
func (fd *Field) JSONName() string { return fd.L1.StringName.getJSON(fd) }
func (fd *Field) TextName() string { return fd.L1.StringName.getText(fd) }
func (fd *Field) HasPresence() bool {
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions {
return fd.L1.Presence || fd.L1.Message != nil || fd.L1.ContainingOneof != nil
if fd.L1.Cardinality == protoreflect.Repeated {
return false
}
return fd.L1.Cardinality != protoreflect.Repeated && (fd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 || fd.L1.Message != nil || fd.L1.ContainingOneof != nil)
explicitFieldPresence := fd.Syntax() == protoreflect.Editions && fd.L1.EditionFeatures.IsFieldPresence
return fd.Syntax() == protoreflect.Proto2 || explicitFieldPresence || fd.L1.Message != nil || fd.L1.ContainingOneof != nil
}
func (fd *Field) HasOptionalKeyword() bool {
return (fd.L0.ParentFile.L1.Syntax == protoreflect.Proto2 && fd.L1.Cardinality == protoreflect.Optional && fd.L1.ContainingOneof == nil) || fd.L1.IsProto3Optional
}
func (fd *Field) IsPacked() bool {
if !fd.L1.HasPacked && fd.L0.ParentFile.L1.Syntax != protoreflect.Proto2 && fd.L1.Cardinality == protoreflect.Repeated {
if fd.L1.Cardinality != protoreflect.Repeated {
return false
}
switch fd.L1.Kind {
case protoreflect.StringKind, protoreflect.BytesKind, protoreflect.MessageKind, protoreflect.GroupKind:
default:
return true
return false
}
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions {
return fd.L1.EditionFeatures.IsPacked
}
if fd.L0.ParentFile.L1.Syntax == protoreflect.Proto3 {
// proto3 repeated fields are packed by default.
return !fd.L1.HasPacked || fd.L1.IsPacked
}
return fd.L1.IsPacked
}
@ -378,6 +399,9 @@ func (fd *Field) ProtoType(protoreflect.FieldDescriptor) {}
// WARNING: This method is exempt from the compatibility promise and may be
// removed in the future without warning.
func (fd *Field) EnforceUTF8() bool {
if fd.L0.ParentFile.L1.Syntax == protoreflect.Editions {
return fd.L1.EditionFeatures.IsUTF8Validated
}
if fd.L1.HasEnforceUTF8 {
return fd.L1.EnforceUTF8
}
@ -408,6 +432,7 @@ type (
Extendee protoreflect.MessageDescriptor
Cardinality protoreflect.Cardinality
Kind protoreflect.Kind
EditionFeatures EditionFeatures
}
ExtensionL2 struct {
Options func() protoreflect.ProtoMessage

View file
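
HasPresence, IsPacked and EnforceUTF8 now consult the per-descriptor EditionFeatures. From user code the resolved answers are visible through protoreflect; a small sketch inspecting a well-known message (only durationpb is assumed):

package main

import (
	"fmt"

	"google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	md := (&durationpb.Duration{}).ProtoReflect().Descriptor()
	fields := md.Fields()
	for i := 0; i < fields.Len(); i++ {
		fd := fields.Get(i)
		// HasPresence and IsPacked are the public views of the logic
		// reworked above for editions support.
		fmt.Printf("%s presence=%v packed=%v\n", fd.Name(), fd.HasPresence(), fd.IsPacked())
	}
}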

@ -5,6 +5,7 @@
package filedesc
import (
"fmt"
"sync"
"google.golang.org/protobuf/encoding/protowire"
@ -98,6 +99,7 @@ func (fd *File) unmarshalSeed(b []byte) {
var prevField protoreflect.FieldNumber
var numEnums, numMessages, numExtensions, numServices int
var posEnums, posMessages, posExtensions, posServices int
var options []byte
b0 := b
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
@ -113,6 +115,8 @@ func (fd *File) unmarshalSeed(b []byte) {
fd.L1.Syntax = protoreflect.Proto2
case "proto3":
fd.L1.Syntax = protoreflect.Proto3
case "editions":
fd.L1.Syntax = protoreflect.Editions
default:
panic("invalid syntax")
}
@ -120,6 +124,8 @@ func (fd *File) unmarshalSeed(b []byte) {
fd.L1.Path = sb.MakeString(v)
case genid.FileDescriptorProto_Package_field_number:
fd.L1.Package = protoreflect.FullName(sb.MakeString(v))
case genid.FileDescriptorProto_Options_field_number:
options = v
case genid.FileDescriptorProto_EnumType_field_number:
if prevField != genid.FileDescriptorProto_EnumType_field_number {
if numEnums > 0 {
@ -154,6 +160,13 @@ func (fd *File) unmarshalSeed(b []byte) {
numServices++
}
prevField = num
case protowire.VarintType:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
switch num {
case genid.FileDescriptorProto_Edition_field_number:
fd.L1.Edition = Edition(v)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]
@ -166,6 +179,15 @@ func (fd *File) unmarshalSeed(b []byte) {
fd.L1.Syntax = protoreflect.Proto2
}
if fd.L1.Syntax == protoreflect.Editions {
fd.L1.EditionFeatures = getFeaturesFor(fd.L1.Edition)
}
// Parse editions features from options if any
if options != nil {
fd.unmarshalSeedOptions(options)
}
// Must allocate all declarations before parsing each descriptor type
// to ensure we handled all descriptors in "flattened ordering".
if numEnums > 0 {
@ -219,6 +241,28 @@ func (fd *File) unmarshalSeed(b []byte) {
}
}
func (fd *File) unmarshalSeedOptions(b []byte) {
for b := b; len(b) > 0; {
num, typ, n := protowire.ConsumeTag(b)
b = b[n:]
switch typ {
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FileOptions_Features_field_number:
if fd.Syntax() != protoreflect.Editions {
panic(fmt.Sprintf("invalid descriptor: using edition features in a proto with syntax %s", fd.Syntax()))
}
fd.L1.EditionFeatures = unmarshalFeatureSet(v, fd.L1.EditionFeatures)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]
}
}
}
func (ed *Enum) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd protoreflect.Descriptor, i int) {
ed.L0.ParentFile = pf
ed.L0.Parent = pd
@ -275,6 +319,7 @@ func (md *Message) unmarshalSeed(b []byte, sb *strs.Builder, pf *File, pd protor
md.L0.ParentFile = pf
md.L0.Parent = pd
md.L0.Index = i
md.L1.EditionFeatures = featuresFromParentDesc(md.Parent())
var prevField protoreflect.FieldNumber
var numEnums, numMessages, numExtensions int
@ -380,6 +425,13 @@ func (md *Message) unmarshalSeedOptions(b []byte) {
case genid.MessageOptions_MessageSetWireFormat_field_number:
md.L1.IsMessageSet = protowire.DecodeBool(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.MessageOptions_Features_field_number:
md.L1.EditionFeatures = unmarshalFeatureSet(v, md.L1.EditionFeatures)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]

View file
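
unmarshalSeedOptions walks the serialized FileOptions with protowire, picking out the features field. The same consume loop in isolation, over a hand-built buffer (error handling omitted for brevity):

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	// Build a tiny message by hand: field 1 = varint 150, field 2 = "hi".
	var b []byte
	b = protowire.AppendTag(b, 1, protowire.VarintType)
	b = protowire.AppendVarint(b, 150)
	b = protowire.AppendTag(b, 2, protowire.BytesType)
	b = protowire.AppendBytes(b, []byte("hi"))

	// The same shape of loop used by unmarshalSeedOptions above.
	for len(b) > 0 {
		num, typ, n := protowire.ConsumeTag(b)
		b = b[n:]
		switch typ {
		case protowire.VarintType:
			v, m := protowire.ConsumeVarint(b)
			b = b[m:]
			fmt.Printf("field %d: varint %d\n", num, v)
		case protowire.BytesType:
			v, m := protowire.ConsumeBytes(b)
			b = b[m:]
			fmt.Printf("field %d: bytes %q\n", num, v)
		default:
			m := protowire.ConsumeFieldValue(num, typ, b)
			b = b[m:]
		}
	}
}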

@ -414,6 +414,7 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd protoref
fd.L0.ParentFile = pf
fd.L0.Parent = pd
fd.L0.Index = i
fd.L1.EditionFeatures = featuresFromParentDesc(fd.Parent())
var rawTypeName []byte
var rawOptions []byte
@ -465,6 +466,12 @@ func (fd *Field) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd protoref
b = b[m:]
}
}
if fd.Syntax() == protoreflect.Editions && fd.L1.Kind == protoreflect.MessageKind && fd.L1.EditionFeatures.IsDelimitedEncoded {
fd.L1.Kind = protoreflect.GroupKind
}
if fd.Syntax() == protoreflect.Editions && fd.L1.EditionFeatures.IsLegacyRequired {
fd.L1.Cardinality = protoreflect.Required
}
if rawTypeName != nil {
name := makeFullName(sb, rawTypeName)
switch fd.L1.Kind {
@ -497,6 +504,13 @@ func (fd *Field) unmarshalOptions(b []byte) {
fd.L1.HasEnforceUTF8 = true
fd.L1.EnforceUTF8 = protowire.DecodeBool(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FieldOptions_Features_field_number:
fd.L1.EditionFeatures = unmarshalFeatureSet(v, fd.L1.EditionFeatures)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]
@ -534,6 +548,7 @@ func (od *Oneof) unmarshalFull(b []byte, sb *strs.Builder, pf *File, pd protoref
func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
var rawTypeName []byte
var rawOptions []byte
xd.L1.EditionFeatures = featuresFromParentDesc(xd.L1.Extendee)
xd.L2 = new(ExtensionL2)
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
@ -565,6 +580,12 @@ func (xd *Extension) unmarshalFull(b []byte, sb *strs.Builder) {
b = b[m:]
}
}
if xd.Syntax() == protoreflect.Editions && xd.L1.Kind == protoreflect.MessageKind && xd.L1.EditionFeatures.IsDelimitedEncoded {
xd.L1.Kind = protoreflect.GroupKind
}
if xd.Syntax() == protoreflect.Editions && xd.L1.EditionFeatures.IsLegacyRequired {
xd.L1.Cardinality = protoreflect.Required
}
if rawTypeName != nil {
name := makeFullName(sb, rawTypeName)
switch xd.L1.Kind {
@ -589,6 +610,13 @@ func (xd *Extension) unmarshalOptions(b []byte) {
case genid.FieldOptions_Packed_field_number:
xd.L2.IsPacked = protowire.DecodeBool(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FieldOptions_Features_field_number:
xd.L1.EditionFeatures = unmarshalFeatureSet(v, xd.L1.EditionFeatures)
}
default:
m := protowire.ConsumeFieldValue(num, typ, b)
b = b[m:]

View file

@ -0,0 +1,142 @@
// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package filedesc
import (
"fmt"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/internal/editiondefaults"
"google.golang.org/protobuf/internal/genid"
"google.golang.org/protobuf/reflect/protoreflect"
)
var defaultsCache = make(map[Edition]EditionFeatures)
func init() {
unmarshalEditionDefaults(editiondefaults.Defaults)
}
func unmarshalGoFeature(b []byte, parent EditionFeatures) EditionFeatures {
for len(b) > 0 {
num, _, n := protowire.ConsumeTag(b)
b = b[n:]
switch num {
case genid.GoFeatures_LegacyUnmarshalJsonEnum_field_number:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
parent.GenerateLegacyUnmarshalJSON = protowire.DecodeBool(v)
default:
panic(fmt.Sprintf("unknown field number %d while unmarshalling GoFeatures", num))
}
}
return parent
}
func unmarshalFeatureSet(b []byte, parent EditionFeatures) EditionFeatures {
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
b = b[n:]
switch typ {
case protowire.VarintType:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
switch num {
case genid.FeatureSet_FieldPresence_field_number:
parent.IsFieldPresence = v == genid.FeatureSet_EXPLICIT_enum_value || v == genid.FeatureSet_LEGACY_REQUIRED_enum_value
parent.IsLegacyRequired = v == genid.FeatureSet_LEGACY_REQUIRED_enum_value
case genid.FeatureSet_EnumType_field_number:
parent.IsOpenEnum = v == genid.FeatureSet_OPEN_enum_value
case genid.FeatureSet_RepeatedFieldEncoding_field_number:
parent.IsPacked = v == genid.FeatureSet_PACKED_enum_value
case genid.FeatureSet_Utf8Validation_field_number:
parent.IsUTF8Validated = v == genid.FeatureSet_VERIFY_enum_value
case genid.FeatureSet_MessageEncoding_field_number:
parent.IsDelimitedEncoded = v == genid.FeatureSet_DELIMITED_enum_value
case genid.FeatureSet_JsonFormat_field_number:
parent.IsJSONCompliant = v == genid.FeatureSet_ALLOW_enum_value
default:
panic(fmt.Sprintf("unknown field number %d while unmarshalling FeatureSet", num))
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.GoFeatures_LegacyUnmarshalJsonEnum_field_number:
parent = unmarshalGoFeature(v, parent)
}
}
}
return parent
}
func featuresFromParentDesc(parentDesc protoreflect.Descriptor) EditionFeatures {
var parentFS EditionFeatures
switch p := parentDesc.(type) {
case *File:
parentFS = p.L1.EditionFeatures
case *Message:
parentFS = p.L1.EditionFeatures
default:
panic(fmt.Sprintf("unknown parent type %T", parentDesc))
}
return parentFS
}
func unmarshalEditionDefault(b []byte) {
var ed Edition
var fs EditionFeatures
for len(b) > 0 {
num, typ, n := protowire.ConsumeTag(b)
b = b[n:]
switch typ {
case protowire.VarintType:
v, m := protowire.ConsumeVarint(b)
b = b[m:]
switch num {
case genid.FeatureSetDefaults_FeatureSetEditionDefault_Edition_field_number:
ed = Edition(v)
}
case protowire.BytesType:
v, m := protowire.ConsumeBytes(b)
b = b[m:]
switch num {
case genid.FeatureSetDefaults_FeatureSetEditionDefault_Features_field_number:
fs = unmarshalFeatureSet(v, fs)
}
}
}
defaultsCache[ed] = fs
}
func unmarshalEditionDefaults(b []byte) {
for len(b) > 0 {
num, _, n := protowire.ConsumeTag(b)
b = b[n:]
switch num {
case genid.FeatureSetDefaults_Defaults_field_number:
def, m := protowire.ConsumeBytes(b)
b = b[m:]
unmarshalEditionDefault(def)
case genid.FeatureSetDefaults_MinimumEdition_field_number,
genid.FeatureSetDefaults_MaximumEdition_field_number:
// We don't care about the minimum and maximum editions. If the
// edition we are looking for later on is not in the cache we know
// it is outside of the range between minimum and maximum edition.
_, m := protowire.ConsumeVarint(b)
b = b[m:]
default:
panic(fmt.Sprintf("unknown field number %d while unmarshalling EditionDefault", num))
}
}
}
func getFeaturesFor(ed Edition) EditionFeatures {
if def, ok := defaultsCache[ed]; ok {
return def
}
panic(fmt.Sprintf("unsupported edition: %v", ed))
}

View file

@ -18,6 +18,21 @@ const (
Edition_enum_name = "Edition"
)
// Enum values for google.protobuf.Edition.
const (
Edition_EDITION_UNKNOWN_enum_value = 0
Edition_EDITION_PROTO2_enum_value = 998
Edition_EDITION_PROTO3_enum_value = 999
Edition_EDITION_2023_enum_value = 1000
Edition_EDITION_2024_enum_value = 1001
Edition_EDITION_1_TEST_ONLY_enum_value = 1
Edition_EDITION_2_TEST_ONLY_enum_value = 2
Edition_EDITION_99997_TEST_ONLY_enum_value = 99997
Edition_EDITION_99998_TEST_ONLY_enum_value = 99998
Edition_EDITION_99999_TEST_ONLY_enum_value = 99999
Edition_EDITION_MAX_enum_value = 2147483647
)
// Names for google.protobuf.FileDescriptorSet.
const (
FileDescriptorSet_message_name protoreflect.Name = "FileDescriptorSet"
@ -213,6 +228,12 @@ const (
ExtensionRangeOptions_VerificationState_enum_name = "VerificationState"
)
// Enum values for google.protobuf.ExtensionRangeOptions.VerificationState.
const (
ExtensionRangeOptions_DECLARATION_enum_value = 0
ExtensionRangeOptions_UNVERIFIED_enum_value = 1
)
// Names for google.protobuf.ExtensionRangeOptions.Declaration.
const (
ExtensionRangeOptions_Declaration_message_name protoreflect.Name = "Declaration"
@ -297,12 +318,41 @@ const (
FieldDescriptorProto_Type_enum_name = "Type"
)
// Enum values for google.protobuf.FieldDescriptorProto.Type.
const (
FieldDescriptorProto_TYPE_DOUBLE_enum_value = 1
FieldDescriptorProto_TYPE_FLOAT_enum_value = 2
FieldDescriptorProto_TYPE_INT64_enum_value = 3
FieldDescriptorProto_TYPE_UINT64_enum_value = 4
FieldDescriptorProto_TYPE_INT32_enum_value = 5
FieldDescriptorProto_TYPE_FIXED64_enum_value = 6
FieldDescriptorProto_TYPE_FIXED32_enum_value = 7
FieldDescriptorProto_TYPE_BOOL_enum_value = 8
FieldDescriptorProto_TYPE_STRING_enum_value = 9
FieldDescriptorProto_TYPE_GROUP_enum_value = 10
FieldDescriptorProto_TYPE_MESSAGE_enum_value = 11
FieldDescriptorProto_TYPE_BYTES_enum_value = 12
FieldDescriptorProto_TYPE_UINT32_enum_value = 13
FieldDescriptorProto_TYPE_ENUM_enum_value = 14
FieldDescriptorProto_TYPE_SFIXED32_enum_value = 15
FieldDescriptorProto_TYPE_SFIXED64_enum_value = 16
FieldDescriptorProto_TYPE_SINT32_enum_value = 17
FieldDescriptorProto_TYPE_SINT64_enum_value = 18
)
// Full and short names for google.protobuf.FieldDescriptorProto.Label.
const (
FieldDescriptorProto_Label_enum_fullname = "google.protobuf.FieldDescriptorProto.Label"
FieldDescriptorProto_Label_enum_name = "Label"
)
// Enum values for google.protobuf.FieldDescriptorProto.Label.
const (
FieldDescriptorProto_LABEL_OPTIONAL_enum_value = 1
FieldDescriptorProto_LABEL_REPEATED_enum_value = 3
FieldDescriptorProto_LABEL_REQUIRED_enum_value = 2
)
// Names for google.protobuf.OneofDescriptorProto.
const (
OneofDescriptorProto_message_name protoreflect.Name = "OneofDescriptorProto"
@ -474,7 +524,6 @@ const (
FileOptions_CcGenericServices_field_name protoreflect.Name = "cc_generic_services"
FileOptions_JavaGenericServices_field_name protoreflect.Name = "java_generic_services"
FileOptions_PyGenericServices_field_name protoreflect.Name = "py_generic_services"
FileOptions_PhpGenericServices_field_name protoreflect.Name = "php_generic_services"
FileOptions_Deprecated_field_name protoreflect.Name = "deprecated"
FileOptions_CcEnableArenas_field_name protoreflect.Name = "cc_enable_arenas"
FileOptions_ObjcClassPrefix_field_name protoreflect.Name = "objc_class_prefix"
@ -497,7 +546,6 @@ const (
FileOptions_CcGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_generic_services"
FileOptions_JavaGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.java_generic_services"
FileOptions_PyGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.py_generic_services"
FileOptions_PhpGenericServices_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.php_generic_services"
FileOptions_Deprecated_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.deprecated"
FileOptions_CcEnableArenas_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.cc_enable_arenas"
FileOptions_ObjcClassPrefix_field_fullname protoreflect.FullName = "google.protobuf.FileOptions.objc_class_prefix"
@ -523,7 +571,6 @@ const (
FileOptions_CcGenericServices_field_number protoreflect.FieldNumber = 16
FileOptions_JavaGenericServices_field_number protoreflect.FieldNumber = 17
FileOptions_PyGenericServices_field_number protoreflect.FieldNumber = 18
FileOptions_PhpGenericServices_field_number protoreflect.FieldNumber = 42
FileOptions_Deprecated_field_number protoreflect.FieldNumber = 23
FileOptions_CcEnableArenas_field_number protoreflect.FieldNumber = 31
FileOptions_ObjcClassPrefix_field_number protoreflect.FieldNumber = 36
@ -543,6 +590,13 @@ const (
FileOptions_OptimizeMode_enum_name = "OptimizeMode"
)
// Enum values for google.protobuf.FileOptions.OptimizeMode.
const (
FileOptions_SPEED_enum_value = 1
FileOptions_CODE_SIZE_enum_value = 2
FileOptions_LITE_RUNTIME_enum_value = 3
)
// Names for google.protobuf.MessageOptions.
const (
MessageOptions_message_name protoreflect.Name = "MessageOptions"
@ -639,24 +693,59 @@ const (
FieldOptions_CType_enum_name = "CType"
)
// Enum values for google.protobuf.FieldOptions.CType.
const (
FieldOptions_STRING_enum_value = 0
FieldOptions_CORD_enum_value = 1
FieldOptions_STRING_PIECE_enum_value = 2
)
// Full and short names for google.protobuf.FieldOptions.JSType.
const (
FieldOptions_JSType_enum_fullname = "google.protobuf.FieldOptions.JSType"
FieldOptions_JSType_enum_name = "JSType"
)
// Enum values for google.protobuf.FieldOptions.JSType.
const (
FieldOptions_JS_NORMAL_enum_value = 0
FieldOptions_JS_STRING_enum_value = 1
FieldOptions_JS_NUMBER_enum_value = 2
)
// Full and short names for google.protobuf.FieldOptions.OptionRetention.
const (
FieldOptions_OptionRetention_enum_fullname = "google.protobuf.FieldOptions.OptionRetention"
FieldOptions_OptionRetention_enum_name = "OptionRetention"
)
// Enum values for google.protobuf.FieldOptions.OptionRetention.
const (
FieldOptions_RETENTION_UNKNOWN_enum_value = 0
FieldOptions_RETENTION_RUNTIME_enum_value = 1
FieldOptions_RETENTION_SOURCE_enum_value = 2
)
// Full and short names for google.protobuf.FieldOptions.OptionTargetType.
const (
FieldOptions_OptionTargetType_enum_fullname = "google.protobuf.FieldOptions.OptionTargetType"
FieldOptions_OptionTargetType_enum_name = "OptionTargetType"
)
// Enum values for google.protobuf.FieldOptions.OptionTargetType.
const (
FieldOptions_TARGET_TYPE_UNKNOWN_enum_value = 0
FieldOptions_TARGET_TYPE_FILE_enum_value = 1
FieldOptions_TARGET_TYPE_EXTENSION_RANGE_enum_value = 2
FieldOptions_TARGET_TYPE_MESSAGE_enum_value = 3
FieldOptions_TARGET_TYPE_FIELD_enum_value = 4
FieldOptions_TARGET_TYPE_ONEOF_enum_value = 5
FieldOptions_TARGET_TYPE_ENUM_enum_value = 6
FieldOptions_TARGET_TYPE_ENUM_ENTRY_enum_value = 7
FieldOptions_TARGET_TYPE_SERVICE_enum_value = 8
FieldOptions_TARGET_TYPE_METHOD_enum_value = 9
)
// Names for google.protobuf.FieldOptions.EditionDefault.
const (
FieldOptions_EditionDefault_message_name protoreflect.Name = "EditionDefault"
@ -813,6 +902,13 @@ const (
MethodOptions_IdempotencyLevel_enum_name = "IdempotencyLevel"
)
// Enum values for google.protobuf.MethodOptions.IdempotencyLevel.
const (
MethodOptions_IDEMPOTENCY_UNKNOWN_enum_value = 0
MethodOptions_NO_SIDE_EFFECTS_enum_value = 1
MethodOptions_IDEMPOTENT_enum_value = 2
)
// Names for google.protobuf.UninterpretedOption.
const (
UninterpretedOption_message_name protoreflect.Name = "UninterpretedOption"
@ -909,36 +1005,79 @@ const (
FeatureSet_FieldPresence_enum_name = "FieldPresence"
)
// Enum values for google.protobuf.FeatureSet.FieldPresence.
const (
FeatureSet_FIELD_PRESENCE_UNKNOWN_enum_value = 0
FeatureSet_EXPLICIT_enum_value = 1
FeatureSet_IMPLICIT_enum_value = 2
FeatureSet_LEGACY_REQUIRED_enum_value = 3
)
// Full and short names for google.protobuf.FeatureSet.EnumType.
const (
FeatureSet_EnumType_enum_fullname = "google.protobuf.FeatureSet.EnumType"
FeatureSet_EnumType_enum_name = "EnumType"
)
// Enum values for google.protobuf.FeatureSet.EnumType.
const (
FeatureSet_ENUM_TYPE_UNKNOWN_enum_value = 0
FeatureSet_OPEN_enum_value = 1
FeatureSet_CLOSED_enum_value = 2
)
// Full and short names for google.protobuf.FeatureSet.RepeatedFieldEncoding.
const (
FeatureSet_RepeatedFieldEncoding_enum_fullname = "google.protobuf.FeatureSet.RepeatedFieldEncoding"
FeatureSet_RepeatedFieldEncoding_enum_name = "RepeatedFieldEncoding"
)
// Enum values for google.protobuf.FeatureSet.RepeatedFieldEncoding.
const (
FeatureSet_REPEATED_FIELD_ENCODING_UNKNOWN_enum_value = 0
FeatureSet_PACKED_enum_value = 1
FeatureSet_EXPANDED_enum_value = 2
)
// Full and short names for google.protobuf.FeatureSet.Utf8Validation.
const (
FeatureSet_Utf8Validation_enum_fullname = "google.protobuf.FeatureSet.Utf8Validation"
FeatureSet_Utf8Validation_enum_name = "Utf8Validation"
)
// Enum values for google.protobuf.FeatureSet.Utf8Validation.
const (
FeatureSet_UTF8_VALIDATION_UNKNOWN_enum_value = 0
FeatureSet_VERIFY_enum_value = 2
FeatureSet_NONE_enum_value = 3
)
// Full and short names for google.protobuf.FeatureSet.MessageEncoding.
const (
FeatureSet_MessageEncoding_enum_fullname = "google.protobuf.FeatureSet.MessageEncoding"
FeatureSet_MessageEncoding_enum_name = "MessageEncoding"
)
// Enum values for google.protobuf.FeatureSet.MessageEncoding.
const (
FeatureSet_MESSAGE_ENCODING_UNKNOWN_enum_value = 0
FeatureSet_LENGTH_PREFIXED_enum_value = 1
FeatureSet_DELIMITED_enum_value = 2
)
// Full and short names for google.protobuf.FeatureSet.JsonFormat.
const (
FeatureSet_JsonFormat_enum_fullname = "google.protobuf.FeatureSet.JsonFormat"
FeatureSet_JsonFormat_enum_name = "JsonFormat"
)
// Enum values for google.protobuf.FeatureSet.JsonFormat.
const (
FeatureSet_JSON_FORMAT_UNKNOWN_enum_value = 0
FeatureSet_ALLOW_enum_value = 1
FeatureSet_LEGACY_BEST_EFFORT_enum_value = 2
)
// Names for google.protobuf.FeatureSetDefaults.
const (
FeatureSetDefaults_message_name protoreflect.Name = "FeatureSetDefaults"
@ -1085,3 +1224,10 @@ const (
GeneratedCodeInfo_Annotation_Semantic_enum_fullname = "google.protobuf.GeneratedCodeInfo.Annotation.Semantic"
GeneratedCodeInfo_Annotation_Semantic_enum_name = "Semantic"
)
// Enum values for google.protobuf.GeneratedCodeInfo.Annotation.Semantic.
const (
GeneratedCodeInfo_Annotation_NONE_enum_value = 0
GeneratedCodeInfo_Annotation_SET_enum_value = 1
GeneratedCodeInfo_Annotation_ALIAS_enum_value = 2
)

View file

@ -0,0 +1,31 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated by generate-protos. DO NOT EDIT.
package genid
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
)
const File_reflect_protodesc_proto_go_features_proto = "reflect/protodesc/proto/go_features.proto"
// Names for google.protobuf.GoFeatures.
const (
GoFeatures_message_name protoreflect.Name = "GoFeatures"
GoFeatures_message_fullname protoreflect.FullName = "google.protobuf.GoFeatures"
)
// Field names for google.protobuf.GoFeatures.
const (
GoFeatures_LegacyUnmarshalJsonEnum_field_name protoreflect.Name = "legacy_unmarshal_json_enum"
GoFeatures_LegacyUnmarshalJsonEnum_field_fullname protoreflect.FullName = "google.protobuf.GoFeatures.legacy_unmarshal_json_enum"
)
// Field numbers for google.protobuf.GoFeatures.
const (
GoFeatures_LegacyUnmarshalJsonEnum_field_number protoreflect.FieldNumber = 1
)

View file

@ -18,6 +18,11 @@ const (
NullValue_enum_name = "NullValue"
)
// Enum values for google.protobuf.NullValue.
const (
NullValue_NULL_VALUE_enum_value = 0
)
// Names for google.protobuf.Struct.
const (
Struct_message_name protoreflect.Name = "Struct"

View file

@ -18,6 +18,13 @@ const (
Syntax_enum_name = "Syntax"
)
// Enum values for google.protobuf.Syntax.
const (
Syntax_SYNTAX_PROTO2_enum_value = 0
Syntax_SYNTAX_PROTO3_enum_value = 1
Syntax_SYNTAX_EDITIONS_enum_value = 2
)
// Names for google.protobuf.Type.
const (
Type_message_name protoreflect.Name = "Type"
@ -105,12 +112,43 @@ const (
Field_Kind_enum_name = "Kind"
)
// Enum values for google.protobuf.Field.Kind.
const (
Field_TYPE_UNKNOWN_enum_value = 0
Field_TYPE_DOUBLE_enum_value = 1
Field_TYPE_FLOAT_enum_value = 2
Field_TYPE_INT64_enum_value = 3
Field_TYPE_UINT64_enum_value = 4
Field_TYPE_INT32_enum_value = 5
Field_TYPE_FIXED64_enum_value = 6
Field_TYPE_FIXED32_enum_value = 7
Field_TYPE_BOOL_enum_value = 8
Field_TYPE_STRING_enum_value = 9
Field_TYPE_GROUP_enum_value = 10
Field_TYPE_MESSAGE_enum_value = 11
Field_TYPE_BYTES_enum_value = 12
Field_TYPE_UINT32_enum_value = 13
Field_TYPE_ENUM_enum_value = 14
Field_TYPE_SFIXED32_enum_value = 15
Field_TYPE_SFIXED64_enum_value = 16
Field_TYPE_SINT32_enum_value = 17
Field_TYPE_SINT64_enum_value = 18
)
// Full and short names for google.protobuf.Field.Cardinality.
const (
Field_Cardinality_enum_fullname = "google.protobuf.Field.Cardinality"
Field_Cardinality_enum_name = "Cardinality"
)
// Enum values for google.protobuf.Field.Cardinality.
const (
Field_CARDINALITY_UNKNOWN_enum_value = 0
Field_CARDINALITY_OPTIONAL_enum_value = 1
Field_CARDINALITY_REQUIRED_enum_value = 2
Field_CARDINALITY_REPEATED_enum_value = 3
)
// Names for google.protobuf.Enum.
const (
Enum_message_name protoreflect.Name = "Enum"

View file

@ -21,26 +21,18 @@ type extensionFieldInfo struct {
validation validationInfo
}
var legacyExtensionFieldInfoCache sync.Map // map[protoreflect.ExtensionType]*extensionFieldInfo
func getExtensionFieldInfo(xt protoreflect.ExtensionType) *extensionFieldInfo {
if xi, ok := xt.(*ExtensionInfo); ok {
xi.lazyInit()
return xi.info
}
return legacyLoadExtensionFieldInfo(xt)
}
// legacyLoadExtensionFieldInfo dynamically loads a *ExtensionInfo for xt.
func legacyLoadExtensionFieldInfo(xt protoreflect.ExtensionType) *extensionFieldInfo {
if xi, ok := legacyExtensionFieldInfoCache.Load(xt); ok {
return xi.(*extensionFieldInfo)
}
e := makeExtensionFieldInfo(xt.TypeDescriptor())
if e, ok := legacyMessageTypeCache.LoadOrStore(xt, e); ok {
return e.(*extensionFieldInfo)
}
return e
// Ideally we'd cache the resulting *extensionFieldInfo so we don't have to
// recompute this metadata repeatedly. But without support for something like
// weak references, such a cache would pin temporary values (like dynamic
// extension types, constructed for the duration of a user request) to the
// heap forever, causing memory usage of the cache to grow unbounded.
// See discussion in https://github.com/golang/protobuf/issues/1521.
return makeExtensionFieldInfo(xt.TypeDescriptor())
}
func makeExtensionFieldInfo(xd protoreflect.ExtensionDescriptor) *extensionFieldInfo {

View file

@ -197,7 +197,7 @@ func fieldCoder(fd protoreflect.FieldDescriptor, ft reflect.Type) (*MessageInfo,
return getMessageInfo(ft), makeMessageFieldCoder(fd, ft)
case fd.Kind() == protoreflect.GroupKind:
return getMessageInfo(ft), makeGroupFieldCoder(fd, ft)
case fd.Syntax() == protoreflect.Proto3 && fd.ContainingOneof() == nil:
case !fd.HasPresence() && fd.ContainingOneof() == nil:
// Populated oneof fields always encode even if set to the zero value,
// which normally are not encoded in proto3.
switch fd.Kind() {

View file

@ -538,6 +538,6 @@ func isZero(v reflect.Value) bool {
}
return true
default:
panic(&reflect.ValueError{"reflect.Value.IsZero", v.Kind()})
panic(&reflect.ValueError{Method: "reflect.Value.IsZero", Kind: v.Kind()})
}
}

View file

@ -17,7 +17,7 @@ import (
// EnforceUTF8 reports whether to enforce strict UTF-8 validation.
func EnforceUTF8(fd protoreflect.FieldDescriptor) bool {
if flags.ProtoLegacy {
if flags.ProtoLegacy || fd.Syntax() == protoreflect.Editions {
if fd, ok := fd.(interface{ EnforceUTF8() bool }); ok {
return fd.EnforceUTF8()
}

View file

@ -51,7 +51,7 @@ import (
// 10. Send out the CL for review and submit it.
const (
Major = 1
Minor = 32
Minor = 33
Patch = 0
PreRelease = ""
)

View file

@ -28,6 +28,7 @@ func (r descsByName) initEnumDeclarations(eds []*descriptorpb.EnumDescriptorProt
opts = proto.Clone(opts).(*descriptorpb.EnumOptions)
e.L2.Options = func() protoreflect.ProtoMessage { return opts }
}
e.L1.EditionFeatures = mergeEditionFeatures(parent, ed.GetOptions().GetFeatures())
for _, s := range ed.GetReservedName() {
e.L2.ReservedNames.List = append(e.L2.ReservedNames.List, protoreflect.Name(s))
}
@ -68,6 +69,9 @@ func (r descsByName) initMessagesDeclarations(mds []*descriptorpb.DescriptorProt
if m.L0, err = r.makeBase(m, parent, md.GetName(), i, sb); err != nil {
return nil, err
}
if m.Base.L0.ParentFile.Syntax() == protoreflect.Editions {
m.L1.EditionFeatures = mergeEditionFeatures(parent, md.GetOptions().GetFeatures())
}
if opts := md.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.MessageOptions)
m.L2.Options = func() protoreflect.ProtoMessage { return opts }
@ -114,6 +118,27 @@ func (r descsByName) initMessagesDeclarations(mds []*descriptorpb.DescriptorProt
return ms, nil
}
// canBePacked returns whether the field can use packed encoding:
// https://protobuf.dev/programming-guides/encoding/#packed
func canBePacked(fd *descriptorpb.FieldDescriptorProto) bool {
if fd.GetLabel() != descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
return false // not a repeated field
}
switch protoreflect.Kind(fd.GetType()) {
case protoreflect.MessageKind, protoreflect.GroupKind:
return false // not a scalar type field
case protoreflect.StringKind, protoreflect.BytesKind:
// string and bytes can explicitly not be declared as packed,
// see https://protobuf.dev/programming-guides/encoding/#packed
return false
default:
return true
}
}
func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDescriptorProto, parent protoreflect.Descriptor, sb *strs.Builder) (fs []filedesc.Field, err error) {
fs = make([]filedesc.Field, len(fds)) // allocate up-front to ensure stable pointers
for i, fd := range fds {
@ -139,12 +164,16 @@ func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDesc
}
if f.Base.L0.ParentFile.Syntax() == protoreflect.Editions {
f.L1.Presence = resolveFeatureHasFieldPresence(f.Base.L0.ParentFile, fd)
f.L1.EditionFeatures = mergeEditionFeatures(parent, fd.GetOptions().GetFeatures())
if f.L1.EditionFeatures.IsLegacyRequired {
f.L1.Cardinality = protoreflect.Required
}
// We reuse the existing field because the old option `[packed =
// true]` is mutually exclusive with the editions feature.
if fd.GetLabel() == descriptorpb.FieldDescriptorProto_LABEL_REPEATED {
if canBePacked(fd) {
f.L1.HasPacked = true
f.L1.IsPacked = resolveFeatureRepeatedFieldEncodingPacked(f.Base.L0.ParentFile, fd)
f.L1.IsPacked = f.L1.EditionFeatures.IsPacked
}
// We pretend this option is always explicitly set because the only
@ -155,9 +184,9 @@ func (r descsByName) initFieldsFromDescriptorProto(fds []*descriptorpb.FieldDesc
// requested from the descriptor).
// In proto2/proto3 syntax HasEnforceUTF8 might be false.
f.L1.HasEnforceUTF8 = true
f.L1.EnforceUTF8 = resolveFeatureEnforceUTF8(f.Base.L0.ParentFile, fd)
f.L1.EnforceUTF8 = f.L1.EditionFeatures.IsUTF8Validated
if f.L1.Kind == protoreflect.MessageKind && resolveFeatureDelimitedEncoding(f.Base.L0.ParentFile, fd) {
if f.L1.Kind == protoreflect.MessageKind && f.L1.EditionFeatures.IsDelimitedEncoded {
f.L1.Kind = protoreflect.GroupKind
}
}
@ -175,6 +204,9 @@ func (r descsByName) initOneofsFromDescriptorProto(ods []*descriptorpb.OneofDesc
if opts := od.GetOptions(); opts != nil {
opts = proto.Clone(opts).(*descriptorpb.OneofOptions)
o.L1.Options = func() protoreflect.ProtoMessage { return opts }
if parent.Syntax() == protoreflect.Editions {
o.L1.EditionFeatures = mergeEditionFeatures(parent, opts.GetFeatures())
}
}
}
return os, nil

View file
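
canBePacked encodes the rule from the packed-encoding guide: only repeated scalar fields may be packed, and string/bytes never are. A sketch of what the two wire layouts look like, built with protowire (the field number 4 and the sample values are arbitrary):

package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

func main() {
	values := []uint64{3, 270, 86942}

	// Packed: one length-delimited field holding all varints back to back.
	var packedPayload []byte
	for _, v := range values {
		packedPayload = protowire.AppendVarint(packedPayload, v)
	}
	var packed []byte
	packed = protowire.AppendTag(packed, 4, protowire.BytesType)
	packed = protowire.AppendBytes(packed, packedPayload)

	// Expanded: one tag per element.
	var expanded []byte
	for _, v := range values {
		expanded = protowire.AppendTag(expanded, 4, protowire.VarintType)
		expanded = protowire.AppendVarint(expanded, v)
	}

	fmt.Printf("packed=%d bytes, expanded=%d bytes\n", len(packed), len(expanded))
}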

@ -276,8 +276,8 @@ func unmarshalDefault(s string, fd protoreflect.FieldDescriptor, allowUnresolvab
} else if err != nil {
return v, ev, err
}
if fd.Syntax() == protoreflect.Proto3 {
return v, ev, errors.New("cannot be specified under proto3 semantics")
if !fd.HasPresence() {
return v, ev, errors.New("cannot be specified with implicit field presence")
}
if fd.Kind() == protoreflect.MessageKind || fd.Kind() == protoreflect.GroupKind || fd.Cardinality() == protoreflect.Repeated {
return v, ev, errors.New("cannot be specified on composite types")

View file

@ -107,7 +107,7 @@ func validateMessageDeclarations(ms []filedesc.Message, mds []*descriptorpb.Desc
if isMessageSet && !flags.ProtoLegacy {
return errors.New("message %q is a MessageSet, which is a legacy proto1 feature that is no longer supported", m.FullName())
}
if isMessageSet && (m.Syntax() != protoreflect.Proto2 || m.Fields().Len() > 0 || m.ExtensionRanges().Len() == 0) {
if isMessageSet && (m.Syntax() == protoreflect.Proto3 || m.Fields().Len() > 0 || m.ExtensionRanges().Len() == 0) {
return errors.New("message %q is an invalid proto1 MessageSet", m.FullName())
}
if m.Syntax() == protoreflect.Proto3 {
@ -314,8 +314,8 @@ func checkValidGroup(fd protoreflect.FieldDescriptor) error {
switch {
case fd.Kind() != protoreflect.GroupKind:
return nil
case fd.Syntax() != protoreflect.Proto2:
return errors.New("invalid under proto2 semantics")
case fd.Syntax() == protoreflect.Proto3:
return errors.New("invalid under proto3 semantics")
case md == nil || md.IsPlaceholder():
return errors.New("message must be resolvable")
case fd.FullName().Parent() != md.FullName().Parent():

View file

@ -5,14 +5,16 @@
package protodesc
import (
_ "embed"
"fmt"
"os"
"sync"
"google.golang.org/protobuf/internal/editiondefaults"
"google.golang.org/protobuf/internal/filedesc"
"google.golang.org/protobuf/proto"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/descriptorpb"
gofeaturespb "google.golang.org/protobuf/types/gofeaturespb"
)
const (
@ -20,14 +22,12 @@ const (
SupportedEditionsMaximum = descriptorpb.Edition_EDITION_2023
)
//go:embed editions_defaults.binpb
var binaryEditionDefaults []byte
var defaults = &descriptorpb.FeatureSetDefaults{}
var defaultsCacheMu sync.Mutex
var defaultsCache = make(map[filedesc.Edition]*descriptorpb.FeatureSet)
func init() {
err := proto.Unmarshal(binaryEditionDefaults, defaults)
err := proto.Unmarshal(editiondefaults.Defaults, defaults)
if err != nil {
fmt.Fprintf(os.Stderr, "unmarshal editions defaults: %v\n", err)
os.Exit(1)
@ -83,37 +83,56 @@ func getFeatureSetFor(ed filedesc.Edition) *descriptorpb.FeatureSet {
return fs
}
func resolveFeatureHasFieldPresence(fileDesc *filedesc.File, fieldDesc *descriptorpb.FieldDescriptorProto) bool {
fs := fieldDesc.GetOptions().GetFeatures()
if fs == nil || fs.FieldPresence == nil {
return fileDesc.L1.EditionFeatures.IsFieldPresence
// mergeEditionFeatures merges the parent and child feature sets. This function
// should be used when initializing Go descriptors from descriptor protos which
// is why the parent is a filedesc.EditionFeatures (Go representation) while
// the child is a descriptorpb.FeatureSet (protoc representation).
// Any feature set by the child overwrites what is set by the parent.
func mergeEditionFeatures(parentDesc protoreflect.Descriptor, child *descriptorpb.FeatureSet) filedesc.EditionFeatures {
var parentFS filedesc.EditionFeatures
switch p := parentDesc.(type) {
case *filedesc.File:
parentFS = p.L1.EditionFeatures
case *filedesc.Message:
parentFS = p.L1.EditionFeatures
default:
panic(fmt.Sprintf("unknown parent type %T", parentDesc))
}
if child == nil {
return parentFS
}
if fp := child.FieldPresence; fp != nil {
parentFS.IsFieldPresence = *fp == descriptorpb.FeatureSet_LEGACY_REQUIRED ||
*fp == descriptorpb.FeatureSet_EXPLICIT
parentFS.IsLegacyRequired = *fp == descriptorpb.FeatureSet_LEGACY_REQUIRED
}
if et := child.EnumType; et != nil {
parentFS.IsOpenEnum = *et == descriptorpb.FeatureSet_OPEN
}
return fs.GetFieldPresence() == descriptorpb.FeatureSet_LEGACY_REQUIRED ||
fs.GetFieldPresence() == descriptorpb.FeatureSet_EXPLICIT
}
func resolveFeatureRepeatedFieldEncodingPacked(fileDesc *filedesc.File, fieldDesc *descriptorpb.FieldDescriptorProto) bool {
fs := fieldDesc.GetOptions().GetFeatures()
if fs == nil || fs.RepeatedFieldEncoding == nil {
return fileDesc.L1.EditionFeatures.IsPacked
if rfe := child.RepeatedFieldEncoding; rfe != nil {
parentFS.IsPacked = *rfe == descriptorpb.FeatureSet_PACKED
}
return fs.GetRepeatedFieldEncoding() == descriptorpb.FeatureSet_PACKED
}
func resolveFeatureEnforceUTF8(fileDesc *filedesc.File, fieldDesc *descriptorpb.FieldDescriptorProto) bool {
fs := fieldDesc.GetOptions().GetFeatures()
if fs == nil || fs.Utf8Validation == nil {
return fileDesc.L1.EditionFeatures.IsUTF8Validated
if utf8val := child.Utf8Validation; utf8val != nil {
parentFS.IsUTF8Validated = *utf8val == descriptorpb.FeatureSet_VERIFY
}
return fs.GetUtf8Validation() == descriptorpb.FeatureSet_VERIFY
}
func resolveFeatureDelimitedEncoding(fileDesc *filedesc.File, fieldDesc *descriptorpb.FieldDescriptorProto) bool {
fs := fieldDesc.GetOptions().GetFeatures()
if fs == nil || fs.MessageEncoding == nil {
return fileDesc.L1.EditionFeatures.IsDelimitedEncoded
if me := child.MessageEncoding; me != nil {
parentFS.IsDelimitedEncoded = *me == descriptorpb.FeatureSet_DELIMITED
}
return fs.GetMessageEncoding() == descriptorpb.FeatureSet_DELIMITED
if jf := child.JsonFormat; jf != nil {
parentFS.IsJSONCompliant = *jf == descriptorpb.FeatureSet_ALLOW
}
if goFeatures, ok := proto.GetExtension(child, gofeaturespb.E_Go).(*gofeaturespb.GoFeatures); ok && goFeatures != nil {
if luje := goFeatures.LegacyUnmarshalJsonEnum; luje != nil {
parentFS.GenerateLegacyUnmarshalJSON = *luje
}
}
return parentFS
}
// initFileDescFromFeatureSet initializes editions related fields in fd based
@ -122,56 +141,8 @@ func resolveFeatureDelimitedEncoding(fileDesc *filedesc.File, fieldDesc *descrip
// before calling this function.
func initFileDescFromFeatureSet(fd *filedesc.File, fs *descriptorpb.FeatureSet) {
dfs := getFeatureSetFor(fd.L1.Edition)
if fs == nil {
fs = &descriptorpb.FeatureSet{}
}
var fieldPresence descriptorpb.FeatureSet_FieldPresence
if fp := fs.FieldPresence; fp != nil {
fieldPresence = *fp
} else {
fieldPresence = *dfs.FieldPresence
}
fd.L1.EditionFeatures.IsFieldPresence = fieldPresence == descriptorpb.FeatureSet_LEGACY_REQUIRED ||
fieldPresence == descriptorpb.FeatureSet_EXPLICIT
var enumType descriptorpb.FeatureSet_EnumType
if et := fs.EnumType; et != nil {
enumType = *et
} else {
enumType = *dfs.EnumType
}
fd.L1.EditionFeatures.IsOpenEnum = enumType == descriptorpb.FeatureSet_OPEN
var respeatedFieldEncoding descriptorpb.FeatureSet_RepeatedFieldEncoding
if rfe := fs.RepeatedFieldEncoding; rfe != nil {
respeatedFieldEncoding = *rfe
} else {
respeatedFieldEncoding = *dfs.RepeatedFieldEncoding
}
fd.L1.EditionFeatures.IsPacked = respeatedFieldEncoding == descriptorpb.FeatureSet_PACKED
var isUTF8Validated descriptorpb.FeatureSet_Utf8Validation
if utf8val := fs.Utf8Validation; utf8val != nil {
isUTF8Validated = *utf8val
} else {
isUTF8Validated = *dfs.Utf8Validation
}
fd.L1.EditionFeatures.IsUTF8Validated = isUTF8Validated == descriptorpb.FeatureSet_VERIFY
var messageEncoding descriptorpb.FeatureSet_MessageEncoding
if me := fs.MessageEncoding; me != nil {
messageEncoding = *me
} else {
messageEncoding = *dfs.MessageEncoding
}
fd.L1.EditionFeatures.IsDelimitedEncoded = messageEncoding == descriptorpb.FeatureSet_DELIMITED
var jsonFormat descriptorpb.FeatureSet_JsonFormat
if jf := fs.JsonFormat; jf != nil {
jsonFormat = *jf
} else {
jsonFormat = *dfs.JsonFormat
}
fd.L1.EditionFeatures.IsJSONCompliant = jsonFormat == descriptorpb.FeatureSet_ALLOW
// initialize the featureset with the defaults
fd.L1.EditionFeatures = mergeEditionFeatures(fd, dfs)
// overwrite any options explicitly specified
fd.L1.EditionFeatures = mergeEditionFeatures(fd, fs)
}

View file
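
initFileDescFromFeatureSet now resolves features by merging twice: the defaults for the file's edition first, then the explicitly set options, with the child always winning. A toy illustration of that ordering with hypothetical stand-in types:

package main

import "fmt"

// features is a hypothetical stand-in for filedesc.EditionFeatures.
type features struct {
	FieldPresence string
	JSONFormat    string
}

// overrides mimics descriptorpb.FeatureSet: nil means "not set".
type overrides struct {
	FieldPresence *string
	JSONFormat    *string
}

// merge copies the parent and lets any field set by the child win,
// the same rule mergeEditionFeatures follows.
func merge(parent features, child *overrides) features {
	out := parent
	if child == nil {
		return out
	}
	if child.FieldPresence != nil {
		out.FieldPresence = *child.FieldPresence
	}
	if child.JSONFormat != nil {
		out.JSONFormat = *child.JSONFormat
	}
	return out
}

func main() {
	defaults := features{FieldPresence: "EXPLICIT", JSONFormat: "ALLOW"}
	implicit := "IMPLICIT"
	fileOpts := &overrides{FieldPresence: &implicit}

	// Defaults are applied first, then the explicitly specified options.
	resolved := merge(defaults, fileOpts)
	fmt.Printf("%+v\n", resolved) // {FieldPresence:IMPLICIT JSONFormat:ALLOW}
}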

@ -1,4 +0,0 @@
  (0ж
  (0з
  (0и ж

View file

@ -175,6 +175,8 @@ func (s Syntax) String() string {
return "proto2"
case Proto3:
return "proto3"
case Editions:
return "editions"
default:
return fmt.Sprintf("<unknown:%d>", s)
}

View file

@ -160,8 +160,6 @@ func (p *SourcePath) appendFileOptions(b []byte) []byte {
b = p.appendSingularField(b, "java_generic_services", nil)
case 18:
b = p.appendSingularField(b, "py_generic_services", nil)
case 42:
b = p.appendSingularField(b, "php_generic_services", nil)
case 23:
b = p.appendSingularField(b, "deprecated", nil)
case 31:

File diff suppressed because it is too large

View file

@ -0,0 +1,177 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: reflect/protodesc/proto/go_features.proto
package proto
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
descriptorpb "google.golang.org/protobuf/types/descriptorpb"
reflect "reflect"
sync "sync"
)
type GoFeatures struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Whether or not to generate the deprecated UnmarshalJSON method for enums.
LegacyUnmarshalJsonEnum *bool `protobuf:"varint,1,opt,name=legacy_unmarshal_json_enum,json=legacyUnmarshalJsonEnum" json:"legacy_unmarshal_json_enum,omitempty"`
}
func (x *GoFeatures) Reset() {
*x = GoFeatures{}
if protoimpl.UnsafeEnabled {
mi := &file_reflect_protodesc_proto_go_features_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *GoFeatures) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GoFeatures) ProtoMessage() {}
func (x *GoFeatures) ProtoReflect() protoreflect.Message {
mi := &file_reflect_protodesc_proto_go_features_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GoFeatures.ProtoReflect.Descriptor instead.
func (*GoFeatures) Descriptor() ([]byte, []int) {
return file_reflect_protodesc_proto_go_features_proto_rawDescGZIP(), []int{0}
}
func (x *GoFeatures) GetLegacyUnmarshalJsonEnum() bool {
if x != nil && x.LegacyUnmarshalJsonEnum != nil {
return *x.LegacyUnmarshalJsonEnum
}
return false
}
var file_reflect_protodesc_proto_go_features_proto_extTypes = []protoimpl.ExtensionInfo{
{
ExtendedType: (*descriptorpb.FeatureSet)(nil),
ExtensionType: (*GoFeatures)(nil),
Field: 1002,
Name: "google.protobuf.go",
Tag: "bytes,1002,opt,name=go",
Filename: "reflect/protodesc/proto/go_features.proto",
},
}
// Extension fields to descriptorpb.FeatureSet.
var (
// optional google.protobuf.GoFeatures go = 1002;
E_Go = &file_reflect_protodesc_proto_go_features_proto_extTypes[0]
)
var File_reflect_protodesc_proto_go_features_proto protoreflect.FileDescriptor
var file_reflect_protodesc_proto_go_features_proto_rawDesc = []byte{
0x0a, 0x29, 0x72, 0x65, 0x66, 0x6c, 0x65, 0x63, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x64,
0x65, 0x73, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x5f, 0x66, 0x65, 0x61,
0x74, 0x75, 0x72, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x1a, 0x20, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x65,
0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x6a,
0x0a, 0x0a, 0x47, 0x6f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0x5c, 0x0a, 0x1a,
0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x75, 0x6e, 0x6d, 0x61, 0x72, 0x73, 0x68, 0x61, 0x6c,
0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x75, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08,
0x42, 0x1f, 0x88, 0x01, 0x01, 0x98, 0x01, 0x06, 0xa2, 0x01, 0x09, 0x12, 0x04, 0x74, 0x72, 0x75,
0x65, 0x18, 0xe6, 0x07, 0xa2, 0x01, 0x0a, 0x12, 0x05, 0x66, 0x61, 0x6c, 0x73, 0x65, 0x18, 0xe7,
0x07, 0x52, 0x17, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x55, 0x6e, 0x6d, 0x61, 0x72, 0x73, 0x68,
0x61, 0x6c, 0x4a, 0x73, 0x6f, 0x6e, 0x45, 0x6e, 0x75, 0x6d, 0x3a, 0x49, 0x0a, 0x02, 0x67, 0x6f,
0x12, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
0x75, 0x66, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x18, 0xea, 0x07,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x47, 0x6f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65,
0x73, 0x52, 0x02, 0x67, 0x6f, 0x42, 0x34, 0x5a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x62, 0x75, 0x66, 0x2f, 0x72, 0x65, 0x66, 0x6c, 0x65, 0x63, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x64, 0x65, 0x73, 0x63, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f,
}
var (
file_reflect_protodesc_proto_go_features_proto_rawDescOnce sync.Once
file_reflect_protodesc_proto_go_features_proto_rawDescData = file_reflect_protodesc_proto_go_features_proto_rawDesc
)
func file_reflect_protodesc_proto_go_features_proto_rawDescGZIP() []byte {
file_reflect_protodesc_proto_go_features_proto_rawDescOnce.Do(func() {
file_reflect_protodesc_proto_go_features_proto_rawDescData = protoimpl.X.CompressGZIP(file_reflect_protodesc_proto_go_features_proto_rawDescData)
})
return file_reflect_protodesc_proto_go_features_proto_rawDescData
}
var file_reflect_protodesc_proto_go_features_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_reflect_protodesc_proto_go_features_proto_goTypes = []interface{}{
(*GoFeatures)(nil), // 0: google.protobuf.GoFeatures
(*descriptorpb.FeatureSet)(nil), // 1: google.protobuf.FeatureSet
}
var file_reflect_protodesc_proto_go_features_proto_depIdxs = []int32{
1, // 0: google.protobuf.go:extendee -> google.protobuf.FeatureSet
0, // 1: google.protobuf.go:type_name -> google.protobuf.GoFeatures
2, // [2:2] is the sub-list for method output_type
2, // [2:2] is the sub-list for method input_type
1, // [1:2] is the sub-list for extension type_name
0, // [0:1] is the sub-list for extension extendee
0, // [0:0] is the sub-list for field type_name
}
func init() { file_reflect_protodesc_proto_go_features_proto_init() }
func file_reflect_protodesc_proto_go_features_proto_init() {
if File_reflect_protodesc_proto_go_features_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_reflect_protodesc_proto_go_features_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GoFeatures); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_reflect_protodesc_proto_go_features_proto_rawDesc,
NumEnums: 0,
NumMessages: 1,
NumExtensions: 1,
NumServices: 0,
},
GoTypes: file_reflect_protodesc_proto_go_features_proto_goTypes,
DependencyIndexes: file_reflect_protodesc_proto_go_features_proto_depIdxs,
MessageInfos: file_reflect_protodesc_proto_go_features_proto_msgTypes,
ExtensionInfos: file_reflect_protodesc_proto_go_features_proto_extTypes,
}.Build()
File_reflect_protodesc_proto_go_features_proto = out.File
file_reflect_protodesc_proto_go_features_proto_rawDesc = nil
file_reflect_protodesc_proto_go_features_proto_goTypes = nil
file_reflect_protodesc_proto_go_features_proto_depIdxs = nil
}
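For orientation, the snippet below is an illustrative sketch and not part of the vendored sources: it shows how the GoFeatures extension generated above can be attached to, and read back from, a descriptorpb.FeatureSet. It assumes the public google.golang.org/protobuf/types/gofeaturespb package listed in vendor/modules.txt further down, which exposes the same GoFeatures message and E_Go extension as this generated file.

package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"
	"google.golang.org/protobuf/types/descriptorpb"
	"google.golang.org/protobuf/types/gofeaturespb"
)

func main() {
	fs := &descriptorpb.FeatureSet{}

	// Attach the Go-specific feature block via the E_Go extension (field 1002).
	proto.SetExtension(fs, gofeaturespb.E_Go, &gofeaturespb.GoFeatures{
		LegacyUnmarshalJsonEnum: proto.Bool(true),
	})

	// Read it back; the generated getter returns false when the field is unset.
	if proto.HasExtension(fs, gofeaturespb.E_Go) {
		gf := proto.GetExtension(fs, gofeaturespb.E_Go).(*gofeaturespb.GoFeatures)
		fmt.Println(gf.GetLegacyUnmarshalJsonEnum()) // true
	}
}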


@ -0,0 +1,28 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2023 Google Inc. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd
syntax = "proto2";
package google.protobuf;
import "google/protobuf/descriptor.proto";
option go_package = "google.golang.org/protobuf/types/gofeaturespb";
extend google.protobuf.FeatureSet {
optional GoFeatures go = 1002;
}
message GoFeatures {
// Whether or not to generate the deprecated UnmarshalJSON method for enums.
optional bool legacy_unmarshal_json_enum = 1 [
retention = RETENTION_RUNTIME,
targets = TARGET_TYPE_ENUM,
edition_defaults = { edition: EDITION_PROTO2, value: "true" },
edition_defaults = { edition: EDITION_PROTO3, value: "false" }
];
}

vendor/modules.txt

@ -4,7 +4,7 @@ cloud.google.com/go/internal
cloud.google.com/go/internal/optional
cloud.google.com/go/internal/trace
cloud.google.com/go/internal/version
# cloud.google.com/go/compute v1.24.0
# cloud.google.com/go/compute v1.25.0
## explicit; go 1.19
cloud.google.com/go/compute/internal
# cloud.google.com/go/compute/metadata v0.2.3
@ -112,7 +112,7 @@ github.com/VividCortex/ewma
# github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9
## explicit; go 1.15
github.com/alecthomas/units
# github.com/aws/aws-sdk-go v1.50.29
# github.com/aws/aws-sdk-go v1.50.32
## explicit; go 1.19
github.com/aws/aws-sdk-go/aws
github.com/aws/aws-sdk-go/aws/auth/bearer
@ -188,10 +188,10 @@ github.com/aws/aws-sdk-go-v2/internal/timeconv
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream/eventstreamapi
# github.com/aws/aws-sdk-go-v2/config v1.27.4
# github.com/aws/aws-sdk-go-v2/config v1.27.6
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/config
# github.com/aws/aws-sdk-go-v2/credentials v1.17.4
# github.com/aws/aws-sdk-go-v2/credentials v1.17.6
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/credentials
github.com/aws/aws-sdk-go-v2/credentials/ec2rolecreds
@ -204,7 +204,7 @@ github.com/aws/aws-sdk-go-v2/credentials/stscreds
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/feature/ec2/imds
github.com/aws/aws-sdk-go-v2/feature/ec2/imds/internal/config
# github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.6
# github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.16.8
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/feature/s3/manager
# github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.2
@ -224,10 +224,10 @@ github.com/aws/aws-sdk-go-v2/internal/v4a/internal/v4
# github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding
# github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.2
# github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.3.4
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/service/internal/checksum
# github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.2
# github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.11.4
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url
# github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.17.2
@ -235,7 +235,7 @@ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url
github.com/aws/aws-sdk-go-v2/service/internal/s3shared
github.com/aws/aws-sdk-go-v2/service/internal/s3shared/arn
github.com/aws/aws-sdk-go-v2/service/internal/s3shared/config
# github.com/aws/aws-sdk-go-v2/service/s3 v1.51.1
# github.com/aws/aws-sdk-go-v2/service/s3 v1.51.3
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/service/s3
github.com/aws/aws-sdk-go-v2/service/s3/internal/arn
@ -252,7 +252,7 @@ github.com/aws/aws-sdk-go-v2/service/sso/types
github.com/aws/aws-sdk-go-v2/service/ssooidc
github.com/aws/aws-sdk-go-v2/service/ssooidc/internal/endpoints
github.com/aws/aws-sdk-go-v2/service/ssooidc/types
# github.com/aws/aws-sdk-go-v2/service/sts v1.28.1
# github.com/aws/aws-sdk-go-v2/service/sts v1.28.3
## explicit; go 1.20
github.com/aws/aws-sdk-go-v2/service/sts
github.com/aws/aws-sdk-go-v2/service/sts/internal/endpoints
@ -337,14 +337,14 @@ github.com/gogo/protobuf/proto
github.com/gogo/protobuf/protoc-gen-gogo/descriptor
github.com/gogo/protobuf/sortkeys
github.com/gogo/protobuf/types
# github.com/golang-jwt/jwt/v5 v5.2.0
# github.com/golang-jwt/jwt/v5 v5.2.1
## explicit; go 1.18
github.com/golang-jwt/jwt/v5
# github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
## explicit
github.com/golang/groupcache/lru
# github.com/golang/protobuf v1.5.3
## explicit; go 1.9
# github.com/golang/protobuf v1.5.4
## explicit; go 1.17
github.com/golang/protobuf/proto
# github.com/golang/snappy v0.0.4
## explicit
@ -528,8 +528,8 @@ github.com/rivo/uniseg
# github.com/russross/blackfriday/v2 v2.1.0
## explicit
github.com/russross/blackfriday/v2
# github.com/stretchr/testify v1.8.4
## explicit; go 1.20
# github.com/stretchr/testify v1.9.0
## explicit; go 1.17
github.com/stretchr/testify/assert
github.com/stretchr/testify/require
# github.com/urfave/cli/v2 v2.27.1
@ -579,10 +579,10 @@ go.opencensus.io/trace
go.opencensus.io/trace/internal
go.opencensus.io/trace/propagation
go.opencensus.io/trace/tracestate
# go.opentelemetry.io/collector/featuregate v1.2.0
# go.opentelemetry.io/collector/featuregate v1.3.0
## explicit; go 1.21
go.opentelemetry.io/collector/featuregate
# go.opentelemetry.io/collector/pdata v1.2.0
# go.opentelemetry.io/collector/pdata v1.3.0
## explicit; go 1.21
go.opentelemetry.io/collector/pdata/internal
go.opentelemetry.io/collector/pdata/internal/data
@ -599,7 +599,7 @@ go.opentelemetry.io/collector/pdata/internal/otlp
go.opentelemetry.io/collector/pdata/pcommon
go.opentelemetry.io/collector/pdata/pmetric
go.opentelemetry.io/collector/pdata/pmetric/pmetricotlp
# go.opentelemetry.io/collector/semconv v0.95.0
# go.opentelemetry.io/collector/semconv v0.96.0
## explicit; go 1.21
go.opentelemetry.io/collector/semconv/v1.6.1
# go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
@ -643,7 +643,7 @@ go.uber.org/goleak/internal/stack
# go.uber.org/multierr v1.11.0
## explicit; go 1.19
go.uber.org/multierr
# golang.org/x/crypto v0.20.0
# golang.org/x/crypto v0.21.0
## explicit; go 1.18
golang.org/x/crypto/chacha20
golang.org/x/crypto/chacha20poly1305
@ -658,7 +658,7 @@ golang.org/x/crypto/pkcs12/internal/rc2
## explicit; go 1.20
golang.org/x/exp/constraints
golang.org/x/exp/slices
# golang.org/x/net v0.21.0
# golang.org/x/net v0.22.0
## explicit; go 1.18
golang.org/x/net/http/httpguts
golang.org/x/net/http/httpproxy
@ -667,14 +667,15 @@ golang.org/x/net/http2/hpack
golang.org/x/net/idna
golang.org/x/net/internal/timeseries
golang.org/x/net/trace
# golang.org/x/oauth2 v0.17.0
# golang.org/x/oauth2 v0.18.0
## explicit; go 1.18
golang.org/x/oauth2
golang.org/x/oauth2/authhandler
golang.org/x/oauth2/clientcredentials
golang.org/x/oauth2/google
golang.org/x/oauth2/google/internal/externalaccount
golang.org/x/oauth2/google/externalaccount
golang.org/x/oauth2/google/internal/externalaccountauthorizeduser
golang.org/x/oauth2/google/internal/impersonate
golang.org/x/oauth2/google/internal/stsexchange
golang.org/x/oauth2/internal
golang.org/x/oauth2/jws
@ -683,7 +684,7 @@ golang.org/x/oauth2/jwt
## explicit; go 1.18
golang.org/x/sync/errgroup
golang.org/x/sync/semaphore
# golang.org/x/sys v0.17.0
# golang.org/x/sys v0.18.0
## explicit; go 1.18
golang.org/x/sys/cpu
golang.org/x/sys/unix
@ -697,7 +698,7 @@ golang.org/x/text/unicode/norm
# golang.org/x/time v0.5.0
## explicit; go 1.18
golang.org/x/time/rate
# google.golang.org/api v0.167.0
# google.golang.org/api v0.168.0
## explicit; go 1.19
google.golang.org/api/googleapi
google.golang.org/api/googleapi/transport
@ -727,20 +728,20 @@ google.golang.org/appengine/internal/modules
google.golang.org/appengine/internal/remote_api
google.golang.org/appengine/internal/urlfetch
google.golang.org/appengine/urlfetch
# google.golang.org/genproto v0.0.0-20240228224816-df926f6c8641
# google.golang.org/genproto v0.0.0-20240304212257-790db918fca8
## explicit; go 1.19
google.golang.org/genproto/googleapis/type/date
google.golang.org/genproto/googleapis/type/expr
# google.golang.org/genproto/googleapis/api v0.0.0-20240228224816-df926f6c8641
# google.golang.org/genproto/googleapis/api v0.0.0-20240304212257-790db918fca8
## explicit; go 1.19
google.golang.org/genproto/googleapis/api
google.golang.org/genproto/googleapis/api/annotations
# google.golang.org/genproto/googleapis/rpc v0.0.0-20240228224816-df926f6c8641
# google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8
## explicit; go 1.19
google.golang.org/genproto/googleapis/rpc/code
google.golang.org/genproto/googleapis/rpc/errdetails
google.golang.org/genproto/googleapis/rpc/status
# google.golang.org/grpc v1.62.0
# google.golang.org/grpc v1.62.1
## explicit; go 1.19
google.golang.org/grpc
google.golang.org/grpc/attributes
@ -806,7 +807,7 @@ google.golang.org/grpc/serviceconfig
google.golang.org/grpc/stats
google.golang.org/grpc/status
google.golang.org/grpc/tap
# google.golang.org/protobuf v1.32.0
# google.golang.org/protobuf v1.33.0
## explicit; go 1.17
google.golang.org/protobuf/encoding/protodelim
google.golang.org/protobuf/encoding/protojson
@ -815,6 +816,7 @@ google.golang.org/protobuf/encoding/protowire
google.golang.org/protobuf/internal/descfmt
google.golang.org/protobuf/internal/descopts
google.golang.org/protobuf/internal/detrand
google.golang.org/protobuf/internal/editiondefaults
google.golang.org/protobuf/internal/encoding/defval
google.golang.org/protobuf/internal/encoding/json
google.golang.org/protobuf/internal/encoding/messageset
@ -839,6 +841,7 @@ google.golang.org/protobuf/reflect/protoregistry
google.golang.org/protobuf/runtime/protoiface
google.golang.org/protobuf/runtime/protoimpl
google.golang.org/protobuf/types/descriptorpb
google.golang.org/protobuf/types/gofeaturespb
google.golang.org/protobuf/types/known/anypb
google.golang.org/protobuf/types/known/durationpb
google.golang.org/protobuf/types/known/emptypb