auto-regenerate .pb.go files (#286)
This is an auto-generated regeneration of the .pb.go files by
cloud.google.com/go/internal/gapicgen. Once this PR is submitted, autogen will
update the corresponding CL at gocloud to depend on the newer version of
go-genproto, and assign reviewers. Whilst this or any regen PR is open in
go-genproto, gapicgen will not create any more regeneration PRs or CLs. If all
regen PRs are closed, gapicgen will create a new set of regeneration PRs and
CLs once per night.
If you have been assigned to review this CL, please:
- Ensure that CI is passing. If it's failing, it requires your manual attention.
- Approve and submit this PR if you believe it's ready to ship. That will prompt
gapicgen to assign reviewers to the gocloud CL.
Corresponding gocloud CL: https://code-review.googlesource.com/c/gocloud/+/49930
diff --git a/googleapis/cloud/bigquery/storage/v1alpha2/protobuf.pb.go b/googleapis/cloud/bigquery/storage/v1alpha2/protobuf.pb.go
new file mode 100644
index 0000000..510ad52
--- /dev/null
+++ b/googleapis/cloud/bigquery/storage/v1alpha2/protobuf.pb.go
@@ -0,0 +1,142 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/cloud/bigquery/storage/v1alpha2/protobuf.proto
+
+package storage
+
+import (
+ fmt "fmt"
+ math "math"
+
+ proto "github.com/golang/protobuf/proto"
+ descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+
+// Protobuf schema is an API presentation the proto buffer schema.
+type ProtoSchema struct {
+ // Message descriptor for the data. The descriptor has to be self contained
+ // to include all the nested type definition, excepted for proto buffer well
+ // known types
+ // (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
+ // and zetasql public protos
+ // (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
+ ProtoDescriptor *descriptor.DescriptorProto `protobuf:"bytes,1,opt,name=proto_descriptor,json=protoDescriptor,proto3" json:"proto_descriptor,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ProtoSchema) Reset() { *m = ProtoSchema{} }
+func (m *ProtoSchema) String() string { return proto.CompactTextString(m) }
+func (*ProtoSchema) ProtoMessage() {}
+func (*ProtoSchema) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f22c37f871902e39, []int{0}
+}
+
+func (m *ProtoSchema) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_ProtoSchema.Unmarshal(m, b)
+}
+func (m *ProtoSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_ProtoSchema.Marshal(b, m, deterministic)
+}
+func (m *ProtoSchema) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_ProtoSchema.Merge(m, src)
+}
+func (m *ProtoSchema) XXX_Size() int {
+ return xxx_messageInfo_ProtoSchema.Size(m)
+}
+func (m *ProtoSchema) XXX_DiscardUnknown() {
+ xxx_messageInfo_ProtoSchema.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ProtoSchema proto.InternalMessageInfo
+
+func (m *ProtoSchema) GetProtoDescriptor() *descriptor.DescriptorProto {
+ if m != nil {
+ return m.ProtoDescriptor
+ }
+ return nil
+}
+
+// Protobuf rows.
+type ProtoRows struct {
+ // A sequence of rows serialized as a Protocol Buffer.
+ //
+ // See https://developers.google.com/protocol-buffers/docs/overview for more
+ // information on deserializing this field.
+ SerializedRows [][]byte `protobuf:"bytes,1,rep,name=serialized_rows,json=serializedRows,proto3" json:"serialized_rows,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *ProtoRows) Reset() { *m = ProtoRows{} }
+func (m *ProtoRows) String() string { return proto.CompactTextString(m) }
+func (*ProtoRows) ProtoMessage() {}
+func (*ProtoRows) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f22c37f871902e39, []int{1}
+}
+
+func (m *ProtoRows) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_ProtoRows.Unmarshal(m, b)
+}
+func (m *ProtoRows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_ProtoRows.Marshal(b, m, deterministic)
+}
+func (m *ProtoRows) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_ProtoRows.Merge(m, src)
+}
+func (m *ProtoRows) XXX_Size() int {
+ return xxx_messageInfo_ProtoRows.Size(m)
+}
+func (m *ProtoRows) XXX_DiscardUnknown() {
+ xxx_messageInfo_ProtoRows.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ProtoRows proto.InternalMessageInfo
+
+func (m *ProtoRows) GetSerializedRows() [][]byte {
+ if m != nil {
+ return m.SerializedRows
+ }
+ return nil
+}
+
+func init() {
+ proto.RegisterType((*ProtoSchema)(nil), "google.cloud.bigquery.storage.v1alpha2.ProtoSchema")
+ proto.RegisterType((*ProtoRows)(nil), "google.cloud.bigquery.storage.v1alpha2.ProtoRows")
+}
+
+func init() {
+ proto.RegisterFile("google/cloud/bigquery/storage/v1alpha2/protobuf.proto", fileDescriptor_f22c37f871902e39)
+}
+
+var fileDescriptor_f22c37f871902e39 = []byte{
+ // 252 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0x41, 0x4b, 0xc3, 0x40,
+ 0x10, 0x85, 0x09, 0x82, 0xe0, 0x56, 0xad, 0xe4, 0x54, 0x3c, 0x85, 0x1e, 0xb4, 0x78, 0xd8, 0xc5,
+ 0xaa, 0x27, 0x6f, 0xc1, 0x9b, 0x08, 0x52, 0x6f, 0xbd, 0x94, 0x4d, 0xb2, 0x9d, 0x2e, 0x6c, 0x3b,
+ 0x71, 0x36, 0xb1, 0xe8, 0x4f, 0xf0, 0x57, 0x4b, 0x67, 0x37, 0xc9, 0xd1, 0x9e, 0x32, 0xbc, 0xf7,
+ 0xbe, 0x37, 0x93, 0x15, 0x4f, 0x80, 0x08, 0xce, 0xa8, 0xd2, 0x61, 0x5b, 0xa9, 0xc2, 0xc2, 0x67,
+ 0x6b, 0xe8, 0x5b, 0xf9, 0x06, 0x49, 0x83, 0x51, 0x5f, 0xf7, 0xda, 0xd5, 0x1b, 0x3d, 0x57, 0x35,
+ 0x61, 0x83, 0x45, 0xbb, 0x96, 0x3c, 0xa4, 0x37, 0x01, 0x93, 0x8c, 0xc9, 0x0e, 0x93, 0x11, 0x93,
+ 0x1d, 0x76, 0x9d, 0xc5, 0xfa, 0x0e, 0x57, 0x95, 0xf1, 0x25, 0xd9, 0xba, 0x41, 0x0a, 0x4d, 0xd3,
+ 0xa5, 0x18, 0xbd, 0x1f, 0x86, 0x8f, 0x72, 0x63, 0xb6, 0x3a, 0x7d, 0x15, 0x57, 0xac, 0xaf, 0x86,
+ 0xe0, 0x24, 0xc9, 0x92, 0xd9, 0x68, 0x9e, 0xc9, 0xb8, 0xb3, 0x3f, 0xe5, 0xa5, 0x8f, 0x70, 0xc3,
+ 0x62, 0xcc, 0xce, 0xa0, 0x4e, 0x1f, 0xc5, 0x59, 0x70, 0x70, 0xef, 0xd3, 0x5b, 0x31, 0xf6, 0x86,
+ 0xac, 0x76, 0xf6, 0xc7, 0x54, 0x2b, 0xc2, 0xbd, 0x9f, 0x24, 0xd9, 0xc9, 0xec, 0x7c, 0x71, 0x39,
+ 0xc8, 0x87, 0x60, 0xfe, 0x9b, 0x88, 0xbb, 0x12, 0xb7, 0xf2, 0xb8, 0x5f, 0xcc, 0x2f, 0x78, 0x45,
+ 0xde, 0xae, 0xf9, 0xbb, 0x7c, 0x8b, 0x18, 0xa0, 0xd3, 0x3b, 0x90, 0x48, 0xa0, 0xc0, 0xec, 0xf8,
+ 0x32, 0x15, 0x2c, 0x5d, 0x5b, 0xff, 0xdf, 0x7b, 0x3f, 0x47, 0xa1, 0x38, 0x65, 0xf2, 0xe1, 0x2f,
+ 0x00, 0x00, 0xff, 0xff, 0x9b, 0x23, 0x20, 0x08, 0xa8, 0x01, 0x00, 0x00,
+}
diff --git a/googleapis/cloud/bigquery/storage/v1alpha2/storage.pb.go b/googleapis/cloud/bigquery/storage/v1alpha2/storage.pb.go
new file mode 100644
index 0000000..df26d01
--- /dev/null
+++ b/googleapis/cloud/bigquery/storage/v1alpha2/storage.pb.go
@@ -0,0 +1,911 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/cloud/bigquery/storage/v1alpha2/storage.proto
+
+package storage
+
+import (
+ context "context"
+ fmt "fmt"
+ math "math"
+
+ proto "github.com/golang/protobuf/proto"
+ _ "github.com/golang/protobuf/ptypes/empty"
+ timestamp "github.com/golang/protobuf/ptypes/timestamp"
+ wrappers "github.com/golang/protobuf/ptypes/wrappers"
+ _ "google.golang.org/genproto/googleapis/api/annotations"
+ status "google.golang.org/genproto/googleapis/rpc/status"
+ grpc "google.golang.org/grpc"
+ codes "google.golang.org/grpc/codes"
+ status1 "google.golang.org/grpc/status"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+
+// Request message for `CreateWriteStream`.
+type CreateWriteStreamRequest struct {
+ // Required. Reference to the table to which the stream belongs, in the format
+ // of `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
+ // Required. Stream to be created.
+ WriteStream *WriteStream `protobuf:"bytes,2,opt,name=write_stream,json=writeStream,proto3" json:"write_stream,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *CreateWriteStreamRequest) Reset() { *m = CreateWriteStreamRequest{} }
+func (m *CreateWriteStreamRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateWriteStreamRequest) ProtoMessage() {}
+func (*CreateWriteStreamRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{0}
+}
+
+func (m *CreateWriteStreamRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_CreateWriteStreamRequest.Unmarshal(m, b)
+}
+func (m *CreateWriteStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_CreateWriteStreamRequest.Marshal(b, m, deterministic)
+}
+func (m *CreateWriteStreamRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_CreateWriteStreamRequest.Merge(m, src)
+}
+func (m *CreateWriteStreamRequest) XXX_Size() int {
+ return xxx_messageInfo_CreateWriteStreamRequest.Size(m)
+}
+func (m *CreateWriteStreamRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_CreateWriteStreamRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateWriteStreamRequest proto.InternalMessageInfo
+
+func (m *CreateWriteStreamRequest) GetParent() string {
+ if m != nil {
+ return m.Parent
+ }
+ return ""
+}
+
+func (m *CreateWriteStreamRequest) GetWriteStream() *WriteStream {
+ if m != nil {
+ return m.WriteStream
+ }
+ return nil
+}
+
+// Request message for `AppendRows`.
+type AppendRowsRequest struct {
+ // Required. The stream that is the target of the append operation. This value must be
+ // specified for the initial request. If subsequent requests specify the
+ // stream name, it must equal to the value provided in the first request.
+ WriteStream string `protobuf:"bytes,1,opt,name=write_stream,json=writeStream,proto3" json:"write_stream,omitempty"`
+ // Optional. If present, the write is only performed if the next append offset is same
+ // as the provided value. If not present, the write is performed at the
+ // current end of stream.
+ Offset *wrappers.Int64Value `protobuf:"bytes,2,opt,name=offset,proto3" json:"offset,omitempty"`
+ // Input rows. The `writer_schema` field must be specified at the initial
+ // request and currently, it will be ignored if specified in following
+ // requests. Following requests must have data in the same format as the
+ // initial request.
+ //
+ // Types that are valid to be assigned to Rows:
+ // *AppendRowsRequest_ProtoRows
+ Rows isAppendRowsRequest_Rows `protobuf_oneof:"rows"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AppendRowsRequest) Reset() { *m = AppendRowsRequest{} }
+func (m *AppendRowsRequest) String() string { return proto.CompactTextString(m) }
+func (*AppendRowsRequest) ProtoMessage() {}
+func (*AppendRowsRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{1}
+}
+
+func (m *AppendRowsRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AppendRowsRequest.Unmarshal(m, b)
+}
+func (m *AppendRowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AppendRowsRequest.Marshal(b, m, deterministic)
+}
+func (m *AppendRowsRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AppendRowsRequest.Merge(m, src)
+}
+func (m *AppendRowsRequest) XXX_Size() int {
+ return xxx_messageInfo_AppendRowsRequest.Size(m)
+}
+func (m *AppendRowsRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_AppendRowsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AppendRowsRequest proto.InternalMessageInfo
+
+func (m *AppendRowsRequest) GetWriteStream() string {
+ if m != nil {
+ return m.WriteStream
+ }
+ return ""
+}
+
+func (m *AppendRowsRequest) GetOffset() *wrappers.Int64Value {
+ if m != nil {
+ return m.Offset
+ }
+ return nil
+}
+
+type isAppendRowsRequest_Rows interface {
+ isAppendRowsRequest_Rows()
+}
+
+type AppendRowsRequest_ProtoRows struct {
+ ProtoRows *AppendRowsRequest_ProtoData `protobuf:"bytes,4,opt,name=proto_rows,json=protoRows,proto3,oneof"`
+}
+
+func (*AppendRowsRequest_ProtoRows) isAppendRowsRequest_Rows() {}
+
+func (m *AppendRowsRequest) GetRows() isAppendRowsRequest_Rows {
+ if m != nil {
+ return m.Rows
+ }
+ return nil
+}
+
+func (m *AppendRowsRequest) GetProtoRows() *AppendRowsRequest_ProtoData {
+ if x, ok := m.GetRows().(*AppendRowsRequest_ProtoRows); ok {
+ return x.ProtoRows
+ }
+ return nil
+}
+
+// XXX_OneofWrappers is for the internal use of the proto package.
+func (*AppendRowsRequest) XXX_OneofWrappers() []interface{} {
+ return []interface{}{
+ (*AppendRowsRequest_ProtoRows)(nil),
+ }
+}
+
+type AppendRowsRequest_ProtoData struct {
+ // Proto schema used to serialize the data.
+ WriterSchema *ProtoSchema `protobuf:"bytes,1,opt,name=writer_schema,json=writerSchema,proto3" json:"writer_schema,omitempty"`
+ // Serialized row data in protobuf message format.
+ Rows *ProtoRows `protobuf:"bytes,2,opt,name=rows,proto3" json:"rows,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AppendRowsRequest_ProtoData) Reset() { *m = AppendRowsRequest_ProtoData{} }
+func (m *AppendRowsRequest_ProtoData) String() string { return proto.CompactTextString(m) }
+func (*AppendRowsRequest_ProtoData) ProtoMessage() {}
+func (*AppendRowsRequest_ProtoData) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{1, 0}
+}
+
+func (m *AppendRowsRequest_ProtoData) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AppendRowsRequest_ProtoData.Unmarshal(m, b)
+}
+func (m *AppendRowsRequest_ProtoData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AppendRowsRequest_ProtoData.Marshal(b, m, deterministic)
+}
+func (m *AppendRowsRequest_ProtoData) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AppendRowsRequest_ProtoData.Merge(m, src)
+}
+func (m *AppendRowsRequest_ProtoData) XXX_Size() int {
+ return xxx_messageInfo_AppendRowsRequest_ProtoData.Size(m)
+}
+func (m *AppendRowsRequest_ProtoData) XXX_DiscardUnknown() {
+ xxx_messageInfo_AppendRowsRequest_ProtoData.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AppendRowsRequest_ProtoData proto.InternalMessageInfo
+
+func (m *AppendRowsRequest_ProtoData) GetWriterSchema() *ProtoSchema {
+ if m != nil {
+ return m.WriterSchema
+ }
+ return nil
+}
+
+func (m *AppendRowsRequest_ProtoData) GetRows() *ProtoRows {
+ if m != nil {
+ return m.Rows
+ }
+ return nil
+}
+
+// Response message for `AppendRows`.
+type AppendRowsResponse struct {
+ // Types that are valid to be assigned to Response:
+ // *AppendRowsResponse_Offset
+ // *AppendRowsResponse_Error
+ Response isAppendRowsResponse_Response `protobuf_oneof:"response"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *AppendRowsResponse) Reset() { *m = AppendRowsResponse{} }
+func (m *AppendRowsResponse) String() string { return proto.CompactTextString(m) }
+func (*AppendRowsResponse) ProtoMessage() {}
+func (*AppendRowsResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{2}
+}
+
+func (m *AppendRowsResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_AppendRowsResponse.Unmarshal(m, b)
+}
+func (m *AppendRowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_AppendRowsResponse.Marshal(b, m, deterministic)
+}
+func (m *AppendRowsResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_AppendRowsResponse.Merge(m, src)
+}
+func (m *AppendRowsResponse) XXX_Size() int {
+ return xxx_messageInfo_AppendRowsResponse.Size(m)
+}
+func (m *AppendRowsResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_AppendRowsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AppendRowsResponse proto.InternalMessageInfo
+
+type isAppendRowsResponse_Response interface {
+ isAppendRowsResponse_Response()
+}
+
+type AppendRowsResponse_Offset struct {
+ Offset int64 `protobuf:"varint,1,opt,name=offset,proto3,oneof"`
+}
+
+type AppendRowsResponse_Error struct {
+ Error *status.Status `protobuf:"bytes,2,opt,name=error,proto3,oneof"`
+}
+
+func (*AppendRowsResponse_Offset) isAppendRowsResponse_Response() {}
+
+func (*AppendRowsResponse_Error) isAppendRowsResponse_Response() {}
+
+func (m *AppendRowsResponse) GetResponse() isAppendRowsResponse_Response {
+ if m != nil {
+ return m.Response
+ }
+ return nil
+}
+
+func (m *AppendRowsResponse) GetOffset() int64 {
+ if x, ok := m.GetResponse().(*AppendRowsResponse_Offset); ok {
+ return x.Offset
+ }
+ return 0
+}
+
+func (m *AppendRowsResponse) GetError() *status.Status {
+ if x, ok := m.GetResponse().(*AppendRowsResponse_Error); ok {
+ return x.Error
+ }
+ return nil
+}
+
+// XXX_OneofWrappers is for the internal use of the proto package.
+func (*AppendRowsResponse) XXX_OneofWrappers() []interface{} {
+ return []interface{}{
+ (*AppendRowsResponse_Offset)(nil),
+ (*AppendRowsResponse_Error)(nil),
+ }
+}
+
+// Request message for `GetWriteStreamRequest`.
+type GetWriteStreamRequest struct {
+ // Required. Name of the stream to get, in the form of
+ // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *GetWriteStreamRequest) Reset() { *m = GetWriteStreamRequest{} }
+func (m *GetWriteStreamRequest) String() string { return proto.CompactTextString(m) }
+func (*GetWriteStreamRequest) ProtoMessage() {}
+func (*GetWriteStreamRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{3}
+}
+
+func (m *GetWriteStreamRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_GetWriteStreamRequest.Unmarshal(m, b)
+}
+func (m *GetWriteStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_GetWriteStreamRequest.Marshal(b, m, deterministic)
+}
+func (m *GetWriteStreamRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_GetWriteStreamRequest.Merge(m, src)
+}
+func (m *GetWriteStreamRequest) XXX_Size() int {
+ return xxx_messageInfo_GetWriteStreamRequest.Size(m)
+}
+func (m *GetWriteStreamRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_GetWriteStreamRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetWriteStreamRequest proto.InternalMessageInfo
+
+func (m *GetWriteStreamRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// Request message for `BatchCommitWriteStreams`.
+type BatchCommitWriteStreamsRequest struct {
+ // Required. Parent table that all the streams should belong to, in the form of
+ // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}`.
+ Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
+ // Required. The group of streams that will be committed atomically.
+ WriteStreams []string `protobuf:"bytes,2,rep,name=write_streams,json=writeStreams,proto3" json:"write_streams,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *BatchCommitWriteStreamsRequest) Reset() { *m = BatchCommitWriteStreamsRequest{} }
+func (m *BatchCommitWriteStreamsRequest) String() string { return proto.CompactTextString(m) }
+func (*BatchCommitWriteStreamsRequest) ProtoMessage() {}
+func (*BatchCommitWriteStreamsRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{4}
+}
+
+func (m *BatchCommitWriteStreamsRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_BatchCommitWriteStreamsRequest.Unmarshal(m, b)
+}
+func (m *BatchCommitWriteStreamsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_BatchCommitWriteStreamsRequest.Marshal(b, m, deterministic)
+}
+func (m *BatchCommitWriteStreamsRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_BatchCommitWriteStreamsRequest.Merge(m, src)
+}
+func (m *BatchCommitWriteStreamsRequest) XXX_Size() int {
+ return xxx_messageInfo_BatchCommitWriteStreamsRequest.Size(m)
+}
+func (m *BatchCommitWriteStreamsRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_BatchCommitWriteStreamsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BatchCommitWriteStreamsRequest proto.InternalMessageInfo
+
+func (m *BatchCommitWriteStreamsRequest) GetParent() string {
+ if m != nil {
+ return m.Parent
+ }
+ return ""
+}
+
+func (m *BatchCommitWriteStreamsRequest) GetWriteStreams() []string {
+ if m != nil {
+ return m.WriteStreams
+ }
+ return nil
+}
+
+// Response message for `BatchCommitWriteStreams`.
+type BatchCommitWriteStreamsResponse struct {
+ // The time at which streams were committed in microseconds granularity.
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,1,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *BatchCommitWriteStreamsResponse) Reset() { *m = BatchCommitWriteStreamsResponse{} }
+func (m *BatchCommitWriteStreamsResponse) String() string { return proto.CompactTextString(m) }
+func (*BatchCommitWriteStreamsResponse) ProtoMessage() {}
+func (*BatchCommitWriteStreamsResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{5}
+}
+
+func (m *BatchCommitWriteStreamsResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_BatchCommitWriteStreamsResponse.Unmarshal(m, b)
+}
+func (m *BatchCommitWriteStreamsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_BatchCommitWriteStreamsResponse.Marshal(b, m, deterministic)
+}
+func (m *BatchCommitWriteStreamsResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_BatchCommitWriteStreamsResponse.Merge(m, src)
+}
+func (m *BatchCommitWriteStreamsResponse) XXX_Size() int {
+ return xxx_messageInfo_BatchCommitWriteStreamsResponse.Size(m)
+}
+func (m *BatchCommitWriteStreamsResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_BatchCommitWriteStreamsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BatchCommitWriteStreamsResponse proto.InternalMessageInfo
+
+func (m *BatchCommitWriteStreamsResponse) GetCommitTime() *timestamp.Timestamp {
+ if m != nil {
+ return m.CommitTime
+ }
+ return nil
+}
+
+// Request message for invoking `FinalizeWriteStream`.
+type FinalizeWriteStreamRequest struct {
+ // Required. Name of the stream to finalize, in the form of
+ // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *FinalizeWriteStreamRequest) Reset() { *m = FinalizeWriteStreamRequest{} }
+func (m *FinalizeWriteStreamRequest) String() string { return proto.CompactTextString(m) }
+func (*FinalizeWriteStreamRequest) ProtoMessage() {}
+func (*FinalizeWriteStreamRequest) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{6}
+}
+
+func (m *FinalizeWriteStreamRequest) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_FinalizeWriteStreamRequest.Unmarshal(m, b)
+}
+func (m *FinalizeWriteStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_FinalizeWriteStreamRequest.Marshal(b, m, deterministic)
+}
+func (m *FinalizeWriteStreamRequest) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_FinalizeWriteStreamRequest.Merge(m, src)
+}
+func (m *FinalizeWriteStreamRequest) XXX_Size() int {
+ return xxx_messageInfo_FinalizeWriteStreamRequest.Size(m)
+}
+func (m *FinalizeWriteStreamRequest) XXX_DiscardUnknown() {
+ xxx_messageInfo_FinalizeWriteStreamRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_FinalizeWriteStreamRequest proto.InternalMessageInfo
+
+func (m *FinalizeWriteStreamRequest) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+// Response message for `FinalizeWriteStream`.
+type FinalizeWriteStreamResponse struct {
+ // Number of rows in the finalized stream.
+ RowCount int64 `protobuf:"varint,1,opt,name=row_count,json=rowCount,proto3" json:"row_count,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *FinalizeWriteStreamResponse) Reset() { *m = FinalizeWriteStreamResponse{} }
+func (m *FinalizeWriteStreamResponse) String() string { return proto.CompactTextString(m) }
+func (*FinalizeWriteStreamResponse) ProtoMessage() {}
+func (*FinalizeWriteStreamResponse) Descriptor() ([]byte, []int) {
+ return fileDescriptor_f4f9d0517c05d712, []int{7}
+}
+
+func (m *FinalizeWriteStreamResponse) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_FinalizeWriteStreamResponse.Unmarshal(m, b)
+}
+func (m *FinalizeWriteStreamResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_FinalizeWriteStreamResponse.Marshal(b, m, deterministic)
+}
+func (m *FinalizeWriteStreamResponse) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_FinalizeWriteStreamResponse.Merge(m, src)
+}
+func (m *FinalizeWriteStreamResponse) XXX_Size() int {
+ return xxx_messageInfo_FinalizeWriteStreamResponse.Size(m)
+}
+func (m *FinalizeWriteStreamResponse) XXX_DiscardUnknown() {
+ xxx_messageInfo_FinalizeWriteStreamResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_FinalizeWriteStreamResponse proto.InternalMessageInfo
+
+func (m *FinalizeWriteStreamResponse) GetRowCount() int64 {
+ if m != nil {
+ return m.RowCount
+ }
+ return 0
+}
+
+func init() {
+ proto.RegisterType((*CreateWriteStreamRequest)(nil), "google.cloud.bigquery.storage.v1alpha2.CreateWriteStreamRequest")
+ proto.RegisterType((*AppendRowsRequest)(nil), "google.cloud.bigquery.storage.v1alpha2.AppendRowsRequest")
+ proto.RegisterType((*AppendRowsRequest_ProtoData)(nil), "google.cloud.bigquery.storage.v1alpha2.AppendRowsRequest.ProtoData")
+ proto.RegisterType((*AppendRowsResponse)(nil), "google.cloud.bigquery.storage.v1alpha2.AppendRowsResponse")
+ proto.RegisterType((*GetWriteStreamRequest)(nil), "google.cloud.bigquery.storage.v1alpha2.GetWriteStreamRequest")
+ proto.RegisterType((*BatchCommitWriteStreamsRequest)(nil), "google.cloud.bigquery.storage.v1alpha2.BatchCommitWriteStreamsRequest")
+ proto.RegisterType((*BatchCommitWriteStreamsResponse)(nil), "google.cloud.bigquery.storage.v1alpha2.BatchCommitWriteStreamsResponse")
+ proto.RegisterType((*FinalizeWriteStreamRequest)(nil), "google.cloud.bigquery.storage.v1alpha2.FinalizeWriteStreamRequest")
+ proto.RegisterType((*FinalizeWriteStreamResponse)(nil), "google.cloud.bigquery.storage.v1alpha2.FinalizeWriteStreamResponse")
+}
+
+func init() {
+ proto.RegisterFile("google/cloud/bigquery/storage/v1alpha2/storage.proto", fileDescriptor_f4f9d0517c05d712)
+}
+
+var fileDescriptor_f4f9d0517c05d712 = []byte{
+ // 886 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4f, 0x6f, 0x1b, 0x45,
+ 0x14, 0xcf, 0xc4, 0x21, 0x6a, 0x5e, 0x5a, 0xa4, 0x0e, 0x42, 0xb1, 0xd6, 0x28, 0xad, 0xf6, 0x50,
+ 0x45, 0x16, 0xec, 0x36, 0x49, 0x5b, 0x91, 0x94, 0x22, 0x6c, 0x97, 0x34, 0x48, 0x20, 0xc1, 0x06,
+ 0x01, 0x02, 0x09, 0x6b, 0xbc, 0x1e, 0xaf, 0x17, 0xed, 0xee, 0x4c, 0x67, 0x66, 0xbb, 0x2a, 0x15,
+ 0x07, 0xb8, 0x72, 0x03, 0x3e, 0x06, 0x07, 0x3e, 0x06, 0xea, 0x11, 0xc4, 0x9d, 0x43, 0xf9, 0x1e,
+ 0x68, 0x67, 0x67, 0xed, 0xc5, 0xb1, 0xc9, 0xda, 0xe9, 0x2d, 0x9e, 0xf7, 0x7e, 0xef, 0xfd, 0x7e,
+ 0xef, 0x5f, 0x16, 0xee, 0x04, 0x8c, 0x05, 0x11, 0x75, 0xfd, 0x88, 0xa5, 0x43, 0x77, 0x10, 0x06,
+ 0x8f, 0x53, 0x2a, 0x9e, 0xba, 0x52, 0x31, 0x41, 0x02, 0xea, 0x3e, 0xd9, 0x27, 0x11, 0x1f, 0x93,
+ 0x83, 0xf2, 0xc1, 0xe1, 0x82, 0x29, 0x86, 0x6f, 0x15, 0x28, 0x47, 0xa3, 0x9c, 0x12, 0xe5, 0x94,
+ 0x4e, 0x25, 0xca, 0x7a, 0xc3, 0x44, 0x27, 0x3c, 0x74, 0x49, 0x92, 0x30, 0x45, 0x54, 0xc8, 0x12,
+ 0x59, 0x44, 0xb1, 0x76, 0x2a, 0x56, 0x3f, 0x0a, 0x69, 0xa2, 0x8c, 0xe1, 0x46, 0xc5, 0x30, 0x0a,
+ 0x69, 0x34, 0xec, 0x0f, 0xe8, 0x98, 0x3c, 0x09, 0x99, 0x30, 0x0e, 0x77, 0x6b, 0xb2, 0xd6, 0xde,
+ 0x83, 0x74, 0x64, 0x60, 0x87, 0xb5, 0xc5, 0x0a, 0x4a, 0x62, 0x03, 0x6a, 0x19, 0x50, 0x19, 0xcb,
+ 0xa5, 0x31, 0x57, 0x4f, 0x67, 0x98, 0x4e, 0x8c, 0x2a, 0x8c, 0xa9, 0x54, 0x24, 0xe6, 0xc6, 0x61,
+ 0x77, 0xd6, 0x21, 0x13, 0x84, 0x73, 0x2a, 0x66, 0x6b, 0x20, 0xb8, 0xef, 0x4a, 0x45, 0x54, 0x6a,
+ 0x0c, 0xf6, 0x2f, 0x08, 0x9a, 0x3d, 0x41, 0x89, 0xa2, 0x9f, 0x8b, 0x50, 0xd1, 0x33, 0x4d, 0xc9,
+ 0xa3, 0x8f, 0x53, 0x2a, 0x15, 0x6e, 0xc1, 0x26, 0x27, 0x82, 0x26, 0xaa, 0x89, 0x6e, 0xa2, 0xbd,
+ 0xad, 0x6e, 0xe3, 0xef, 0xce, 0xba, 0x67, 0x9e, 0xf0, 0x57, 0x70, 0x35, 0xcb, 0x21, 0xfd, 0x42,
+ 0x46, 0x73, 0xfd, 0x26, 0xda, 0xdb, 0x3e, 0x38, 0x74, 0xea, 0xf5, 0xcc, 0xa9, 0xa4, 0x2b, 0xe2,
+ 0x6e, 0x67, 0xd3, 0x17, 0xfb, 0xa7, 0x06, 0x5c, 0xef, 0x70, 0x4e, 0x93, 0xa1, 0xc7, 0x32, 0x59,
+ 0xf2, 0xb9, 0x35, 0x93, 0xb2, 0xc2, 0xaa, 0x8a, 0xc6, 0x6f, 0xc3, 0x26, 0x1b, 0x8d, 0x24, 0x55,
+ 0x86, 0x54, 0xab, 0x24, 0x35, 0x69, 0xd4, 0x07, 0x89, 0xba, 0x77, 0xe7, 0x33, 0x12, 0xa5, 0x34,
+ 0x87, 0x23, 0xcf, 0xf8, 0xe3, 0x21, 0x80, 0xf6, 0xe9, 0x0b, 0x96, 0xc9, 0xe6, 0x86, 0x46, 0xf7,
+ 0xea, 0x4a, 0x3a, 0x47, 0xd8, 0xf9, 0x38, 0x8f, 0xf5, 0x90, 0x28, 0x72, 0xba, 0xe6, 0x6d, 0xe9,
+ 0xc0, 0xb9, 0xd5, 0xfa, 0x15, 0xc1, 0xd6, 0xc4, 0x84, 0xbf, 0x80, 0x6b, 0x9a, 0xbc, 0xe8, 0x4b,
+ 0x7f, 0x4c, 0x63, 0xa2, 0x65, 0x2d, 0x51, 0x49, 0x1d, 0xe9, 0x4c, 0x43, 0xbd, 0xa2, 0x3e, 0xa2,
+ 0xf8, 0x85, 0xdf, 0x87, 0x0d, 0xad, 0xa3, 0xa8, 0xc2, 0xfe, 0x52, 0x01, 0xb5, 0x0c, 0x0d, 0xef,
+ 0x6e, 0x16, 0x61, 0xec, 0x11, 0xe0, 0xaa, 0x44, 0xc9, 0x59, 0x22, 0x29, 0x6e, 0x4e, 0x8a, 0x9d,
+ 0xf3, 0x6e, 0x9c, 0xae, 0x4d, 0x8a, 0xd9, 0x86, 0x57, 0xa8, 0x10, 0x4c, 0x98, 0xfc, 0xb8, 0xcc,
+ 0x2f, 0xb8, 0xef, 0x9c, 0xe9, 0x21, 0x3c, 0x5d, 0xf3, 0x0a, 0x97, 0x2e, 0xc0, 0x15, 0x61, 0x22,
+ 0xda, 0xb7, 0xe1, 0xf5, 0x47, 0x54, 0xcd, 0x99, 0xc7, 0x1d, 0xd8, 0x48, 0x48, 0x4c, 0xab, 0x7d,
+ 0xd7, 0x0f, 0x76, 0x00, 0xbb, 0x5d, 0xa2, 0xfc, 0x71, 0x8f, 0xc5, 0x71, 0x58, 0x45, 0xca, 0x5a,
+ 0xa3, 0xbc, 0x67, 0x3a, 0x60, 0xe6, 0x2a, 0x2f, 0x58, 0xa3, 0xf4, 0xb9, 0x5a, 0x19, 0x2c, 0x69,
+ 0x7f, 0x0d, 0x37, 0x16, 0x26, 0x32, 0xf5, 0xb8, 0x0f, 0xdb, 0xbe, 0xb6, 0xf6, 0xf3, 0x25, 0x35,
+ 0xcd, 0xb4, 0xce, 0x4d, 0xe0, 0xa7, 0xe5, 0x06, 0x7b, 0x50, 0xb8, 0xe7, 0x0f, 0xf6, 0x5d, 0xb0,
+ 0x4e, 0xc2, 0x84, 0x44, 0xe1, 0xb7, 0x74, 0x19, 0xfd, 0xc7, 0xd0, 0x9a, 0x0b, 0x33, 0x94, 0x5a,
+ 0xb0, 0x25, 0x58, 0xd6, 0xf7, 0x59, 0x6a, 0xf4, 0x37, 0xbc, 0x2b, 0x82, 0x65, 0xbd, 0xfc, 0xf7,
+ 0xc1, 0xf7, 0x00, 0xd7, 0xba, 0x61, 0xf0, 0x49, 0x3e, 0x0b, 0x1a, 0x8c, 0x7f, 0x47, 0x70, 0xfd,
+ 0xdc, 0x4d, 0xc0, 0xef, 0xd5, 0x1d, 0x9f, 0x45, 0xe7, 0xc4, 0x5a, 0xe5, 0x36, 0xd8, 0xef, 0xfc,
+ 0xf0, 0xe7, 0x8b, 0x9f, 0xd7, 0xef, 0xd9, 0xfb, 0xd3, 0xbb, 0xf9, 0xac, 0x68, 0xdb, 0x03, 0x2e,
+ 0xd8, 0x37, 0xd4, 0x57, 0xd2, 0x6d, 0xbb, 0x43, 0xa2, 0x88, 0xa4, 0xfa, 0x4f, 0x45, 0x06, 0x11,
+ 0x95, 0x6e, 0xfb, 0xbb, 0x63, 0xd4, 0xc6, 0x7f, 0x21, 0x80, 0xe9, 0xcc, 0xe2, 0xa3, 0x95, 0x57,
+ 0xd9, 0x3a, 0x5e, 0x05, 0x6a, 0x06, 0xfa, 0x43, 0xad, 0xe1, 0xc4, 0xee, 0x54, 0x34, 0x54, 0x07,
+ 0xee, 0x02, 0x25, 0xe6, 0x1f, 0x85, 0xd1, 0xb4, 0x87, 0x6e, 0x23, 0xfc, 0x1c, 0xc1, 0xab, 0xff,
+ 0xdd, 0x11, 0xfc, 0xa0, 0x2e, 0xc1, 0xb9, 0xbb, 0xb5, 0x5a, 0x73, 0x1e, 0x6a, 0x61, 0xef, 0xda,
+ 0x47, 0x15, 0x61, 0xf9, 0x40, 0x2e, 0x25, 0x08, 0xbf, 0x40, 0xf0, 0xda, 0x9c, 0xf1, 0xc5, 0xdd,
+ 0xba, 0x94, 0x16, 0xaf, 0x8c, 0xd5, 0xbb, 0x54, 0x0c, 0xd3, 0xbf, 0x97, 0x23, 0xf3, 0x1f, 0x04,
+ 0x3b, 0x0b, 0x8e, 0x07, 0x3e, 0xa9, 0x4b, 0xf3, 0xff, 0xcf, 0x9c, 0xf5, 0xe8, 0xd2, 0x71, 0x8c,
+ 0xe4, 0x23, 0x2d, 0xf9, 0x10, 0x2f, 0xbf, 0x76, 0xd6, 0x6f, 0xe8, 0x79, 0x67, 0xb7, 0xcc, 0x5c,
+ 0x26, 0x2e, 0x68, 0x11, 0x1e, 0x4a, 0xc7, 0x67, 0xf1, 0x1f, 0x9d, 0x1f, 0xd1, 0x58, 0x29, 0x2e,
+ 0x8f, 0x5d, 0x37, 0xcb, 0xb2, 0x19, 0xab, 0x4b, 0x52, 0x35, 0x9e, 0x7c, 0x37, 0xbd, 0x59, 0xd7,
+ 0xd1, 0x09, 0x13, 0x49, 0x85, 0xca, 0x39, 0x5d, 0x88, 0xd1, 0xe5, 0x79, 0x8b, 0x47, 0x44, 0x8d,
+ 0x98, 0x88, 0xbb, 0xcf, 0xa0, 0xed, 0xb3, 0xb8, 0x66, 0xed, 0xbe, 0xfc, 0xc8, 0xf8, 0x05, 0x2c,
+ 0x22, 0x49, 0xe0, 0x30, 0x11, 0xb8, 0x01, 0x4d, 0xf4, 0x65, 0x77, 0xa7, 0xc9, 0x2e, 0xfa, 0xfc,
+ 0xbb, 0x6f, 0x1e, 0x06, 0x9b, 0x1a, 0x79, 0xf8, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x76, 0xc7,
+ 0x14, 0x8b, 0x24, 0x0b, 0x00, 0x00,
+}
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ context.Context
+var _ grpc.ClientConn
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+const _ = grpc.SupportPackageIsVersion4
+
+// BigQueryWriteClient is the client API for BigQueryWrite service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type BigQueryWriteClient interface {
+ // Creates a write stream to the given table.
+ CreateWriteStream(ctx context.Context, in *CreateWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error)
+ // Appends data to the given stream.
+ //
+ // If `offset` is specified, the `offset` is checked against the end of
+ // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
+ // attempt is made to append to an offset beyond the current end of the stream
+ // or `ALREADY_EXISTS` if user provids an `offset` that has already been
+ // written to. User can retry with adjusted offset within the same RPC
+ // stream. If `offset` is not specified, append happens at the end of the
+ // stream.
+ //
+ // The response contains the offset at which the append happened. Responses
+ // are received in the same order in which requests are sent. There will be
+ // one response for each successful request. If the `offset` is not set in
+ // response, it means append didn't happen due to some errors. If one request
+ // fails, all the subsequent requests will also fail until a success request
+ // is made again.
+ //
+ // If the stream is of `PENDING` type, data will only be available for read
+ // operations after the stream is committed.
+ AppendRows(ctx context.Context, opts ...grpc.CallOption) (BigQueryWrite_AppendRowsClient, error)
+ // Gets a write stream.
+ GetWriteStream(ctx context.Context, in *GetWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error)
+ // Finalize a write stream so that no new data can be appended to the
+ // stream.
+ FinalizeWriteStream(ctx context.Context, in *FinalizeWriteStreamRequest, opts ...grpc.CallOption) (*FinalizeWriteStreamResponse, error)
+ // Atomically commits a group of `PENDING` streams that belong to the same
+ // `parent` table.
+ // Streams must be finalized before commit and cannot be committed multiple
+ // times. Once a stream is committed, data in the stream becomes available
+ // for read operations.
+ BatchCommitWriteStreams(ctx context.Context, in *BatchCommitWriteStreamsRequest, opts ...grpc.CallOption) (*BatchCommitWriteStreamsResponse, error)
+}
+
+type bigQueryWriteClient struct {
+ cc *grpc.ClientConn
+}
+
+func NewBigQueryWriteClient(cc *grpc.ClientConn) BigQueryWriteClient {
+ return &bigQueryWriteClient{cc}
+}
+
+func (c *bigQueryWriteClient) CreateWriteStream(ctx context.Context, in *CreateWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error) {
+ out := new(WriteStream)
+ err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/CreateWriteStream", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *bigQueryWriteClient) AppendRows(ctx context.Context, opts ...grpc.CallOption) (BigQueryWrite_AppendRowsClient, error) {
+ stream, err := c.cc.NewStream(ctx, &_BigQueryWrite_serviceDesc.Streams[0], "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/AppendRows", opts...)
+ if err != nil {
+ return nil, err
+ }
+ x := &bigQueryWriteAppendRowsClient{stream}
+ return x, nil
+}
+
+type BigQueryWrite_AppendRowsClient interface {
+ Send(*AppendRowsRequest) error
+ Recv() (*AppendRowsResponse, error)
+ grpc.ClientStream
+}
+
+type bigQueryWriteAppendRowsClient struct {
+ grpc.ClientStream
+}
+
+func (x *bigQueryWriteAppendRowsClient) Send(m *AppendRowsRequest) error {
+ return x.ClientStream.SendMsg(m)
+}
+
+func (x *bigQueryWriteAppendRowsClient) Recv() (*AppendRowsResponse, error) {
+ m := new(AppendRowsResponse)
+ if err := x.ClientStream.RecvMsg(m); err != nil {
+ return nil, err
+ }
+ return m, nil
+}
+
+func (c *bigQueryWriteClient) GetWriteStream(ctx context.Context, in *GetWriteStreamRequest, opts ...grpc.CallOption) (*WriteStream, error) {
+ out := new(WriteStream)
+ err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/GetWriteStream", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *bigQueryWriteClient) FinalizeWriteStream(ctx context.Context, in *FinalizeWriteStreamRequest, opts ...grpc.CallOption) (*FinalizeWriteStreamResponse, error) {
+ out := new(FinalizeWriteStreamResponse)
+ err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FinalizeWriteStream", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *bigQueryWriteClient) BatchCommitWriteStreams(ctx context.Context, in *BatchCommitWriteStreamsRequest, opts ...grpc.CallOption) (*BatchCommitWriteStreamsResponse, error) {
+ out := new(BatchCommitWriteStreamsResponse)
+ err := c.cc.Invoke(ctx, "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/BatchCommitWriteStreams", in, out, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+// BigQueryWriteServer is the server API for BigQueryWrite service.
+type BigQueryWriteServer interface {
+ // Creates a write stream to the given table.
+ CreateWriteStream(context.Context, *CreateWriteStreamRequest) (*WriteStream, error)
+ // Appends data to the given stream.
+ //
+ // If `offset` is specified, the `offset` is checked against the end of
+ // stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
+ // attempt is made to append to an offset beyond the current end of the stream
+ // or `ALREADY_EXISTS` if user provids an `offset` that has already been
+ // written to. User can retry with adjusted offset within the same RPC
+ // stream. If `offset` is not specified, append happens at the end of the
+ // stream.
+ //
+ // The response contains the offset at which the append happened. Responses
+ // are received in the same order in which requests are sent. There will be
+ // one response for each successful request. If the `offset` is not set in
+ // response, it means append didn't happen due to some errors. If one request
+ // fails, all the subsequent requests will also fail until a success request
+ // is made again.
+ //
+ // If the stream is of `PENDING` type, data will only be available for read
+ // operations after the stream is committed.
+ AppendRows(BigQueryWrite_AppendRowsServer) error
+ // Gets a write stream.
+ GetWriteStream(context.Context, *GetWriteStreamRequest) (*WriteStream, error)
+ // Finalize a write stream so that no new data can be appended to the
+ // stream.
+ FinalizeWriteStream(context.Context, *FinalizeWriteStreamRequest) (*FinalizeWriteStreamResponse, error)
+ // Atomically commits a group of `PENDING` streams that belong to the same
+ // `parent` table.
+ // Streams must be finalized before commit and cannot be committed multiple
+ // times. Once a stream is committed, data in the stream becomes available
+ // for read operations.
+ BatchCommitWriteStreams(context.Context, *BatchCommitWriteStreamsRequest) (*BatchCommitWriteStreamsResponse, error)
+}
+
+// UnimplementedBigQueryWriteServer can be embedded to have forward compatible implementations.
+type UnimplementedBigQueryWriteServer struct {
+}
+
+func (*UnimplementedBigQueryWriteServer) CreateWriteStream(ctx context.Context, req *CreateWriteStreamRequest) (*WriteStream, error) {
+ return nil, status1.Errorf(codes.Unimplemented, "method CreateWriteStream not implemented")
+}
+func (*UnimplementedBigQueryWriteServer) AppendRows(srv BigQueryWrite_AppendRowsServer) error {
+ return status1.Errorf(codes.Unimplemented, "method AppendRows not implemented")
+}
+func (*UnimplementedBigQueryWriteServer) GetWriteStream(ctx context.Context, req *GetWriteStreamRequest) (*WriteStream, error) {
+ return nil, status1.Errorf(codes.Unimplemented, "method GetWriteStream not implemented")
+}
+func (*UnimplementedBigQueryWriteServer) FinalizeWriteStream(ctx context.Context, req *FinalizeWriteStreamRequest) (*FinalizeWriteStreamResponse, error) {
+ return nil, status1.Errorf(codes.Unimplemented, "method FinalizeWriteStream not implemented")
+}
+func (*UnimplementedBigQueryWriteServer) BatchCommitWriteStreams(ctx context.Context, req *BatchCommitWriteStreamsRequest) (*BatchCommitWriteStreamsResponse, error) {
+ return nil, status1.Errorf(codes.Unimplemented, "method BatchCommitWriteStreams not implemented")
+}
+
+func RegisterBigQueryWriteServer(s *grpc.Server, srv BigQueryWriteServer) {
+ s.RegisterService(&_BigQueryWrite_serviceDesc, srv)
+}
+
+func _BigQueryWrite_CreateWriteStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(CreateWriteStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(BigQueryWriteServer).CreateWriteStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/CreateWriteStream",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(BigQueryWriteServer).CreateWriteStream(ctx, req.(*CreateWriteStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _BigQueryWrite_AppendRows_Handler(srv interface{}, stream grpc.ServerStream) error {
+ return srv.(BigQueryWriteServer).AppendRows(&bigQueryWriteAppendRowsServer{stream})
+}
+
+type BigQueryWrite_AppendRowsServer interface {
+ Send(*AppendRowsResponse) error
+ Recv() (*AppendRowsRequest, error)
+ grpc.ServerStream
+}
+
+type bigQueryWriteAppendRowsServer struct {
+ grpc.ServerStream
+}
+
+func (x *bigQueryWriteAppendRowsServer) Send(m *AppendRowsResponse) error {
+ return x.ServerStream.SendMsg(m)
+}
+
+func (x *bigQueryWriteAppendRowsServer) Recv() (*AppendRowsRequest, error) {
+ m := new(AppendRowsRequest)
+ if err := x.ServerStream.RecvMsg(m); err != nil {
+ return nil, err
+ }
+ return m, nil
+}
+
+func _BigQueryWrite_GetWriteStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(GetWriteStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(BigQueryWriteServer).GetWriteStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/GetWriteStream",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(BigQueryWriteServer).GetWriteStream(ctx, req.(*GetWriteStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _BigQueryWrite_FinalizeWriteStream_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(FinalizeWriteStreamRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(BigQueryWriteServer).FinalizeWriteStream(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/FinalizeWriteStream",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(BigQueryWriteServer).FinalizeWriteStream(ctx, req.(*FinalizeWriteStreamRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _BigQueryWrite_BatchCommitWriteStreams_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(BatchCommitWriteStreamsRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(BigQueryWriteServer).BatchCommitWriteStreams(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.bigquery.storage.v1alpha2.BigQueryWrite/BatchCommitWriteStreams",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(BigQueryWriteServer).BatchCommitWriteStreams(ctx, req.(*BatchCommitWriteStreamsRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+var _BigQueryWrite_serviceDesc = grpc.ServiceDesc{
+ ServiceName: "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
+ HandlerType: (*BigQueryWriteServer)(nil),
+ Methods: []grpc.MethodDesc{
+ {
+ MethodName: "CreateWriteStream",
+ Handler: _BigQueryWrite_CreateWriteStream_Handler,
+ },
+ {
+ MethodName: "GetWriteStream",
+ Handler: _BigQueryWrite_GetWriteStream_Handler,
+ },
+ {
+ MethodName: "FinalizeWriteStream",
+ Handler: _BigQueryWrite_FinalizeWriteStream_Handler,
+ },
+ {
+ MethodName: "BatchCommitWriteStreams",
+ Handler: _BigQueryWrite_BatchCommitWriteStreams_Handler,
+ },
+ },
+ Streams: []grpc.StreamDesc{
+ {
+ StreamName: "AppendRows",
+ Handler: _BigQueryWrite_AppendRows_Handler,
+ ServerStreams: true,
+ ClientStreams: true,
+ },
+ },
+ Metadata: "google/cloud/bigquery/storage/v1alpha2/storage.proto",
+}
diff --git a/googleapis/cloud/bigquery/storage/v1alpha2/stream.pb.go b/googleapis/cloud/bigquery/storage/v1alpha2/stream.pb.go
new file mode 100644
index 0000000..63c276e
--- /dev/null
+++ b/googleapis/cloud/bigquery/storage/v1alpha2/stream.pb.go
@@ -0,0 +1,187 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/cloud/bigquery/storage/v1alpha2/stream.proto
+
+package storage
+
+import (
+ fmt "fmt"
+ math "math"
+
+ proto "github.com/golang/protobuf/proto"
+ timestamp "github.com/golang/protobuf/ptypes/timestamp"
+ _ "google.golang.org/genproto/googleapis/api/annotations"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+
+type WriteStream_Type int32
+
+const (
+ // Unknown type.
+ WriteStream_TYPE_UNSPECIFIED WriteStream_Type = 0
+ // Data will commit automatically and appear as soon as the write is
+ // acknowledged.
+ WriteStream_COMMITTED WriteStream_Type = 1
+ // Data is invisible until the stream is committed.
+ WriteStream_PENDING WriteStream_Type = 2
+)
+
+var WriteStream_Type_name = map[int32]string{
+ 0: "TYPE_UNSPECIFIED",
+ 1: "COMMITTED",
+ 2: "PENDING",
+}
+
+var WriteStream_Type_value = map[string]int32{
+ "TYPE_UNSPECIFIED": 0,
+ "COMMITTED": 1,
+ "PENDING": 2,
+}
+
+func (x WriteStream_Type) String() string {
+ return proto.EnumName(WriteStream_Type_name, int32(x))
+}
+
+func (WriteStream_Type) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_cfe5157f29fe5b3a, []int{0, 0}
+}
+
+// Information about a single stream that gets data inside the storage system.
+type WriteStream struct {
+ // Output only. Name of the stream, in the form
+ // `projects/{project_id}/datasets/{dataset_id}/tables/{table_id}/streams/{stream_id}`.
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ Type WriteStream_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.storage.v1alpha2.WriteStream_Type" json:"type,omitempty"`
+ // Output only. Create time of the stream.
+ CreateTime *timestamp.Timestamp `protobuf:"bytes,3,opt,name=create_time,json=createTime,proto3" json:"create_time,omitempty"`
+ // Output only. Commit time of the stream.
+ // If a stream is of `COMMITTED` type, then it will have a commit_time same as
+ // `create_time`. If the stream is of `PENDING` type, commit_time being empty
+ // means it is not committed.
+ CommitTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
+ // Output only. The schema of the destination table. It is only returned in
+ // `CreateWriteStream` response. Caller should generate data that's
+ // compatible with this schema to send in initial `AppendRowsRequest`.
+ // The table schema could go out of date during the life time of the stream.
+ TableSchema *TableSchema `protobuf:"bytes,5,opt,name=table_schema,json=tableSchema,proto3" json:"table_schema,omitempty"`
+ // Id set by client to annotate its identity.
+ ExternalId string `protobuf:"bytes,6,opt,name=external_id,json=externalId,proto3" json:"external_id,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *WriteStream) Reset() { *m = WriteStream{} }
+func (m *WriteStream) String() string { return proto.CompactTextString(m) }
+func (*WriteStream) ProtoMessage() {}
+func (*WriteStream) Descriptor() ([]byte, []int) {
+ return fileDescriptor_cfe5157f29fe5b3a, []int{0}
+}
+
+func (m *WriteStream) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_WriteStream.Unmarshal(m, b)
+}
+func (m *WriteStream) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_WriteStream.Marshal(b, m, deterministic)
+}
+func (m *WriteStream) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_WriteStream.Merge(m, src)
+}
+func (m *WriteStream) XXX_Size() int {
+ return xxx_messageInfo_WriteStream.Size(m)
+}
+func (m *WriteStream) XXX_DiscardUnknown() {
+ xxx_messageInfo_WriteStream.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_WriteStream proto.InternalMessageInfo
+
+func (m *WriteStream) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+func (m *WriteStream) GetType() WriteStream_Type {
+ if m != nil {
+ return m.Type
+ }
+ return WriteStream_TYPE_UNSPECIFIED
+}
+
+func (m *WriteStream) GetCreateTime() *timestamp.Timestamp {
+ if m != nil {
+ return m.CreateTime
+ }
+ return nil
+}
+
+func (m *WriteStream) GetCommitTime() *timestamp.Timestamp {
+ if m != nil {
+ return m.CommitTime
+ }
+ return nil
+}
+
+func (m *WriteStream) GetTableSchema() *TableSchema {
+ if m != nil {
+ return m.TableSchema
+ }
+ return nil
+}
+
+func (m *WriteStream) GetExternalId() string {
+ if m != nil {
+ return m.ExternalId
+ }
+ return ""
+}
+
+func init() {
+ proto.RegisterEnum("google.cloud.bigquery.storage.v1alpha2.WriteStream_Type", WriteStream_Type_name, WriteStream_Type_value)
+ proto.RegisterType((*WriteStream)(nil), "google.cloud.bigquery.storage.v1alpha2.WriteStream")
+}
+
+func init() {
+ proto.RegisterFile("google/cloud/bigquery/storage/v1alpha2/stream.proto", fileDescriptor_cfe5157f29fe5b3a)
+}
+
+var fileDescriptor_cfe5157f29fe5b3a = []byte{
+ // 405 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6e, 0xd3, 0x30,
+ 0x1c, 0xc6, 0xc9, 0xda, 0x0d, 0xcd, 0x01, 0x54, 0x59, 0x48, 0x44, 0xbd, 0xac, 0xda, 0x01, 0x55,
+ 0x1c, 0x6c, 0x91, 0x5e, 0x26, 0x71, 0x81, 0xad, 0x01, 0xe5, 0xd0, 0x52, 0xda, 0x20, 0x04, 0x1c,
+ 0x22, 0x27, 0xf9, 0xcf, 0xb5, 0x14, 0xc7, 0xc1, 0x71, 0x2b, 0x2a, 0x9e, 0x8b, 0xf7, 0xe1, 0x51,
+ 0x50, 0xec, 0x44, 0x70, 0x5b, 0x8e, 0xf9, 0xe2, 0xdf, 0x2f, 0x9f, 0x3f, 0x05, 0x2d, 0xb8, 0x52,
+ 0xbc, 0x04, 0x9a, 0x97, 0xea, 0x50, 0xd0, 0x4c, 0xf0, 0x1f, 0x07, 0xd0, 0x27, 0xda, 0x18, 0xa5,
+ 0x19, 0x07, 0x7a, 0x7c, 0xcd, 0xca, 0x7a, 0xcf, 0x42, 0xda, 0x18, 0x0d, 0x4c, 0x92, 0x5a, 0x2b,
+ 0xa3, 0xf0, 0x4b, 0x07, 0x11, 0x0b, 0x91, 0x1e, 0x22, 0x1d, 0x44, 0x7a, 0x68, 0x7a, 0xd5, 0xc9,
+ 0x59, 0x2d, 0xe8, 0xbd, 0x80, 0xb2, 0x48, 0x33, 0xd8, 0xb3, 0xa3, 0x50, 0xda, 0x89, 0xa6, 0xe1,
+ 0xc0, 0xaf, 0x1b, 0x96, 0x95, 0xd0, 0x31, 0xbd, 0xd4, 0x3e, 0x65, 0x87, 0x7b, 0x6a, 0x84, 0x84,
+ 0xc6, 0x30, 0x59, 0xbb, 0x03, 0xd7, 0xbf, 0x47, 0xc8, 0xff, 0xa2, 0x85, 0x81, 0x9d, 0xed, 0x8c,
+ 0x5f, 0xa0, 0x71, 0xc5, 0x24, 0x04, 0xde, 0xcc, 0x9b, 0x5f, 0xde, 0x8e, 0xfe, 0xbc, 0x1b, 0x6d,
+ 0x6d, 0x80, 0x3f, 0xa1, 0xb1, 0x39, 0xd5, 0x10, 0x9c, 0xcd, 0xbc, 0xf9, 0xb3, 0xf0, 0x86, 0x0c,
+ 0xbb, 0x15, 0xf9, 0xcf, 0x4d, 0x92, 0x53, 0x0d, 0xad, 0xf2, 0x7c, 0x6b, 0x55, 0xf8, 0x2d, 0xf2,
+ 0x73, 0x0d, 0xcc, 0x40, 0xda, 0xb6, 0x0a, 0x46, 0x33, 0x6f, 0xee, 0x87, 0xd3, 0xde, 0xdc, 0x57,
+ 0x26, 0x49, 0x5f, 0xd9, 0xd5, 0x41, 0x8e, 0x69, 0x53, 0x6b, 0x50, 0x52, 0x0a, 0xe3, 0x0c, 0xe3,
+ 0xa1, 0x06, 0xcb, 0x58, 0xc3, 0x77, 0xf4, 0xc4, 0xee, 0x95, 0x36, 0xf9, 0x1e, 0x24, 0x0b, 0xce,
+ 0xad, 0x62, 0x31, 0xf4, 0x7a, 0x49, 0xcb, 0xee, 0x2c, 0xea, 0xdc, 0xbe, 0xf9, 0x97, 0xe0, 0x2b,
+ 0xe4, 0xc3, 0x4f, 0x03, 0xba, 0x62, 0x65, 0x2a, 0x8a, 0xe0, 0xa2, 0xdd, 0x74, 0x8b, 0xfa, 0x28,
+ 0x2e, 0xae, 0x6f, 0xd0, 0xb8, 0x1d, 0x05, 0x3f, 0x47, 0x93, 0xe4, 0xeb, 0x26, 0x4a, 0x3f, 0xaf,
+ 0x77, 0x9b, 0xe8, 0x2e, 0x7e, 0x1f, 0x47, 0xcb, 0xc9, 0x23, 0xfc, 0x14, 0x5d, 0xde, 0x7d, 0x5c,
+ 0xad, 0xe2, 0x24, 0x89, 0x96, 0x13, 0x0f, 0xfb, 0xe8, 0xf1, 0x26, 0x5a, 0x2f, 0xe3, 0xf5, 0x87,
+ 0xc9, 0xd9, 0xed, 0x2f, 0xf4, 0x2a, 0x57, 0x72, 0x60, 0xcd, 0x6f, 0xab, 0xee, 0x1c, 0x57, 0x25,
+ 0xab, 0x38, 0x51, 0x9a, 0x53, 0x0e, 0x95, 0xdd, 0x87, 0xba, 0x57, 0xac, 0x16, 0xcd, 0x43, 0x7f,
+ 0xd6, 0x9b, 0x2e, 0xc8, 0x2e, 0x2c, 0xb9, 0xf8, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x0d, 0xb0, 0xe0,
+ 0xe9, 0x10, 0x03, 0x00, 0x00,
+}
diff --git a/googleapis/cloud/bigquery/storage/v1alpha2/table.pb.go b/googleapis/cloud/bigquery/storage/v1alpha2/table.pb.go
new file mode 100644
index 0000000..749dcf9
--- /dev/null
+++ b/googleapis/cloud/bigquery/storage/v1alpha2/table.pb.go
@@ -0,0 +1,292 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/cloud/bigquery/storage/v1alpha2/table.proto
+
+package storage
+
+import (
+ fmt "fmt"
+ math "math"
+
+ proto "github.com/golang/protobuf/proto"
+ _ "google.golang.org/genproto/googleapis/api/annotations"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
+
+type TableFieldSchema_Type int32
+
+const (
+ // Illegal value
+ TableFieldSchema_TYPE_UNSPECIFIED TableFieldSchema_Type = 0
+ // 64K, UTF8
+ TableFieldSchema_STRING TableFieldSchema_Type = 1
+ // 64-bit signed
+ TableFieldSchema_INT64 TableFieldSchema_Type = 2
+ // 64-bit IEEE floating point
+ TableFieldSchema_DOUBLE TableFieldSchema_Type = 3
+ // Aggregate type
+ TableFieldSchema_STRUCT TableFieldSchema_Type = 4
+ // 64K, Binary
+ TableFieldSchema_BYTES TableFieldSchema_Type = 5
+ // 2-valued
+ TableFieldSchema_BOOL TableFieldSchema_Type = 6
+ // 64-bit signed usec since UTC epoch
+ TableFieldSchema_TIMESTAMP TableFieldSchema_Type = 7
+ // Civil date - Year, Month, Day
+ TableFieldSchema_DATE TableFieldSchema_Type = 8
+ // Civil time - Hour, Minute, Second, Microseconds
+ TableFieldSchema_TIME TableFieldSchema_Type = 9
+ // Combination of civil date and civil time
+ TableFieldSchema_DATETIME TableFieldSchema_Type = 10
+ // Geography object (go/googlesql_geography)
+ TableFieldSchema_GEOGRAPHY TableFieldSchema_Type = 11
+ // Numeric value (go/googlesql_numeric)
+ TableFieldSchema_NUMERIC TableFieldSchema_Type = 12
+)
+
+var TableFieldSchema_Type_name = map[int32]string{
+ 0: "TYPE_UNSPECIFIED",
+ 1: "STRING",
+ 2: "INT64",
+ 3: "DOUBLE",
+ 4: "STRUCT",
+ 5: "BYTES",
+ 6: "BOOL",
+ 7: "TIMESTAMP",
+ 8: "DATE",
+ 9: "TIME",
+ 10: "DATETIME",
+ 11: "GEOGRAPHY",
+ 12: "NUMERIC",
+}
+
+var TableFieldSchema_Type_value = map[string]int32{
+ "TYPE_UNSPECIFIED": 0,
+ "STRING": 1,
+ "INT64": 2,
+ "DOUBLE": 3,
+ "STRUCT": 4,
+ "BYTES": 5,
+ "BOOL": 6,
+ "TIMESTAMP": 7,
+ "DATE": 8,
+ "TIME": 9,
+ "DATETIME": 10,
+ "GEOGRAPHY": 11,
+ "NUMERIC": 12,
+}
+
+func (x TableFieldSchema_Type) String() string {
+ return proto.EnumName(TableFieldSchema_Type_name, int32(x))
+}
+
+func (TableFieldSchema_Type) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_5b6e4e741ae7ca1f, []int{1, 0}
+}
+
+type TableFieldSchema_Mode int32
+
+const (
+ // Illegal value
+ TableFieldSchema_MODE_UNSPECIFIED TableFieldSchema_Mode = 0
+ TableFieldSchema_NULLABLE TableFieldSchema_Mode = 1
+ TableFieldSchema_REQUIRED TableFieldSchema_Mode = 2
+ TableFieldSchema_REPEATED TableFieldSchema_Mode = 3
+)
+
+var TableFieldSchema_Mode_name = map[int32]string{
+ 0: "MODE_UNSPECIFIED",
+ 1: "NULLABLE",
+ 2: "REQUIRED",
+ 3: "REPEATED",
+}
+
+var TableFieldSchema_Mode_value = map[string]int32{
+ "MODE_UNSPECIFIED": 0,
+ "NULLABLE": 1,
+ "REQUIRED": 2,
+ "REPEATED": 3,
+}
+
+func (x TableFieldSchema_Mode) String() string {
+ return proto.EnumName(TableFieldSchema_Mode_name, int32(x))
+}
+
+func (TableFieldSchema_Mode) EnumDescriptor() ([]byte, []int) {
+ return fileDescriptor_5b6e4e741ae7ca1f, []int{1, 1}
+}
+
+// Schema of a table
+type TableSchema struct {
+ // Describes the fields in a table.
+ Fields []*TableFieldSchema `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *TableSchema) Reset() { *m = TableSchema{} }
+func (m *TableSchema) String() string { return proto.CompactTextString(m) }
+func (*TableSchema) ProtoMessage() {}
+func (*TableSchema) Descriptor() ([]byte, []int) {
+ return fileDescriptor_5b6e4e741ae7ca1f, []int{0}
+}
+
+func (m *TableSchema) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_TableSchema.Unmarshal(m, b)
+}
+func (m *TableSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_TableSchema.Marshal(b, m, deterministic)
+}
+func (m *TableSchema) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_TableSchema.Merge(m, src)
+}
+func (m *TableSchema) XXX_Size() int {
+ return xxx_messageInfo_TableSchema.Size(m)
+}
+func (m *TableSchema) XXX_DiscardUnknown() {
+ xxx_messageInfo_TableSchema.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TableSchema proto.InternalMessageInfo
+
+func (m *TableSchema) GetFields() []*TableFieldSchema {
+ if m != nil {
+ return m.Fields
+ }
+ return nil
+}
+
+// A field in TableSchema
+type TableFieldSchema struct {
+ // Required. The field name. The name must contain only letters (a-z, A-Z),
+ // numbers (0-9), or underscores (_), and must start with a letter or
+ // underscore. The maximum length is 128 characters.
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+ // Required. The field data type.
+ Type TableFieldSchema_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.storage.v1alpha2.TableFieldSchema_Type" json:"type,omitempty"`
+ // Optional. The field mode. The default value is NULLABLE.
+ Mode TableFieldSchema_Mode `protobuf:"varint,3,opt,name=mode,proto3,enum=google.cloud.bigquery.storage.v1alpha2.TableFieldSchema_Mode" json:"mode,omitempty"`
+ // Optional. Describes the nested schema fields if the type property is set to STRUCT.
+ Fields []*TableFieldSchema `protobuf:"bytes,4,rep,name=fields,proto3" json:"fields,omitempty"`
+ // Optional. The field description. The maximum length is 1,024 characters.
+ Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"`
+ XXX_NoUnkeyedLiteral struct{} `json:"-"`
+ XXX_unrecognized []byte `json:"-"`
+ XXX_sizecache int32 `json:"-"`
+}
+
+func (m *TableFieldSchema) Reset() { *m = TableFieldSchema{} }
+func (m *TableFieldSchema) String() string { return proto.CompactTextString(m) }
+func (*TableFieldSchema) ProtoMessage() {}
+func (*TableFieldSchema) Descriptor() ([]byte, []int) {
+ return fileDescriptor_5b6e4e741ae7ca1f, []int{1}
+}
+
+func (m *TableFieldSchema) XXX_Unmarshal(b []byte) error {
+ return xxx_messageInfo_TableFieldSchema.Unmarshal(m, b)
+}
+func (m *TableFieldSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+ return xxx_messageInfo_TableFieldSchema.Marshal(b, m, deterministic)
+}
+func (m *TableFieldSchema) XXX_Merge(src proto.Message) {
+ xxx_messageInfo_TableFieldSchema.Merge(m, src)
+}
+func (m *TableFieldSchema) XXX_Size() int {
+ return xxx_messageInfo_TableFieldSchema.Size(m)
+}
+func (m *TableFieldSchema) XXX_DiscardUnknown() {
+ xxx_messageInfo_TableFieldSchema.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TableFieldSchema proto.InternalMessageInfo
+
+func (m *TableFieldSchema) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+func (m *TableFieldSchema) GetType() TableFieldSchema_Type {
+ if m != nil {
+ return m.Type
+ }
+ return TableFieldSchema_TYPE_UNSPECIFIED
+}
+
+func (m *TableFieldSchema) GetMode() TableFieldSchema_Mode {
+ if m != nil {
+ return m.Mode
+ }
+ return TableFieldSchema_MODE_UNSPECIFIED
+}
+
+func (m *TableFieldSchema) GetFields() []*TableFieldSchema {
+ if m != nil {
+ return m.Fields
+ }
+ return nil
+}
+
+func (m *TableFieldSchema) GetDescription() string {
+ if m != nil {
+ return m.Description
+ }
+ return ""
+}
+
+func init() {
+ proto.RegisterEnum("google.cloud.bigquery.storage.v1alpha2.TableFieldSchema_Type", TableFieldSchema_Type_name, TableFieldSchema_Type_value)
+ proto.RegisterEnum("google.cloud.bigquery.storage.v1alpha2.TableFieldSchema_Mode", TableFieldSchema_Mode_name, TableFieldSchema_Mode_value)
+ proto.RegisterType((*TableSchema)(nil), "google.cloud.bigquery.storage.v1alpha2.TableSchema")
+ proto.RegisterType((*TableFieldSchema)(nil), "google.cloud.bigquery.storage.v1alpha2.TableFieldSchema")
+}
+
+func init() {
+ proto.RegisterFile("google/cloud/bigquery/storage/v1alpha2/table.proto", fileDescriptor_5b6e4e741ae7ca1f)
+}
+
+var fileDescriptor_5b6e4e741ae7ca1f = []byte{
+ // 484 bytes of a gzipped FileDescriptorProto
+ 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x93, 0xdf, 0x8a, 0xd3, 0x40,
+ 0x14, 0xc6, 0x4d, 0x9b, 0xed, 0xb6, 0xa7, 0xab, 0x0c, 0x83, 0x60, 0xf0, 0xc6, 0x52, 0x50, 0x8a,
+ 0x17, 0x09, 0x56, 0x11, 0x41, 0xbc, 0x48, 0x9a, 0x69, 0x0d, 0x34, 0x7f, 0x4c, 0x26, 0x42, 0xbd,
+ 0x29, 0xd3, 0x66, 0x4c, 0x03, 0x69, 0x27, 0xa6, 0xd9, 0x85, 0xe2, 0x33, 0xf9, 0x30, 0xbe, 0x81,
+ 0x8f, 0x22, 0x93, 0xa4, 0x22, 0x7b, 0xe3, 0xe2, 0xde, 0xe5, 0x7c, 0xe7, 0x7c, 0xbf, 0x73, 0xf2,
+ 0x91, 0xc0, 0x34, 0x15, 0x22, 0xcd, 0xb9, 0xb1, 0xcd, 0xc5, 0x75, 0x62, 0x6c, 0xb2, 0xf4, 0xdb,
+ 0x35, 0x2f, 0x4f, 0xc6, 0xb1, 0x12, 0x25, 0x4b, 0xb9, 0x71, 0xf3, 0x8a, 0xe5, 0xc5, 0x8e, 0x4d,
+ 0x8d, 0x8a, 0x6d, 0x72, 0xae, 0x17, 0xa5, 0xa8, 0x04, 0x7e, 0xd1, 0x78, 0xf4, 0xda, 0xa3, 0x9f,
+ 0x3d, 0x7a, 0xeb, 0xd1, 0xcf, 0x9e, 0xa7, 0xcf, 0x5a, 0x36, 0x2b, 0x32, 0xe3, 0x6b, 0xc6, 0xf3,
+ 0x64, 0xbd, 0xe1, 0x3b, 0x76, 0x93, 0x89, 0xb2, 0x01, 0x8d, 0xd7, 0x30, 0xa4, 0x92, 0x1b, 0x6d,
+ 0x77, 0x7c, 0xcf, 0x70, 0x00, 0xbd, 0x7a, 0xec, 0xa8, 0x29, 0xa3, 0xee, 0x64, 0x38, 0x7d, 0xa7,
+ 0xdf, 0x6d, 0x91, 0x5e, 0x43, 0xe6, 0xd2, 0xda, 0x90, 0xc2, 0x96, 0x33, 0xfe, 0xa9, 0x02, 0xba,
+ 0xdd, 0xc4, 0x4f, 0x40, 0x3d, 0xb0, 0x3d, 0xd7, 0x94, 0x91, 0x32, 0x19, 0x58, 0xdd, 0x5f, 0x66,
+ 0x27, 0xac, 0x05, 0xfc, 0x19, 0xd4, 0xea, 0x54, 0x70, 0xad, 0x33, 0x52, 0x26, 0x8f, 0xa6, 0x1f,
+ 0xfe, 0x77, 0xbb, 0x4e, 0x4f, 0x05, 0x6f, 0xb9, 0x92, 0x27, 0xb9, 0x7b, 0x91, 0x70, 0xad, 0x7b,
+ 0x4f, 0xae, 0x2b, 0x92, 0x9a, 0xab, 0x84, 0x35, 0x0f, 0xc7, 0x7f, 0xf2, 0x52, 0xef, 0x97, 0x57,
+ 0x03, 0x6d, 0x61, 0xf8, 0x39, 0x0c, 0x13, 0x7e, 0xdc, 0x96, 0x59, 0x51, 0x65, 0xe2, 0xa0, 0xf5,
+ 0xce, 0x31, 0x29, 0xe1, 0xdf, 0xfa, 0xf8, 0x87, 0x02, 0xaa, 0x7c, 0x53, 0xfc, 0x18, 0x10, 0x5d,
+ 0x05, 0x64, 0x1d, 0x7b, 0x51, 0x40, 0x66, 0xce, 0xdc, 0x21, 0x36, 0x7a, 0x80, 0x01, 0x7a, 0x11,
+ 0x0d, 0x1d, 0x6f, 0x81, 0x14, 0x3c, 0x80, 0x0b, 0xc7, 0xa3, 0x6f, 0xdf, 0xa0, 0x8e, 0x94, 0x6d,
+ 0x3f, 0xb6, 0x96, 0x04, 0x75, 0xdb, 0x91, 0x78, 0x46, 0x91, 0x2a, 0x47, 0xac, 0x15, 0x25, 0x11,
+ 0xba, 0xc0, 0x7d, 0x50, 0x2d, 0xdf, 0x5f, 0xa2, 0x1e, 0x7e, 0x08, 0x03, 0xea, 0xb8, 0x24, 0xa2,
+ 0xa6, 0x1b, 0xa0, 0x4b, 0xd9, 0xb0, 0x4d, 0x4a, 0x50, 0x5f, 0x3e, 0xc9, 0x06, 0x1a, 0xe0, 0x2b,
+ 0xe8, 0x4b, 0xad, 0xae, 0x40, 0x1a, 0x16, 0xc4, 0x5f, 0x84, 0x66, 0xf0, 0x71, 0x85, 0x86, 0x78,
+ 0x08, 0x97, 0x5e, 0xec, 0x92, 0xd0, 0x99, 0xa1, 0xab, 0xf1, 0x1c, 0x54, 0x19, 0xa0, 0x3c, 0xd7,
+ 0xf5, 0xed, 0xdb, 0xe7, 0x5e, 0x41, 0xdf, 0x8b, 0x97, 0x4b, 0x53, 0x5e, 0xa6, 0xc8, 0x2a, 0x24,
+ 0x9f, 0x62, 0x27, 0x24, 0x36, 0xea, 0x34, 0x55, 0x40, 0x4c, 0x4a, 0x6c, 0xd4, 0xb5, 0xbe, 0xc3,
+ 0xcb, 0xad, 0xd8, 0xdf, 0x31, 0xea, 0x2f, 0x6e, 0x3b, 0x97, 0x8a, 0x9c, 0x1d, 0x52, 0x5d, 0x94,
+ 0xa9, 0x91, 0xf2, 0x43, 0xfd, 0xf9, 0x1b, 0x4d, 0x8b, 0x15, 0xd9, 0xf1, 0x5f, 0xbf, 0xdf, 0xfb,
+ 0x56, 0xd8, 0xf4, 0x6a, 0xe7, 0xeb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcd, 0x0b, 0xd9, 0x62,
+ 0xb7, 0x03, 0x00, 0x00,
+}
diff --git a/googleapis/firestore/v1/firestore.pb.go b/googleapis/firestore/v1/firestore.pb.go
index 90ea182..5b90c04 100644
--- a/googleapis/firestore/v1/firestore.pb.go
+++ b/googleapis/firestore/v1/firestore.pb.go
@@ -1015,7 +1015,8 @@
// This i-th write result corresponds to the i-th write in the
// request.
WriteResults []*WriteResult `protobuf:"bytes,1,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"`
- // The time at which the commit occurred.
+ // The time at which the commit occurred. Any read with an equal or greater
+ // `read_time` is guaranteed to see the effects of the commit.
CommitTime *timestamp.Timestamp `protobuf:"bytes,2,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
@@ -1457,7 +1458,8 @@
// This i-th write result corresponds to the i-th write in the
// request.
WriteResults []*WriteResult `protobuf:"bytes,3,rep,name=write_results,json=writeResults,proto3" json:"write_results,omitempty"`
- // The time at which the commit occurred.
+ // The time at which the commit occurred. Any read with an equal or greater
+ // `read_time` is guaranteed to see the effects of the write.
CommitTime *timestamp.Timestamp `protobuf:"bytes,4,opt,name=commit_time,json=commitTime,proto3" json:"commit_time,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
diff --git a/googleapis/firestore/v1/write.pb.go b/googleapis/firestore/v1/write.pb.go
index b23cb08..cb5c5ca 100644
--- a/googleapis/firestore/v1/write.pb.go
+++ b/googleapis/firestore/v1/write.pb.go
@@ -72,6 +72,12 @@
// deleted from the document on the server.
// The field paths in this mask must not contain a reserved field name.
UpdateMask *DocumentMask `protobuf:"bytes,3,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
+ // The transforms to perform after update.
+ //
+ // This field can be set only when the operation is `update`. If present, this
+ // write is equivalent to performing `update` and `transform` to the same
+ // document atomically and in order.
+ UpdateTransforms []*DocumentTransform_FieldTransform `protobuf:"bytes,7,rep,name=update_transforms,json=updateTransforms,proto3" json:"update_transforms,omitempty"`
// An optional precondition on the document.
//
// The write will fail if this is set and not met by the target document.
@@ -163,6 +169,13 @@
return nil
}
+func (m *Write) GetUpdateTransforms() []*DocumentTransform_FieldTransform {
+ if m != nil {
+ return m.UpdateTransforms
+ }
+ return nil
+}
+
func (m *Write) GetCurrentDocument() *Precondition {
if m != nil {
return m.CurrentDocument
@@ -713,59 +726,60 @@
func init() { proto.RegisterFile("google/firestore/v1/write.proto", fileDescriptor_5c77c99aae0973dc) }
var fileDescriptor_5c77c99aae0973dc = []byte{
- // 853 bytes of a gzipped FileDescriptorProto
+ // 867 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x41, 0x6f, 0xe3, 0x44,
- 0x14, 0x6e, 0xd2, 0x26, 0x5b, 0xbf, 0xa0, 0xd6, 0x3b, 0xcb, 0x6a, 0x4d, 0xd8, 0x6a, 0x43, 0x0e,
- 0xa8, 0x07, 0xe4, 0xa8, 0x45, 0xb0, 0x82, 0x85, 0x43, 0xd3, 0x3a, 0x6d, 0xa5, 0x2d, 0x0a, 0x4e,
- 0x1a, 0x04, 0xaa, 0x34, 0xcc, 0xda, 0x13, 0xd7, 0x5a, 0x7b, 0xc6, 0x9a, 0x19, 0x87, 0xdd, 0xdf,
- 0xc1, 0x85, 0x33, 0xe2, 0xc4, 0xbf, 0xe0, 0xca, 0x8d, 0x1f, 0xc1, 0xff, 0x40, 0x9e, 0xb1, 0xdd,
- 0x06, 0xa2, 0x6c, 0x59, 0xed, 0x2d, 0x6f, 0xde, 0xf7, 0x7d, 0xef, 0xf3, 0x7b, 0x33, 0x2f, 0xf0,
- 0x24, 0xe2, 0x3c, 0x4a, 0xe8, 0x60, 0x1e, 0x0b, 0x2a, 0x15, 0x17, 0x74, 0xb0, 0x38, 0x18, 0xfc,
- 0x24, 0x62, 0x45, 0xdd, 0x4c, 0x70, 0xc5, 0xd1, 0x03, 0x03, 0x70, 0x6b, 0x80, 0xbb, 0x38, 0xe8,
- 0xf6, 0x56, 0xb1, 0x02, 0x9e, 0xa6, 0x9c, 0x19, 0x5a, 0xb7, 0xbf, 0x0a, 0x11, 0xf2, 0x20, 0x4f,
- 0x29, 0x53, 0x25, 0xa6, 0xaa, 0xad, 0xa3, 0x17, 0xf9, 0x7c, 0xa0, 0xe2, 0x94, 0x4a, 0x45, 0xd2,
- 0xac, 0x04, 0x3c, 0x2e, 0x01, 0x24, 0x8b, 0x07, 0x84, 0x31, 0xae, 0x88, 0x8a, 0x39, 0x93, 0x26,
- 0xdb, 0xff, 0xa3, 0x09, 0xad, 0xef, 0x0a, 0xa7, 0xe8, 0x29, 0xb4, 0xf3, 0x2c, 0x24, 0x8a, 0x3a,
- 0x8d, 0x5e, 0x63, 0xbf, 0x73, 0xb8, 0xe7, 0xae, 0x30, 0xed, 0x9e, 0x94, 0xd5, 0xcf, 0x36, 0xfc,
- 0x12, 0x8e, 0x1c, 0x68, 0x87, 0x34, 0xa1, 0x8a, 0x3a, 0xcd, 0x5e, 0x63, 0xdf, 0x2a, 0x32, 0x26,
- 0x46, 0x23, 0xb0, 0x94, 0x20, 0x4c, 0xce, 0xb9, 0x48, 0x9d, 0xb6, 0x56, 0xfd, 0x78, 0xad, 0xea,
- 0xb4, 0x42, 0x9f, 0x6d, 0xf8, 0x37, 0x54, 0x34, 0x84, 0x8e, 0xa9, 0x85, 0x53, 0x22, 0x5f, 0x3a,
- 0x9b, 0x5a, 0xe9, 0xa3, 0xb5, 0x4a, 0x17, 0x44, 0xbe, 0xf4, 0xc1, 0xb0, 0x8a, 0xdf, 0xe8, 0x39,
- 0xd8, 0x41, 0x2e, 0x04, 0x65, 0x0a, 0x57, 0x1d, 0x74, 0xb6, 0xd6, 0x08, 0x8d, 0x05, 0x0d, 0x38,
- 0x0b, 0xe3, 0xa2, 0x59, 0xfe, 0x6e, 0x49, 0xad, 0xd4, 0x87, 0x1d, 0xb0, 0x78, 0x46, 0x85, 0x6e,
- 0x65, 0xff, 0xef, 0x16, 0xdc, 0xff, 0xcf, 0x17, 0xa0, 0x2e, 0x6c, 0xd7, 0x85, 0x8a, 0x8e, 0x5a,
- 0x7e, 0x1d, 0xa3, 0x1f, 0xc1, 0x9e, 0xc7, 0x34, 0x09, 0x71, 0xfd, 0x8d, 0xd2, 0x69, 0xf6, 0x36,
- 0xf7, 0x3b, 0x87, 0x9f, 0xdd, 0xad, 0x3f, 0xee, 0xa8, 0xa0, 0xd7, 0xa1, 0xbf, 0x3b, 0x5f, 0x8a,
- 0x65, 0xf7, 0xaf, 0x2d, 0xd8, 0x59, 0xc6, 0xa0, 0x3d, 0x00, 0x53, 0x34, 0x23, 0xea, 0xba, 0xb4,
- 0x64, 0xe9, 0x93, 0x31, 0x51, 0xd7, 0x48, 0xc0, 0x03, 0x49, 0x15, 0x56, 0x1c, 0x4b, 0x2a, 0x16,
- 0x54, 0xe0, 0x05, 0x49, 0x72, 0x33, 0xd3, 0x9d, 0xc3, 0xa3, 0xb7, 0xb2, 0xe5, 0x4e, 0xb4, 0xd2,
- 0xac, 0x10, 0x3a, 0xdb, 0xf0, 0x6d, 0x49, 0xd5, 0x94, 0xdf, 0x3a, 0x43, 0x5f, 0x82, 0x15, 0xb3,
- 0x40, 0x50, 0xdd, 0x24, 0x33, 0xd6, 0xee, 0xca, 0x4a, 0x95, 0xc4, 0x0d, 0x1c, 0x7d, 0x0e, 0xf7,
- 0x52, 0xf2, 0x2a, 0x4e, 0xf3, 0xb4, 0x9c, 0xe3, 0x7a, 0x66, 0x05, 0xd6, 0xbc, 0x98, 0x69, 0x5e,
- 0xeb, 0x4e, 0x3c, 0x03, 0x46, 0xdf, 0xc3, 0x23, 0x92, 0x65, 0x94, 0x85, 0x38, 0x8d, 0xa5, 0x8c,
- 0x59, 0x84, 0x69, 0xa2, 0x9d, 0xc8, 0xf2, 0x6a, 0x3f, 0x59, 0xa9, 0x73, 0x24, 0x04, 0x79, 0x5d,
- 0x89, 0x3d, 0x34, 0x0a, 0x17, 0x46, 0xc0, 0x2b, 0xf9, 0x68, 0x0a, 0x0f, 0x05, 0x4d, 0xf9, 0x82,
- 0x62, 0x92, 0x24, 0x78, 0x2e, 0x78, 0x8a, 0x49, 0x41, 0x73, 0xee, 0xdd, 0x55, 0x18, 0x19, 0xfe,
- 0x51, 0x92, 0x8c, 0x04, 0x4f, 0x75, 0xaa, 0xff, 0x35, 0x74, 0x6e, 0xf7, 0xfa, 0x31, 0x38, 0x13,
- 0xcf, 0x9f, 0x79, 0x3e, 0x9e, 0x1d, 0x3d, 0xbf, 0xf4, 0xf0, 0xe5, 0x37, 0x93, 0xb1, 0x77, 0x7c,
- 0x3e, 0x3a, 0xf7, 0x4e, 0xec, 0x0d, 0x64, 0xc3, 0x7b, 0xbe, 0xf7, 0xed, 0xa5, 0x37, 0x99, 0xe2,
- 0xe9, 0xf9, 0x85, 0x67, 0x37, 0x86, 0x36, 0xec, 0xd4, 0xb7, 0x13, 0xab, 0xd7, 0x19, 0xed, 0xff,
- 0xdc, 0x80, 0x8e, 0xde, 0x15, 0x3e, 0x95, 0x79, 0xa2, 0xd0, 0xb3, 0xfa, 0x59, 0x16, 0x3b, 0xa7,
- 0x5c, 0x1b, 0x75, 0x37, 0xab, 0x85, 0xe4, 0x4e, 0xab, 0x85, 0x54, 0xbd, 0xc7, 0xe2, 0x00, 0x9d,
- 0xc2, 0xfd, 0x1b, 0x79, 0xa1, 0x05, 0xab, 0x37, 0xb0, 0x66, 0x20, 0xbe, 0x5d, 0x93, 0x8c, 0x09,
- 0xd9, 0xff, 0xa5, 0x01, 0x3b, 0xd5, 0x45, 0x3c, 0xbe, 0x26, 0x2c, 0xa2, 0xe8, 0x8b, 0x7f, 0x3d,
- 0xbd, 0x37, 0x2d, 0xb3, 0x5b, 0x2f, 0x73, 0x0f, 0x40, 0x11, 0x11, 0x51, 0x85, 0xe3, 0x50, 0x3a,
- 0xad, 0xde, 0xe6, 0x7e, 0xcb, 0xb7, 0xcc, 0xc9, 0x79, 0x28, 0xd1, 0x27, 0x50, 0x76, 0x3a, 0xc4,
- 0xb7, 0x60, 0x6d, 0x0d, 0xb3, 0xcb, 0xcc, 0xb4, 0x42, 0x17, 0x0d, 0xab, 0xad, 0x9d, 0x98, 0x95,
- 0xb8, 0x6e, 0x2b, 0xfc, 0x2f, 0x71, 0xf4, 0x14, 0x2c, 0x41, 0x49, 0x68, 0x7a, 0xbf, 0xf5, 0xc6,
- 0xde, 0x6f, 0x17, 0xe0, 0x22, 0x5c, 0x72, 0xe5, 0x6b, 0xd5, 0xb7, 0x70, 0xd5, 0x7c, 0xd7, 0xae,
- 0x4e, 0x60, 0xd7, 0x7b, 0x15, 0x4b, 0x45, 0x59, 0x40, 0x47, 0x71, 0xa2, 0xa8, 0x40, 0x1f, 0x82,
- 0x55, 0x57, 0xd4, 0xb6, 0x5a, 0xfe, 0x76, 0x35, 0x0a, 0xf4, 0x3e, 0xb4, 0x02, 0x9e, 0x33, 0xa5,
- 0x17, 0x54, 0xcb, 0x37, 0xc1, 0xf0, 0xb7, 0x06, 0x3c, 0x0a, 0x78, 0xba, 0x6a, 0xda, 0x43, 0xd0,
- 0x77, 0x77, 0x5c, 0x98, 0x18, 0x37, 0x7e, 0xf8, 0xaa, 0x84, 0x44, 0x3c, 0x21, 0x2c, 0x72, 0xb9,
- 0x88, 0x06, 0x11, 0x65, 0xda, 0xe2, 0xc0, 0xa4, 0x48, 0x16, 0xcb, 0xa5, 0xbf, 0xde, 0x67, 0x75,
- 0xf0, 0x6b, 0x73, 0xeb, 0xf4, 0x78, 0x34, 0xf9, 0xbd, 0xf9, 0xc1, 0xa9, 0x51, 0x39, 0x4e, 0x78,
- 0x1e, 0xba, 0xa3, 0xba, 0xdc, 0xec, 0xe0, 0xcf, 0x2a, 0x77, 0xa5, 0x73, 0x57, 0x75, 0xee, 0x6a,
- 0x76, 0xf0, 0xa2, 0xad, 0xeb, 0x7c, 0xfa, 0x4f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x69, 0x45, 0xd0,
- 0x55, 0x39, 0x08, 0x00, 0x00,
+ 0x14, 0x6e, 0xd2, 0x26, 0xad, 0x5f, 0x50, 0xeb, 0xce, 0xb2, 0x5a, 0x13, 0xb6, 0xda, 0x90, 0x03,
+ 0xea, 0x01, 0x39, 0x6a, 0x11, 0xac, 0x60, 0xe1, 0xd0, 0xb4, 0x4e, 0x5b, 0x69, 0x8b, 0x82, 0x93,
+ 0x06, 0x81, 0x2a, 0x99, 0x69, 0x3c, 0x71, 0x47, 0x6b, 0xcf, 0x58, 0x33, 0xe3, 0xb0, 0xfb, 0x3b,
+ 0xb8, 0x70, 0x46, 0x70, 0xe1, 0xa7, 0x70, 0xe3, 0x47, 0xf0, 0x3f, 0x90, 0x67, 0x6c, 0x37, 0x85,
+ 0x28, 0x5b, 0x0a, 0xb7, 0xbc, 0x79, 0xdf, 0xf7, 0xbd, 0xcf, 0xef, 0xcd, 0xbc, 0xc0, 0xb3, 0x88,
+ 0xf3, 0x28, 0x26, 0xbd, 0x19, 0x15, 0x44, 0x2a, 0x2e, 0x48, 0x6f, 0x7e, 0xd0, 0xfb, 0x41, 0x50,
+ 0x45, 0xdc, 0x54, 0x70, 0xc5, 0xd1, 0x23, 0x03, 0x70, 0x2b, 0x80, 0x3b, 0x3f, 0x68, 0x77, 0x96,
+ 0xb1, 0xa6, 0x3c, 0x49, 0x38, 0x33, 0xb4, 0x76, 0x77, 0x19, 0x22, 0xe4, 0xd3, 0x2c, 0x21, 0x4c,
+ 0x15, 0x98, 0xb2, 0xb6, 0x8e, 0xae, 0xb3, 0x59, 0x4f, 0xd1, 0x84, 0x48, 0x85, 0x93, 0xb4, 0x00,
+ 0x3c, 0x2d, 0x00, 0x38, 0xa5, 0x3d, 0xcc, 0x18, 0x57, 0x58, 0x51, 0xce, 0xa4, 0xc9, 0x76, 0x7f,
+ 0x5d, 0x87, 0xc6, 0x37, 0xb9, 0x53, 0xf4, 0x1c, 0x9a, 0x59, 0x1a, 0x62, 0x45, 0x9c, 0x5a, 0xa7,
+ 0xb6, 0xdf, 0x3a, 0xdc, 0x73, 0x97, 0x98, 0x76, 0x4f, 0x8a, 0xea, 0x67, 0x6b, 0x7e, 0x01, 0x47,
+ 0x0e, 0x34, 0x43, 0x12, 0x13, 0x45, 0x9c, 0x7a, 0xa7, 0xb6, 0x6f, 0xe5, 0x19, 0x13, 0xa3, 0x01,
+ 0x58, 0x4a, 0x60, 0x26, 0x67, 0x5c, 0x24, 0x4e, 0x53, 0xab, 0x7e, 0xb8, 0x52, 0x75, 0x5c, 0xa2,
+ 0xcf, 0xd6, 0xfc, 0x5b, 0x2a, 0xea, 0x43, 0xcb, 0xd4, 0x0a, 0x12, 0x2c, 0x5f, 0x39, 0xeb, 0x5a,
+ 0xe9, 0x83, 0x95, 0x4a, 0x17, 0x58, 0xbe, 0xf2, 0xc1, 0xb0, 0xf2, 0xdf, 0xe8, 0x1a, 0x76, 0x0b,
+ 0x8d, 0x4a, 0x57, 0x3a, 0x9b, 0x9d, 0xf5, 0xfd, 0xd6, 0xe1, 0x27, 0xf7, 0xf3, 0xe4, 0x0e, 0x28,
+ 0x89, 0xc3, 0x2a, 0xf4, 0x6d, 0xa3, 0x57, 0x1d, 0x48, 0xf4, 0x12, 0xec, 0x69, 0x26, 0x04, 0x61,
+ 0x2a, 0x28, 0xa7, 0xe4, 0x6c, 0xac, 0x30, 0x3b, 0x14, 0x64, 0xca, 0x59, 0x48, 0xf3, 0x81, 0xf8,
+ 0x3b, 0x05, 0xb5, 0xac, 0xdb, 0x6f, 0x81, 0xc5, 0x53, 0x22, 0xf4, 0xb8, 0xba, 0x7f, 0x36, 0x60,
+ 0xf7, 0x1f, 0x8e, 0x50, 0x1b, 0xb6, 0xaa, 0x42, 0xf9, 0xd4, 0x2c, 0xbf, 0x8a, 0xd1, 0xf7, 0x60,
+ 0xcf, 0x72, 0xc3, 0x8b, 0xdf, 0x5b, 0xff, 0x2f, 0xdf, 0xbb, 0x33, 0xbb, 0x13, 0xcb, 0xf6, 0x1f,
+ 0x1b, 0xb0, 0x7d, 0x17, 0x83, 0xf6, 0x00, 0x4c, 0xd1, 0x14, 0xab, 0x9b, 0xc2, 0x92, 0xa5, 0x4f,
+ 0x86, 0x58, 0xdd, 0x20, 0x01, 0x8f, 0x24, 0x51, 0x81, 0xe2, 0x81, 0x24, 0x62, 0x4e, 0x44, 0x30,
+ 0xc7, 0x71, 0x66, 0xee, 0xcd, 0xf6, 0xe1, 0xd1, 0x83, 0x6c, 0xb9, 0x23, 0xad, 0x34, 0xc9, 0x85,
+ 0xce, 0xd6, 0x7c, 0x5b, 0x12, 0x35, 0xe6, 0x0b, 0x67, 0xe8, 0x73, 0xb0, 0x28, 0x9b, 0x0a, 0xa2,
+ 0x9b, 0x64, 0xae, 0x4e, 0x7b, 0x69, 0xa5, 0x52, 0xe2, 0x16, 0x8e, 0x3e, 0x85, 0xcd, 0x04, 0xbf,
+ 0xa6, 0x49, 0x96, 0x14, 0x73, 0x5c, 0xcd, 0x2c, 0xc1, 0x9a, 0x47, 0x99, 0xe6, 0x35, 0xee, 0xc5,
+ 0x33, 0x60, 0xf4, 0x2d, 0x3c, 0xc1, 0x69, 0x4a, 0x58, 0x18, 0x24, 0x54, 0x4a, 0xca, 0xa2, 0x80,
+ 0xc4, 0xda, 0x89, 0x2c, 0x9e, 0xcf, 0xb3, 0xa5, 0x3a, 0x47, 0x42, 0xe0, 0x37, 0xa5, 0xd8, 0x63,
+ 0xa3, 0x70, 0x61, 0x04, 0xbc, 0x82, 0x8f, 0xc6, 0xf0, 0x58, 0x90, 0x84, 0xcf, 0x49, 0x80, 0xe3,
+ 0x38, 0x98, 0x09, 0x9e, 0x04, 0x38, 0xa7, 0x39, 0x9b, 0xf7, 0x15, 0x46, 0x86, 0x7f, 0x14, 0xc7,
+ 0x03, 0xc1, 0x13, 0x9d, 0xea, 0x7e, 0x09, 0xad, 0xc5, 0x5e, 0x3f, 0x05, 0x67, 0xe4, 0xf9, 0x13,
+ 0xcf, 0x0f, 0x26, 0x47, 0x2f, 0x2f, 0xbd, 0xe0, 0xf2, 0xab, 0xd1, 0xd0, 0x3b, 0x3e, 0x1f, 0x9c,
+ 0x7b, 0x27, 0xf6, 0x1a, 0xb2, 0xe1, 0x1d, 0xdf, 0xfb, 0xfa, 0xd2, 0x1b, 0x8d, 0x83, 0xf1, 0xf9,
+ 0x85, 0x67, 0xd7, 0xfa, 0x36, 0x6c, 0x57, 0xb7, 0x33, 0x50, 0x6f, 0x52, 0xd2, 0xfd, 0xb1, 0x06,
+ 0x2d, 0xbd, 0x8f, 0x7c, 0x22, 0xb3, 0x58, 0xa1, 0x17, 0xd5, 0xd3, 0xcf, 0xf7, 0x5a, 0xb1, 0x9a,
+ 0xaa, 0x6e, 0x96, 0x4b, 0xcf, 0x1d, 0x97, 0x4b, 0xaf, 0x7c, 0xf3, 0xf9, 0x01, 0x3a, 0x85, 0xdd,
+ 0x5b, 0x79, 0xa1, 0x05, 0xcb, 0x37, 0xb0, 0x62, 0x20, 0xbe, 0x5d, 0x91, 0x8c, 0x09, 0xd9, 0xfd,
+ 0xa9, 0x06, 0xdb, 0xe5, 0x45, 0x3c, 0xbe, 0xc1, 0x2c, 0x22, 0xe8, 0xb3, 0xbf, 0x3d, 0xbd, 0xb7,
+ 0x2d, 0xcc, 0x85, 0x97, 0xb9, 0x07, 0xa0, 0xb0, 0x88, 0x88, 0x0a, 0x68, 0x28, 0x9d, 0x46, 0x67,
+ 0x7d, 0xbf, 0xe1, 0x5b, 0xe6, 0xe4, 0x3c, 0x94, 0xe8, 0x23, 0x28, 0x3a, 0x1d, 0x06, 0x0b, 0xb0,
+ 0xa6, 0x86, 0xd9, 0x45, 0x66, 0x5c, 0xa2, 0xf3, 0x86, 0x55, 0xd6, 0x4e, 0xcc, 0xda, 0x5d, 0xb5,
+ 0x15, 0xfe, 0x95, 0x38, 0x7a, 0x0e, 0x96, 0x20, 0x38, 0x34, 0xbd, 0xdf, 0x78, 0x6b, 0xef, 0xb7,
+ 0x72, 0x70, 0x1e, 0xde, 0x71, 0xe5, 0x6b, 0xd5, 0x07, 0xb8, 0xaa, 0xff, 0xdf, 0xae, 0x4e, 0x60,
+ 0xc7, 0x7b, 0x4d, 0xa5, 0x22, 0x6c, 0x4a, 0x06, 0x34, 0x56, 0x44, 0xa0, 0xf7, 0xc1, 0xaa, 0x2a,
+ 0x6a, 0x5b, 0x0d, 0x7f, 0xab, 0x1c, 0x05, 0x7a, 0x17, 0x1a, 0x53, 0x9e, 0x31, 0xa5, 0x17, 0x54,
+ 0xc3, 0x37, 0x41, 0xff, 0x97, 0x1a, 0x3c, 0x99, 0xf2, 0x64, 0xd9, 0xb4, 0xfb, 0xa0, 0xef, 0xee,
+ 0x30, 0x37, 0x31, 0xac, 0x7d, 0xf7, 0x45, 0x01, 0x89, 0x78, 0x8c, 0x59, 0xe4, 0x72, 0x11, 0xf5,
+ 0x22, 0xc2, 0xb4, 0xc5, 0x9e, 0x49, 0xe1, 0x94, 0xca, 0x3b, 0x7f, 0xef, 0x2f, 0xaa, 0xe0, 0xe7,
+ 0xfa, 0xc6, 0xe9, 0xf1, 0x60, 0xf4, 0x5b, 0xfd, 0xbd, 0x53, 0xa3, 0x72, 0x1c, 0xf3, 0x2c, 0x74,
+ 0x07, 0x55, 0xb9, 0xc9, 0xc1, 0xef, 0x65, 0xee, 0x4a, 0xe7, 0xae, 0xaa, 0xdc, 0xd5, 0xe4, 0xe0,
+ 0xba, 0xa9, 0xeb, 0x7c, 0xfc, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0x17, 0x98, 0xa8, 0x43, 0x9d,
+ 0x08, 0x00, 0x00,
}