// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/bigquery/datatransfer/v1/datasource.proto
package datatransfer
import (
context "context"
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
duration "github.com/golang/protobuf/ptypes/duration"
empty "github.com/golang/protobuf/ptypes/empty"
_ "github.com/golang/protobuf/ptypes/timestamp"
wrappers "github.com/golang/protobuf/ptypes/wrappers"
_ "google.golang.org/genproto/googleapis/api/annotations"
field_mask "google.golang.org/genproto/protobuf/field_mask"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Options for writing to the table.
// The WRITE_EMPTY option is intentionally excluded from the enum and is not
// supported by the data transfer service.
type WriteDisposition int32
const (
// The default write disposition.
WriteDisposition_WRITE_DISPOSITION_UNSPECIFIED WriteDisposition = 0
// Overwrites the table data.
WriteDisposition_WRITE_TRUNCATE WriteDisposition = 1
// The data is appended to the table.
// Note that duplication might happen if this mode is used.
WriteDisposition_WRITE_APPEND WriteDisposition = 2
)
var WriteDisposition_name = map[int32]string{
0: "WRITE_DISPOSITION_UNSPECIFIED",
1: "WRITE_TRUNCATE",
2: "WRITE_APPEND",
}
var WriteDisposition_value = map[string]int32{
"WRITE_DISPOSITION_UNSPECIFIED": 0,
"WRITE_TRUNCATE": 1,
"WRITE_APPEND": 2,
}
func (x WriteDisposition) String() string {
return proto.EnumName(WriteDisposition_name, int32(x))
}
func (WriteDisposition) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0}
}
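// Illustrative sketch (not part of the generated surface): the paired
// name/value maps above allow round-tripping a disposition through its proto
// name, e.g. when parsing a configuration value.
func parseWriteDisposition(name string) (WriteDisposition, bool) {
v, ok := WriteDisposition_value[name]
return WriteDisposition(v), ok
}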
// Data format.
type ImportedDataInfo_Format int32
const (
// Unspecified format. In this case, we have to infer the format from the
// data source.
ImportedDataInfo_FORMAT_UNSPECIFIED ImportedDataInfo_Format = 0
// CSV format.
ImportedDataInfo_CSV ImportedDataInfo_Format = 1
// Newline-delimited JSON.
ImportedDataInfo_JSON ImportedDataInfo_Format = 2
// Avro format. See http://avro.apache.org .
ImportedDataInfo_AVRO ImportedDataInfo_Format = 3
// RecordIO.
ImportedDataInfo_RECORDIO ImportedDataInfo_Format = 4
// ColumnIO.
ImportedDataInfo_COLUMNIO ImportedDataInfo_Format = 5
// Capacitor.
ImportedDataInfo_CAPACITOR ImportedDataInfo_Format = 6
// Parquet format. See https://parquet.apache.org .
ImportedDataInfo_PARQUET ImportedDataInfo_Format = 7
// ORC format. See https://orc.apache.org .
ImportedDataInfo_ORC ImportedDataInfo_Format = 8
)
var ImportedDataInfo_Format_name = map[int32]string{
0: "FORMAT_UNSPECIFIED",
1: "CSV",
2: "JSON",
3: "AVRO",
4: "RECORDIO",
5: "COLUMNIO",
6: "CAPACITOR",
7: "PARQUET",
8: "ORC",
}
var ImportedDataInfo_Format_value = map[string]int32{
"FORMAT_UNSPECIFIED": 0,
"CSV": 1,
"JSON": 2,
"AVRO": 3,
"RECORDIO": 4,
"COLUMNIO": 5,
"CAPACITOR": 6,
"PARQUET": 7,
"ORC": 8,
}
func (x ImportedDataInfo_Format) String() string {
return proto.EnumName(ImportedDataInfo_Format_name, int32(x))
}
func (ImportedDataInfo_Format) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 0}
}
// Encoding of input data in CSV/JSON format.
type ImportedDataInfo_Encoding int32
const (
// Default encoding (UTF8).
ImportedDataInfo_ENCODING_UNSPECIFIED ImportedDataInfo_Encoding = 0
// ISO_8859_1 encoding.
ImportedDataInfo_ISO_8859_1 ImportedDataInfo_Encoding = 1
// UTF8 encoding.
ImportedDataInfo_UTF8 ImportedDataInfo_Encoding = 2
)
var ImportedDataInfo_Encoding_name = map[int32]string{
0: "ENCODING_UNSPECIFIED",
1: "ISO_8859_1",
2: "UTF8",
}
var ImportedDataInfo_Encoding_value = map[string]int32{
"ENCODING_UNSPECIFIED": 0,
"ISO_8859_1": 1,
"UTF8": 2,
}
func (x ImportedDataInfo_Encoding) String() string {
return proto.EnumName(ImportedDataInfo_Encoding_name, int32(x))
}
func (ImportedDataInfo_Encoding) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 1}
}
// LINT.IfChange
// Field type.
type ImportedDataInfo_FieldSchema_Type int32
const (
// Illegal value.
ImportedDataInfo_FieldSchema_TYPE_UNSPECIFIED ImportedDataInfo_FieldSchema_Type = 0
// 64K, UTF8.
ImportedDataInfo_FieldSchema_STRING ImportedDataInfo_FieldSchema_Type = 1
// 64-bit signed.
ImportedDataInfo_FieldSchema_INTEGER ImportedDataInfo_FieldSchema_Type = 2
// 64-bit IEEE floating point.
ImportedDataInfo_FieldSchema_FLOAT ImportedDataInfo_FieldSchema_Type = 3
// Aggregate type.
ImportedDataInfo_FieldSchema_RECORD ImportedDataInfo_FieldSchema_Type = 4
// 64K, Binary.
ImportedDataInfo_FieldSchema_BYTES ImportedDataInfo_FieldSchema_Type = 5
// 2-valued.
ImportedDataInfo_FieldSchema_BOOLEAN ImportedDataInfo_FieldSchema_Type = 6
// 64-bit signed usec since UTC epoch.
ImportedDataInfo_FieldSchema_TIMESTAMP ImportedDataInfo_FieldSchema_Type = 7
// Civil date - Year, Month, Day.
ImportedDataInfo_FieldSchema_DATE ImportedDataInfo_FieldSchema_Type = 8
// Civil time - Hour, Minute, Second, Microseconds.
ImportedDataInfo_FieldSchema_TIME ImportedDataInfo_FieldSchema_Type = 9
// Combination of civil date and civil time.
ImportedDataInfo_FieldSchema_DATETIME ImportedDataInfo_FieldSchema_Type = 10
// Numeric type with 38 decimal digits of precision and 9 decimal digits
// of scale.
ImportedDataInfo_FieldSchema_NUMERIC ImportedDataInfo_FieldSchema_Type = 11
// Geography object (go/googlesql_geography).
ImportedDataInfo_FieldSchema_GEOGRAPHY ImportedDataInfo_FieldSchema_Type = 12
)
var ImportedDataInfo_FieldSchema_Type_name = map[int32]string{
0: "TYPE_UNSPECIFIED",
1: "STRING",
2: "INTEGER",
3: "FLOAT",
4: "RECORD",
5: "BYTES",
6: "BOOLEAN",
7: "TIMESTAMP",
8: "DATE",
9: "TIME",
10: "DATETIME",
11: "NUMERIC",
12: "GEOGRAPHY",
}
var ImportedDataInfo_FieldSchema_Type_value = map[string]int32{
"TYPE_UNSPECIFIED": 0,
"STRING": 1,
"INTEGER": 2,
"FLOAT": 3,
"RECORD": 4,
"BYTES": 5,
"BOOLEAN": 6,
"TIMESTAMP": 7,
"DATE": 8,
"TIME": 9,
"DATETIME": 10,
"NUMERIC": 11,
"GEOGRAPHY": 12,
}
func (x ImportedDataInfo_FieldSchema_Type) String() string {
return proto.EnumName(ImportedDataInfo_FieldSchema_Type_name, int32(x))
}
func (ImportedDataInfo_FieldSchema_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 0, 0}
}
// Describes data which should be imported.
type ImportedDataInfo struct {
// SQL query to run. When empty, the API checks that there is exactly one
// table_def specified and loads that table. Only Standard SQL queries
// are accepted. Legacy SQL is not allowed.
Sql string `protobuf:"bytes,1,opt,name=sql,proto3" json:"sql,omitempty"`
// Table where results should be written.
DestinationTableId string `protobuf:"bytes,2,opt,name=destination_table_id,json=destinationTableId,proto3" json:"destination_table_id,omitempty"`
// The description of a destination table. This can be several sentences
// or paragraphs describing the table contents in detail.
DestinationTableDescription string `protobuf:"bytes,10,opt,name=destination_table_description,json=destinationTableDescription,proto3" json:"destination_table_description,omitempty"`
// When used WITHOUT the "sql" parameter, describes the schema of the
// destination table.
// When used WITH the "sql" parameter, describes tables with data stored
// outside of BigQuery.
TableDefs []*ImportedDataInfo_TableDefinition `protobuf:"bytes,3,rep,name=table_defs,json=tableDefs,proto3" json:"table_defs,omitempty"`
// Inline code for user-defined function resources.
// Ignored when the "sql" parameter is empty.
UserDefinedFunctions []string `protobuf:"bytes,4,rep,name=user_defined_functions,json=userDefinedFunctions,proto3" json:"user_defined_functions,omitempty"`
// Specifies the action if the destination table already exists.
WriteDisposition WriteDisposition `protobuf:"varint,6,opt,name=write_disposition,json=writeDisposition,proto3,enum=google.cloud.bigquery.datatransfer.v1.WriteDisposition" json:"write_disposition,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ImportedDataInfo) Reset() { *m = ImportedDataInfo{} }
func (m *ImportedDataInfo) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo) ProtoMessage() {}
func (*ImportedDataInfo) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0}
}
func (m *ImportedDataInfo) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ImportedDataInfo.Unmarshal(m, b)
}
func (m *ImportedDataInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ImportedDataInfo.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo) XXX_Merge(src proto.Message) {
xxx_messageInfo_ImportedDataInfo.Merge(m, src)
}
func (m *ImportedDataInfo) XXX_Size() int {
return xxx_messageInfo_ImportedDataInfo.Size(m)
}
func (m *ImportedDataInfo) XXX_DiscardUnknown() {
xxx_messageInfo_ImportedDataInfo.DiscardUnknown(m)
}
var xxx_messageInfo_ImportedDataInfo proto.InternalMessageInfo
func (m *ImportedDataInfo) GetSql() string {
if m != nil {
return m.Sql
}
return ""
}
func (m *ImportedDataInfo) GetDestinationTableId() string {
if m != nil {
return m.DestinationTableId
}
return ""
}
func (m *ImportedDataInfo) GetDestinationTableDescription() string {
if m != nil {
return m.DestinationTableDescription
}
return ""
}
func (m *ImportedDataInfo) GetTableDefs() []*ImportedDataInfo_TableDefinition {
if m != nil {
return m.TableDefs
}
return nil
}
func (m *ImportedDataInfo) GetUserDefinedFunctions() []string {
if m != nil {
return m.UserDefinedFunctions
}
return nil
}
func (m *ImportedDataInfo) GetWriteDisposition() WriteDisposition {
if m != nil {
return m.WriteDisposition
}
return WriteDisposition_WRITE_DISPOSITION_UNSPECIFIED
}
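// Illustrative sketch of a query-based import, following the field comments
// above: with "sql" set, table_defs would describe external tables readable
// from the query. The query and table id below are placeholders.
func newQueryImport() *ImportedDataInfo {
return &ImportedDataInfo{
Sql: "SELECT name, value FROM my_table", // Standard SQL only
DestinationTableId: "results_table",
WriteDisposition: WriteDisposition_WRITE_APPEND, // duplicates possible in this mode
}
}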
// Defines schema of a field in the imported data.
type ImportedDataInfo_FieldSchema struct {
// Field name. Matches: [A-Za-z_][A-Za-z_0-9]{0,127}
FieldName string `protobuf:"bytes,1,opt,name=field_name,json=fieldName,proto3" json:"field_name,omitempty"`
// Field type.
Type ImportedDataInfo_FieldSchema_Type `protobuf:"varint,2,opt,name=type,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_FieldSchema_Type" json:"type,omitempty"`
// Is field repeated.
IsRepeated bool `protobuf:"varint,3,opt,name=is_repeated,json=isRepeated,proto3" json:"is_repeated,omitempty"`
// Description for this field.
Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"`
// Present iff type == RECORD.
Schema *ImportedDataInfo_RecordSchema `protobuf:"bytes,5,opt,name=schema,proto3" json:"schema,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ImportedDataInfo_FieldSchema) Reset() { *m = ImportedDataInfo_FieldSchema{} }
func (m *ImportedDataInfo_FieldSchema) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_FieldSchema) ProtoMessage() {}
func (*ImportedDataInfo_FieldSchema) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 0}
}
func (m *ImportedDataInfo_FieldSchema) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ImportedDataInfo_FieldSchema.Unmarshal(m, b)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ImportedDataInfo_FieldSchema.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Merge(src proto.Message) {
xxx_messageInfo_ImportedDataInfo_FieldSchema.Merge(m, src)
}
func (m *ImportedDataInfo_FieldSchema) XXX_Size() int {
return xxx_messageInfo_ImportedDataInfo_FieldSchema.Size(m)
}
func (m *ImportedDataInfo_FieldSchema) XXX_DiscardUnknown() {
xxx_messageInfo_ImportedDataInfo_FieldSchema.DiscardUnknown(m)
}
var xxx_messageInfo_ImportedDataInfo_FieldSchema proto.InternalMessageInfo
func (m *ImportedDataInfo_FieldSchema) GetFieldName() string {
if m != nil {
return m.FieldName
}
return ""
}
func (m *ImportedDataInfo_FieldSchema) GetType() ImportedDataInfo_FieldSchema_Type {
if m != nil {
return m.Type
}
return ImportedDataInfo_FieldSchema_TYPE_UNSPECIFIED
}
func (m *ImportedDataInfo_FieldSchema) GetIsRepeated() bool {
if m != nil {
return m.IsRepeated
}
return false
}
func (m *ImportedDataInfo_FieldSchema) GetDescription() string {
if m != nil {
return m.Description
}
return ""
}
func (m *ImportedDataInfo_FieldSchema) GetSchema() *ImportedDataInfo_RecordSchema {
if m != nil {
return m.Schema
}
return nil
}
// Describes schema of the data to be ingested.
type ImportedDataInfo_RecordSchema struct {
// One field per column in the record.
Fields []*ImportedDataInfo_FieldSchema `protobuf:"bytes,1,rep,name=fields,proto3" json:"fields,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ImportedDataInfo_RecordSchema) Reset() { *m = ImportedDataInfo_RecordSchema{} }
func (m *ImportedDataInfo_RecordSchema) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_RecordSchema) ProtoMessage() {}
func (*ImportedDataInfo_RecordSchema) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 1}
}
func (m *ImportedDataInfo_RecordSchema) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ImportedDataInfo_RecordSchema.Unmarshal(m, b)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ImportedDataInfo_RecordSchema.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Merge(src proto.Message) {
xxx_messageInfo_ImportedDataInfo_RecordSchema.Merge(m, src)
}
func (m *ImportedDataInfo_RecordSchema) XXX_Size() int {
return xxx_messageInfo_ImportedDataInfo_RecordSchema.Size(m)
}
func (m *ImportedDataInfo_RecordSchema) XXX_DiscardUnknown() {
xxx_messageInfo_ImportedDataInfo_RecordSchema.DiscardUnknown(m)
}
var xxx_messageInfo_ImportedDataInfo_RecordSchema proto.InternalMessageInfo
func (m *ImportedDataInfo_RecordSchema) GetFields() []*ImportedDataInfo_FieldSchema {
if m != nil {
return m.Fields
}
return nil
}
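// Illustrative sketch of the schema invariant documented above: Schema is set
// only when Type is RECORD, and repeated columns set IsRepeated. Field names
// are placeholders.
func exampleRecordSchema() *ImportedDataInfo_RecordSchema {
return &ImportedDataInfo_RecordSchema{
Fields: []*ImportedDataInfo_FieldSchema{
{FieldName: "id", Type: ImportedDataInfo_FieldSchema_INTEGER},
{
FieldName: "tags",
Type: ImportedDataInfo_FieldSchema_RECORD,
IsRepeated: true,
Schema: &ImportedDataInfo_RecordSchema{ // present iff type == RECORD
Fields: []*ImportedDataInfo_FieldSchema{
{FieldName: "value", Type: ImportedDataInfo_FieldSchema_STRING},
},
},
},
},
}
}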
// External table definition. These tables can be referenced with 'name'
// in the query and can be read just like any other table.
type ImportedDataInfo_TableDefinition struct {
// BigQuery table_id (required). This will be used to reference this
// table in the query.
TableId string `protobuf:"bytes,1,opt,name=table_id,json=tableId,proto3" json:"table_id,omitempty"`
// URIs for the data to be imported. All URIs must be from the same storage
// system.
SourceUris []string `protobuf:"bytes,2,rep,name=source_uris,json=sourceUris,proto3" json:"source_uris,omitempty"`
// Describes the format of the data in source_uri.
Format ImportedDataInfo_Format `protobuf:"varint,3,opt,name=format,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Format" json:"format,omitempty"`
// Specifies the maximum number of bad records that can be ignored.
// If the number of bad records exceeds this threshold, the query is aborted.
MaxBadRecords int32 `protobuf:"varint,4,opt,name=max_bad_records,json=maxBadRecords,proto3" json:"max_bad_records,omitempty"`
// Character encoding of the input when applicable (CSV, JSON).
// Defaults to UTF8.
Encoding ImportedDataInfo_Encoding `protobuf:"varint,5,opt,name=encoding,proto3,enum=google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Encoding" json:"encoding,omitempty"`
// CSV specific options.
CsvOptions *ImportedDataInfo_TableDefinition_CsvOptions `protobuf:"bytes,6,opt,name=csv_options,json=csvOptions,proto3" json:"csv_options,omitempty"`
// Optional schema for the data. When not specified for JSON and CSV formats,
// we will try to detect it automatically.
Schema *ImportedDataInfo_RecordSchema `protobuf:"bytes,7,opt,name=schema,proto3" json:"schema,omitempty"`
// Indicates whether extra values that are not represented in the table schema
// are allowed.
IgnoreUnknownValues *wrappers.BoolValue `protobuf:"bytes,10,opt,name=ignore_unknown_values,json=ignoreUnknownValues,proto3" json:"ignore_unknown_values,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ImportedDataInfo_TableDefinition) Reset() { *m = ImportedDataInfo_TableDefinition{} }
func (m *ImportedDataInfo_TableDefinition) String() string { return proto.CompactTextString(m) }
func (*ImportedDataInfo_TableDefinition) ProtoMessage() {}
func (*ImportedDataInfo_TableDefinition) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 2}
}
func (m *ImportedDataInfo_TableDefinition) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ImportedDataInfo_TableDefinition.Unmarshal(m, b)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ImportedDataInfo_TableDefinition.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Merge(src proto.Message) {
xxx_messageInfo_ImportedDataInfo_TableDefinition.Merge(m, src)
}
func (m *ImportedDataInfo_TableDefinition) XXX_Size() int {
return xxx_messageInfo_ImportedDataInfo_TableDefinition.Size(m)
}
func (m *ImportedDataInfo_TableDefinition) XXX_DiscardUnknown() {
xxx_messageInfo_ImportedDataInfo_TableDefinition.DiscardUnknown(m)
}
var xxx_messageInfo_ImportedDataInfo_TableDefinition proto.InternalMessageInfo
func (m *ImportedDataInfo_TableDefinition) GetTableId() string {
if m != nil {
return m.TableId
}
return ""
}
func (m *ImportedDataInfo_TableDefinition) GetSourceUris() []string {
if m != nil {
return m.SourceUris
}
return nil
}
func (m *ImportedDataInfo_TableDefinition) GetFormat() ImportedDataInfo_Format {
if m != nil {
return m.Format
}
return ImportedDataInfo_FORMAT_UNSPECIFIED
}
func (m *ImportedDataInfo_TableDefinition) GetMaxBadRecords() int32 {
if m != nil {
return m.MaxBadRecords
}
return 0
}
func (m *ImportedDataInfo_TableDefinition) GetEncoding() ImportedDataInfo_Encoding {
if m != nil {
return m.Encoding
}
return ImportedDataInfo_ENCODING_UNSPECIFIED
}
func (m *ImportedDataInfo_TableDefinition) GetCsvOptions() *ImportedDataInfo_TableDefinition_CsvOptions {
if m != nil {
return m.CsvOptions
}
return nil
}
func (m *ImportedDataInfo_TableDefinition) GetSchema() *ImportedDataInfo_RecordSchema {
if m != nil {
return m.Schema
}
return nil
}
func (m *ImportedDataInfo_TableDefinition) GetIgnoreUnknownValues() *wrappers.BoolValue {
if m != nil {
return m.IgnoreUnknownValues
}
return nil
}
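// Illustrative sketch of an external CSV table definition per the field
// comments above; the table id and URIs are placeholders. newCSVOptions is the
// sketch that follows the CsvOptions getters below.
func newCSVTableDef() *ImportedDataInfo_TableDefinition {
return &ImportedDataInfo_TableDefinition{
TableId: "staging_table",
SourceUris: []string{"gs://example-bucket/data-*.csv"}, // one storage system only
Format: ImportedDataInfo_CSV,
MaxBadRecords: 10, // abort the query past this many bad records
Encoding: ImportedDataInfo_UTF8,
CsvOptions: newCSVOptions(),
IgnoreUnknownValues: &wrappers.BoolValue{Value: true},
}
}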
// CSV specific options.
type ImportedDataInfo_TableDefinition_CsvOptions struct {
// The delimiter. We currently restrict this to U+0001 to U+00FF and
// apply additional constraints during validation.
FieldDelimiter *wrappers.StringValue `protobuf:"bytes,1,opt,name=field_delimiter,json=fieldDelimiter,proto3" json:"field_delimiter,omitempty"`
// Whether CSV files are allowed to have quoted newlines. If quoted
// newlines are allowed, we can't split CSV files.
AllowQuotedNewlines *wrappers.BoolValue `protobuf:"bytes,2,opt,name=allow_quoted_newlines,json=allowQuotedNewlines,proto3" json:"allow_quoted_newlines,omitempty"`
// The quote character. We currently restrict this to U+0000 to U+00FF
// and apply additional constraints during validation. Set to '\0' to
// indicate no quote is used.
QuoteChar *wrappers.StringValue `protobuf:"bytes,3,opt,name=quote_char,json=quoteChar,proto3" json:"quote_char,omitempty"`
// Number of leading rows to skip.
SkipLeadingRows *wrappers.Int64Value `protobuf:"bytes,4,opt,name=skip_leading_rows,json=skipLeadingRows,proto3" json:"skip_leading_rows,omitempty"`
// Accept rows that are missing trailing optional columns.
AllowJaggedRows *wrappers.BoolValue `protobuf:"bytes,5,opt,name=allow_jagged_rows,json=allowJaggedRows,proto3" json:"allow_jagged_rows,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) Reset() {
*m = ImportedDataInfo_TableDefinition_CsvOptions{}
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) String() string {
return proto.CompactTextString(m)
}
func (*ImportedDataInfo_TableDefinition_CsvOptions) ProtoMessage() {}
func (*ImportedDataInfo_TableDefinition_CsvOptions) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{0, 2, 0}
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Unmarshal(m, b)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Marshal(b, m, deterministic)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Merge(src proto.Message) {
xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Merge(m, src)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_Size() int {
return xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.Size(m)
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) XXX_DiscardUnknown() {
xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions.DiscardUnknown(m)
}
var xxx_messageInfo_ImportedDataInfo_TableDefinition_CsvOptions proto.InternalMessageInfo
func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetFieldDelimiter() *wrappers.StringValue {
if m != nil {
return m.FieldDelimiter
}
return nil
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetAllowQuotedNewlines() *wrappers.BoolValue {
if m != nil {
return m.AllowQuotedNewlines
}
return nil
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetQuoteChar() *wrappers.StringValue {
if m != nil {
return m.QuoteChar
}
return nil
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetSkipLeadingRows() *wrappers.Int64Value {
if m != nil {
return m.SkipLeadingRows
}
return nil
}
func (m *ImportedDataInfo_TableDefinition_CsvOptions) GetAllowJaggedRows() *wrappers.BoolValue {
if m != nil {
return m.AllowJaggedRows
}
return nil
}
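// Illustrative sketch: the CSV options use wrapper types so that "unset" stays
// distinguishable from zero values. The delimiter and quote below are
// placeholders within the documented U+0001..U+00FF range.
func newCSVOptions() *ImportedDataInfo_TableDefinition_CsvOptions {
return &ImportedDataInfo_TableDefinition_CsvOptions{
FieldDelimiter: &wrappers.StringValue{Value: ","},
QuoteChar: &wrappers.StringValue{Value: `"`},
SkipLeadingRows: &wrappers.Int64Value{Value: 1}, // skip a header row
AllowQuotedNewlines: &wrappers.BoolValue{Value: false}, // keeps files splittable
}
}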
// A request to update a transfer run.
type UpdateTransferRunRequest struct {
// Run name must be set and correspond to an already existing run. Only
// state, error_status, and data_version fields will be updated. All other
// fields will be ignored.
TransferRun *TransferRun `protobuf:"bytes,1,opt,name=transfer_run,json=transferRun,proto3" json:"transfer_run,omitempty"`
// Required list of fields to be updated in this request.
UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *UpdateTransferRunRequest) Reset() { *m = UpdateTransferRunRequest{} }
func (m *UpdateTransferRunRequest) String() string { return proto.CompactTextString(m) }
func (*UpdateTransferRunRequest) ProtoMessage() {}
func (*UpdateTransferRunRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{1}
}
func (m *UpdateTransferRunRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_UpdateTransferRunRequest.Unmarshal(m, b)
}
func (m *UpdateTransferRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_UpdateTransferRunRequest.Marshal(b, m, deterministic)
}
func (m *UpdateTransferRunRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_UpdateTransferRunRequest.Merge(m, src)
}
func (m *UpdateTransferRunRequest) XXX_Size() int {
return xxx_messageInfo_UpdateTransferRunRequest.Size(m)
}
func (m *UpdateTransferRunRequest) XXX_DiscardUnknown() {
xxx_messageInfo_UpdateTransferRunRequest.DiscardUnknown(m)
}
var xxx_messageInfo_UpdateTransferRunRequest proto.InternalMessageInfo
func (m *UpdateTransferRunRequest) GetTransferRun() *TransferRun {
if m != nil {
return m.TransferRun
}
return nil
}
func (m *UpdateTransferRunRequest) GetUpdateMask() *field_mask.FieldMask {
if m != nil {
return m.UpdateMask
}
return nil
}
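// Illustrative sketch: per the comments above, only the state, error_status,
// and data_version fields are honored, and the update mask must name what
// changes; "state" is used here as one such field.
func newRunStateUpdate(run *TransferRun) *UpdateTransferRunRequest {
return &UpdateTransferRunRequest{
TransferRun: run,
UpdateMask: &field_mask.FieldMask{Paths: []string{"state"}},
}
}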
// A request to add transfer status messages to the run.
type LogTransferRunMessagesRequest struct {
// Name of the resource in the form:
// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Messages to append.
TransferMessages []*TransferMessage `protobuf:"bytes,2,rep,name=transfer_messages,json=transferMessages,proto3" json:"transfer_messages,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *LogTransferRunMessagesRequest) Reset() { *m = LogTransferRunMessagesRequest{} }
func (m *LogTransferRunMessagesRequest) String() string { return proto.CompactTextString(m) }
func (*LogTransferRunMessagesRequest) ProtoMessage() {}
func (*LogTransferRunMessagesRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{2}
}
func (m *LogTransferRunMessagesRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_LogTransferRunMessagesRequest.Unmarshal(m, b)
}
func (m *LogTransferRunMessagesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_LogTransferRunMessagesRequest.Marshal(b, m, deterministic)
}
func (m *LogTransferRunMessagesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_LogTransferRunMessagesRequest.Merge(m, src)
}
func (m *LogTransferRunMessagesRequest) XXX_Size() int {
return xxx_messageInfo_LogTransferRunMessagesRequest.Size(m)
}
func (m *LogTransferRunMessagesRequest) XXX_DiscardUnknown() {
xxx_messageInfo_LogTransferRunMessagesRequest.DiscardUnknown(m)
}
var xxx_messageInfo_LogTransferRunMessagesRequest proto.InternalMessageInfo
func (m *LogTransferRunMessagesRequest) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *LogTransferRunMessagesRequest) GetTransferMessages() []*TransferMessage {
if m != nil {
return m.TransferMessages
}
return nil
}
// A request to start and monitor a BigQuery load job.
type StartBigQueryJobsRequest struct {
// Name of the resource in the form:
// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Import jobs which should be started and monitored.
ImportedData []*ImportedDataInfo `protobuf:"bytes,2,rep,name=imported_data,json=importedData,proto3" json:"imported_data,omitempty"`
// User credentials which should be used to start/monitor
// BigQuery jobs. If not specified, then jobs
// are started using the data source service account credentials.
// This may be an OAuth token or a JWT token.
UserCredentials []byte `protobuf:"bytes,3,opt,name=user_credentials,json=userCredentials,proto3" json:"user_credentials,omitempty"`
// The number of BigQuery jobs that can run in parallel.
MaxParallelism int32 `protobuf:"varint,8,opt,name=max_parallelism,json=maxParallelism,proto3" json:"max_parallelism,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *StartBigQueryJobsRequest) Reset() { *m = StartBigQueryJobsRequest{} }
func (m *StartBigQueryJobsRequest) String() string { return proto.CompactTextString(m) }
func (*StartBigQueryJobsRequest) ProtoMessage() {}
func (*StartBigQueryJobsRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{3}
}
func (m *StartBigQueryJobsRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StartBigQueryJobsRequest.Unmarshal(m, b)
}
func (m *StartBigQueryJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_StartBigQueryJobsRequest.Marshal(b, m, deterministic)
}
func (m *StartBigQueryJobsRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_StartBigQueryJobsRequest.Merge(m, src)
}
func (m *StartBigQueryJobsRequest) XXX_Size() int {
return xxx_messageInfo_StartBigQueryJobsRequest.Size(m)
}
func (m *StartBigQueryJobsRequest) XXX_DiscardUnknown() {
xxx_messageInfo_StartBigQueryJobsRequest.DiscardUnknown(m)
}
var xxx_messageInfo_StartBigQueryJobsRequest proto.InternalMessageInfo
func (m *StartBigQueryJobsRequest) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *StartBigQueryJobsRequest) GetImportedData() []*ImportedDataInfo {
if m != nil {
return m.ImportedData
}
return nil
}
func (m *StartBigQueryJobsRequest) GetUserCredentials() []byte {
if m != nil {
return m.UserCredentials
}
return nil
}
func (m *StartBigQueryJobsRequest) GetMaxParallelism() int32 {
if m != nil {
return m.MaxParallelism
}
return 0
}
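// Illustrative sketch: a start-jobs request pairs the run resource name with
// the imports to execute. Leaving UserCredentials nil falls back to the data
// source service account, per the field comment above. Values are placeholders.
func newStartJobsRequest(runName string, imports []*ImportedDataInfo) *StartBigQueryJobsRequest {
return &StartBigQueryJobsRequest{
Name: runName,
ImportedData: imports,
MaxParallelism: 4, // example cap on concurrent BigQuery jobs
}
}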
// A request to finish a run.
type FinishRunRequest struct {
// Name of the resource in the form:
// "projects/{project_id}/locations/{location_id}/transferConfigs/{config_id}/runs/{run_id}"
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *FinishRunRequest) Reset() { *m = FinishRunRequest{} }
func (m *FinishRunRequest) String() string { return proto.CompactTextString(m) }
func (*FinishRunRequest) ProtoMessage() {}
func (*FinishRunRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{4}
}
func (m *FinishRunRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_FinishRunRequest.Unmarshal(m, b)
}
func (m *FinishRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_FinishRunRequest.Marshal(b, m, deterministic)
}
func (m *FinishRunRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_FinishRunRequest.Merge(m, src)
}
func (m *FinishRunRequest) XXX_Size() int {
return xxx_messageInfo_FinishRunRequest.Size(m)
}
func (m *FinishRunRequest) XXX_DiscardUnknown() {
xxx_messageInfo_FinishRunRequest.DiscardUnknown(m)
}
var xxx_messageInfo_FinishRunRequest proto.InternalMessageInfo
func (m *FinishRunRequest) GetName() string {
if m != nil {
return m.Name
}
return ""
}
// Represents the request of the CreateDataSourceDefinition method.
type CreateDataSourceDefinitionRequest struct {
// The BigQuery project id with which the data source definition is associated.
// Must be in the form: `projects/{project_id}/locations/{location_id}`
Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
// Data source definition.
DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,2,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *CreateDataSourceDefinitionRequest) Reset() { *m = CreateDataSourceDefinitionRequest{} }
func (m *CreateDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*CreateDataSourceDefinitionRequest) ProtoMessage() {}
func (*CreateDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{5}
}
func (m *CreateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CreateDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CreateDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_CreateDataSourceDefinitionRequest.Merge(m, src)
}
func (m *CreateDataSourceDefinitionRequest) XXX_Size() int {
return xxx_messageInfo_CreateDataSourceDefinitionRequest.Size(m)
}
func (m *CreateDataSourceDefinitionRequest) XXX_DiscardUnknown() {
xxx_messageInfo_CreateDataSourceDefinitionRequest.DiscardUnknown(m)
}
var xxx_messageInfo_CreateDataSourceDefinitionRequest proto.InternalMessageInfo
func (m *CreateDataSourceDefinitionRequest) GetParent() string {
if m != nil {
return m.Parent
}
return ""
}
func (m *CreateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition {
if m != nil {
return m.DataSourceDefinition
}
return nil
}
// Represents the request of the UpdateDataSourceDefinition method.
type UpdateDataSourceDefinitionRequest struct {
// Data source definition.
DataSourceDefinition *DataSourceDefinition `protobuf:"bytes,1,opt,name=data_source_definition,json=dataSourceDefinition,proto3" json:"data_source_definition,omitempty"`
// Update field mask.
UpdateMask *field_mask.FieldMask `protobuf:"bytes,2,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *UpdateDataSourceDefinitionRequest) Reset() { *m = UpdateDataSourceDefinitionRequest{} }
func (m *UpdateDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*UpdateDataSourceDefinitionRequest) ProtoMessage() {}
func (*UpdateDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{6}
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_UpdateDataSourceDefinitionRequest.Merge(m, src)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_Size() int {
return xxx_messageInfo_UpdateDataSourceDefinitionRequest.Size(m)
}
func (m *UpdateDataSourceDefinitionRequest) XXX_DiscardUnknown() {
xxx_messageInfo_UpdateDataSourceDefinitionRequest.DiscardUnknown(m)
}
var xxx_messageInfo_UpdateDataSourceDefinitionRequest proto.InternalMessageInfo
func (m *UpdateDataSourceDefinitionRequest) GetDataSourceDefinition() *DataSourceDefinition {
if m != nil {
return m.DataSourceDefinition
}
return nil
}
func (m *UpdateDataSourceDefinitionRequest) GetUpdateMask() *field_mask.FieldMask {
if m != nil {
return m.UpdateMask
}
return nil
}
// Represents the request of the DeleteDataSourceDefinition method. All transfer
// configs associated with the data source must be deleted first, before the
// data source can be deleted.
type DeleteDataSourceDefinitionRequest struct {
// The field will contain the name of the resource requested, for example:
// `projects/{project_id}/locations/{location_id}/dataSourceDefinitions/{data_source_id}`
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *DeleteDataSourceDefinitionRequest) Reset() { *m = DeleteDataSourceDefinitionRequest{} }
func (m *DeleteDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*DeleteDataSourceDefinitionRequest) ProtoMessage() {}
func (*DeleteDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{7}
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_DeleteDataSourceDefinitionRequest.Merge(m, src)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_Size() int {
return xxx_messageInfo_DeleteDataSourceDefinitionRequest.Size(m)
}
func (m *DeleteDataSourceDefinitionRequest) XXX_DiscardUnknown() {
xxx_messageInfo_DeleteDataSourceDefinitionRequest.DiscardUnknown(m)
}
var xxx_messageInfo_DeleteDataSourceDefinitionRequest proto.InternalMessageInfo
func (m *DeleteDataSourceDefinitionRequest) GetName() string {
if m != nil {
return m.Name
}
return ""
}
// Represents the request of the GetDataSourceDefinition method.
type GetDataSourceDefinitionRequest struct {
// The field will contain the name of the resource requested.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *GetDataSourceDefinitionRequest) Reset() { *m = GetDataSourceDefinitionRequest{} }
func (m *GetDataSourceDefinitionRequest) String() string { return proto.CompactTextString(m) }
func (*GetDataSourceDefinitionRequest) ProtoMessage() {}
func (*GetDataSourceDefinitionRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{8}
}
func (m *GetDataSourceDefinitionRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetDataSourceDefinitionRequest.Unmarshal(m, b)
}
func (m *GetDataSourceDefinitionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetDataSourceDefinitionRequest.Marshal(b, m, deterministic)
}
func (m *GetDataSourceDefinitionRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetDataSourceDefinitionRequest.Merge(m, src)
}
func (m *GetDataSourceDefinitionRequest) XXX_Size() int {
return xxx_messageInfo_GetDataSourceDefinitionRequest.Size(m)
}
func (m *GetDataSourceDefinitionRequest) XXX_DiscardUnknown() {
xxx_messageInfo_GetDataSourceDefinitionRequest.DiscardUnknown(m)
}
var xxx_messageInfo_GetDataSourceDefinitionRequest proto.InternalMessageInfo
func (m *GetDataSourceDefinitionRequest) GetName() string {
if m != nil {
return m.Name
}
return ""
}
// Represents the request of the ListDataSourceDefinitions method.
type ListDataSourceDefinitionsRequest struct {
// The BigQuery project id for which data sources should be returned.
// Must be in the form: `projects/{project_id}/locations/{location_id}`
Parent string `protobuf:"bytes,1,opt,name=parent,proto3" json:"parent,omitempty"`
// Pagination token, which can be used to request a specific page
// of `ListDataSourceDefinitionsRequest` list results. For multiple-page
// results, `ListDataSourceDefinitionsResponse` outputs a `next_page` token,
// which can be used as the `page_token` value to request the next page of
// the list results.
PageToken string `protobuf:"bytes,2,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
// Page size. Defaults to the maximum page size of 1000 results.
PageSize int32 `protobuf:"varint,3,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ListDataSourceDefinitionsRequest) Reset() { *m = ListDataSourceDefinitionsRequest{} }
func (m *ListDataSourceDefinitionsRequest) String() string { return proto.CompactTextString(m) }
func (*ListDataSourceDefinitionsRequest) ProtoMessage() {}
func (*ListDataSourceDefinitionsRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{9}
}
func (m *ListDataSourceDefinitionsRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ListDataSourceDefinitionsRequest.Unmarshal(m, b)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ListDataSourceDefinitionsRequest.Marshal(b, m, deterministic)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListDataSourceDefinitionsRequest.Merge(m, src)
}
func (m *ListDataSourceDefinitionsRequest) XXX_Size() int {
return xxx_messageInfo_ListDataSourceDefinitionsRequest.Size(m)
}
func (m *ListDataSourceDefinitionsRequest) XXX_DiscardUnknown() {
xxx_messageInfo_ListDataSourceDefinitionsRequest.DiscardUnknown(m)
}
var xxx_messageInfo_ListDataSourceDefinitionsRequest proto.InternalMessageInfo
func (m *ListDataSourceDefinitionsRequest) GetParent() string {
if m != nil {
return m.Parent
}
return ""
}
func (m *ListDataSourceDefinitionsRequest) GetPageToken() string {
if m != nil {
return m.PageToken
}
return ""
}
func (m *ListDataSourceDefinitionsRequest) GetPageSize() int32 {
if m != nil {
return m.PageSize
}
return 0
}
// Returns a list of supported data source definitions.
type ListDataSourceDefinitionsResponse struct {
// List of supported data source definitions.
DataSourceDefinitions []*DataSourceDefinition `protobuf:"bytes,1,rep,name=data_source_definitions,json=dataSourceDefinitions,proto3" json:"data_source_definitions,omitempty"`
// Output only. The next pagination token. For multiple-page list results,
// this token can be used as the
// `ListDataSourceDefinitionsRequest.page_token`
// to request the next page of the list results.
NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ListDataSourceDefinitionsResponse) Reset() { *m = ListDataSourceDefinitionsResponse{} }
func (m *ListDataSourceDefinitionsResponse) String() string { return proto.CompactTextString(m) }
func (*ListDataSourceDefinitionsResponse) ProtoMessage() {}
func (*ListDataSourceDefinitionsResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{10}
}
func (m *ListDataSourceDefinitionsResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ListDataSourceDefinitionsResponse.Unmarshal(m, b)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ListDataSourceDefinitionsResponse.Marshal(b, m, deterministic)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListDataSourceDefinitionsResponse.Merge(m, src)
}
func (m *ListDataSourceDefinitionsResponse) XXX_Size() int {
return xxx_messageInfo_ListDataSourceDefinitionsResponse.Size(m)
}
func (m *ListDataSourceDefinitionsResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ListDataSourceDefinitionsResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ListDataSourceDefinitionsResponse proto.InternalMessageInfo
func (m *ListDataSourceDefinitionsResponse) GetDataSourceDefinitions() []*DataSourceDefinition {
if m != nil {
return m.DataSourceDefinitions
}
return nil
}
func (m *ListDataSourceDefinitionsResponse) GetNextPageToken() string {
if m != nil {
return m.NextPageToken
}
return ""
}
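// Illustrative pagination sketch: keep passing next_page_token back as
// page_token until it comes back empty. This assumes the
// DataSourceServiceClient interface generated further down in this file;
// error handling beyond propagation is elided.
func listAllDefinitions(ctx context.Context, c DataSourceServiceClient, parent string) ([]*DataSourceDefinition, error) {
var all []*DataSourceDefinition
req := &ListDataSourceDefinitionsRequest{Parent: parent, PageSize: 1000}
for {
resp, err := c.ListDataSourceDefinitions(ctx, req)
if err != nil {
return nil, err
}
all = append(all, resp.GetDataSourceDefinitions()...)
if resp.GetNextPageToken() == "" {
return all, nil
}
req.PageToken = resp.GetNextPageToken()
}
}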
// Represents the data source definition.
type DataSourceDefinition struct {
// The resource name of the data source definition.
// Data source definition names have the form
// `projects/{project_id}/locations/{location}/dataSourceDefinitions/{data_source_id}`.
Name string `protobuf:"bytes,21,opt,name=name,proto3" json:"name,omitempty"`
// Data source metadata.
DataSource *DataSource `protobuf:"bytes,1,opt,name=data_source,json=dataSource,proto3" json:"data_source,omitempty"`
// The Pub/Sub topic to be used for broadcasting a message when a transfer run
// is created. Both this topic and transfer_config_pubsub_topic can be
// set to a custom topic. By default, both topics are auto-generated if neither
// is provided when creating the definition. However, if one topic is
// manually set, the other topic has to be manually set as well. The only
// difference is that transfer_run_pubsub_topic must be a non-empty Pub/Sub
// topic, but transfer_config_pubsub_topic can be set to empty. The comments
// about "{location}" for transfer_config_pubsub_topic apply here too.
TransferRunPubsubTopic string `protobuf:"bytes,13,opt,name=transfer_run_pubsub_topic,json=transferRunPubsubTopic,proto3" json:"transfer_run_pubsub_topic,omitempty"`
// Duration which should be added to schedule_time to calculate
// run_time when the job is scheduled. Only applicable for automatically
// scheduled transfer runs. Used to start a run early on a data source that
// supports continuous data refresh to compensate for unknown timezone
// offsets. Use a negative number to start a run late for data sources not
// supporting continuous data refresh.
RunTimeOffset *duration.Duration `protobuf:"bytes,16,opt,name=run_time_offset,json=runTimeOffset,proto3" json:"run_time_offset,omitempty"`
// Support e-mail address of the OAuth client's Brand, which contains the
// consent screen data.
SupportEmail string `protobuf:"bytes,22,opt,name=support_email,json=supportEmail,proto3" json:"support_email,omitempty"`
// When a service account is specified, BigQuery will share the created dataset
// with the given service account. Also, this service account will be
// eligible to perform status updates and message logging for data transfer
// runs for the corresponding data_source_id.
ServiceAccount string `protobuf:"bytes,2,opt,name=service_account,json=serviceAccount,proto3" json:"service_account,omitempty"`
// Is the data source disabled? If true, data_source is not visible.
// The API will also stop returning any data transfer configs and/or runs
// associated with the data source. This setting has higher priority
// than whitelisted_project_ids.
Disabled bool `protobuf:"varint,5,opt,name=disabled,proto3" json:"disabled,omitempty"`
// The Pub/Sub topic to use for broadcasting a message for transfer config. If
// empty, a message will not be broadcast. Both this topic and
// transfer_run_pubsub_topic are auto-generated if none of them is provided
// when creating the definition. It is recommended to provide
// transfer_config_pubsub_topic if a user-owned transfer_run_pubsub_topic is
// provided. Otherwise, it will be set to empty. If "{location}" is found in
// the value, it means the data source wants to handle messages separately
// for datasets in different regions. We will replace {location} with the
// actual dataset location to form the actual topic name. For example,
// projects/connector/topics/scheduler-{location} could become
// projects/connector/topics/scheduler-us. If "{location}" is not found, then
// we will use the input value as topic name.
TransferConfigPubsubTopic string `protobuf:"bytes,12,opt,name=transfer_config_pubsub_topic,json=transferConfigPubsubTopic,proto3" json:"transfer_config_pubsub_topic,omitempty"`
// Supported location_ids used for deciding in which locations Pub/Sub topics
// need to be created. If custom Pub/Sub topics are used and they contain
// '{location}', the location_ids will be used for validating the topics by
// replacing the '{location}' with the individual location in the list. The
// valid values are the "location_id" field of the response of `GET
// https://bigquerydatatransfer.googleapis.com/v1/{name=projects/*}/locations`
// In addition, if the data source needs to support all available regions,
// supported_location_ids can be set to "global" (a single string element).
// When "global" is specified:
// 1) the data source implementation is supposed to stage the data in the
// proper region of the destination dataset;
// 2) the data source developer should be aware of the implications (e.g., network
// traffic latency, potential charges associated with cross-region traffic,
// etc.) of supporting the "global" region;
SupportedLocationIds []string `protobuf:"bytes,23,rep,name=supported_location_ids,json=supportedLocationIds,proto3" json:"supported_location_ids,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *DataSourceDefinition) Reset() { *m = DataSourceDefinition{} }
func (m *DataSourceDefinition) String() string { return proto.CompactTextString(m) }
func (*DataSourceDefinition) ProtoMessage() {}
func (*DataSourceDefinition) Descriptor() ([]byte, []int) {
return fileDescriptor_63170854e2f004ff, []int{11}
}
func (m *DataSourceDefinition) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_DataSourceDefinition.Unmarshal(m, b)
}
func (m *DataSourceDefinition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_DataSourceDefinition.Marshal(b, m, deterministic)
}
func (m *DataSourceDefinition) XXX_Merge(src proto.Message) {
xxx_messageInfo_DataSourceDefinition.Merge(m, src)
}
func (m *DataSourceDefinition) XXX_Size() int {
return xxx_messageInfo_DataSourceDefinition.Size(m)
}
func (m *DataSourceDefinition) XXX_DiscardUnknown() {
xxx_messageInfo_DataSourceDefinition.DiscardUnknown(m)
}
var xxx_messageInfo_DataSourceDefinition proto.InternalMessageInfo
func (m *DataSourceDefinition) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *DataSourceDefinition) GetDataSource() *DataSource {
if m != nil {
return m.DataSource
}
return nil
}
func (m *DataSourceDefinition) GetTransferRunPubsubTopic() string {
if m != nil {
return m.TransferRunPubsubTopic
}
return ""
}
func (m *DataSourceDefinition) GetRunTimeOffset() *duration.Duration {
if m != nil {
return m.RunTimeOffset
}
return nil
}
func (m *DataSourceDefinition) GetSupportEmail() string {
if m != nil {
return m.SupportEmail
}
return ""
}
func (m *DataSourceDefinition) GetServiceAccount() string {
if m != nil {
return m.ServiceAccount
}
return ""
}
func (m *DataSourceDefinition) GetDisabled() bool {
if m != nil {
return m.Disabled
}
return false
}
func (m *DataSourceDefinition) GetTransferConfigPubsubTopic() string {
if m != nil {
return m.TransferConfigPubsubTopic
}
return ""
}
func (m *DataSourceDefinition) GetSupportedLocationIds() []string {
if m != nil {
return m.SupportedLocationIds
}
return nil
}
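// Illustrative sketch of the run_time_offset arithmetic described above:
// run_time = schedule_time + offset, so a negative offset yields a run_time
// before the schedule_time. Nanos are ignored here for brevity.
func runTimeUnixSeconds(scheduleUnix int64, d *DataSourceDefinition) int64 {
if off := d.GetRunTimeOffset(); off != nil {
return scheduleUnix + off.Seconds
}
return scheduleUnix
}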
func init() {
proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.WriteDisposition", WriteDisposition_name, WriteDisposition_value)
proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Format", ImportedDataInfo_Format_name, ImportedDataInfo_Format_value)
proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_Encoding", ImportedDataInfo_Encoding_name, ImportedDataInfo_Encoding_value)
proto.RegisterEnum("google.cloud.bigquery.datatransfer.v1.ImportedDataInfo_FieldSchema_Type", ImportedDataInfo_FieldSchema_Type_name, ImportedDataInfo_FieldSchema_Type_value)
proto.RegisterType((*ImportedDataInfo)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo")
proto.RegisterType((*ImportedDataInfo_FieldSchema)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.FieldSchema")
proto.RegisterType((*ImportedDataInfo_RecordSchema)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.RecordSchema")
proto.RegisterType((*ImportedDataInfo_TableDefinition)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition")
proto.RegisterType((*ImportedDataInfo_TableDefinition_CsvOptions)(nil), "google.cloud.bigquery.datatransfer.v1.ImportedDataInfo.TableDefinition.CsvOptions")
proto.RegisterType((*UpdateTransferRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.UpdateTransferRunRequest")
proto.RegisterType((*LogTransferRunMessagesRequest)(nil), "google.cloud.bigquery.datatransfer.v1.LogTransferRunMessagesRequest")
proto.RegisterType((*StartBigQueryJobsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.StartBigQueryJobsRequest")
proto.RegisterType((*FinishRunRequest)(nil), "google.cloud.bigquery.datatransfer.v1.FinishRunRequest")
proto.RegisterType((*CreateDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.CreateDataSourceDefinitionRequest")
proto.RegisterType((*UpdateDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.UpdateDataSourceDefinitionRequest")
proto.RegisterType((*DeleteDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.DeleteDataSourceDefinitionRequest")
proto.RegisterType((*GetDataSourceDefinitionRequest)(nil), "google.cloud.bigquery.datatransfer.v1.GetDataSourceDefinitionRequest")
proto.RegisterType((*ListDataSourceDefinitionsRequest)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsRequest")
proto.RegisterType((*ListDataSourceDefinitionsResponse)(nil), "google.cloud.bigquery.datatransfer.v1.ListDataSourceDefinitionsResponse")
proto.RegisterType((*DataSourceDefinition)(nil), "google.cloud.bigquery.datatransfer.v1.DataSourceDefinition")
}
func init() {
proto.RegisterFile("google/cloud/bigquery/datatransfer/v1/datasource.proto", fileDescriptor_63170854e2f004ff)
}
var fileDescriptor_63170854e2f004ff = []byte{
// 2189 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x59, 0xdd, 0x6e, 0xdb, 0xc8,
0x15, 0x2e, 0xfd, 0x23, 0xcb, 0x47, 0xfe, 0xa1, 0xa7, 0x5e, 0x47, 0x51, 0x92, 0x8d, 0xa3, 0xc5,
0xa6, 0xde, 0x5c, 0x48, 0xb5, 0x9a, 0xe6, 0x17, 0xdd, 0xad, 0x2c, 0xd1, 0x0e, 0x53, 0x5b, 0x92,
0x29, 0xda, 0x41, 0xba, 0x2e, 0x88, 0x31, 0x39, 0x52, 0x26, 0xa6, 0x48, 0x86, 0x43, 0xda, 0xc9,
0x16, 0xdb, 0x8b, 0xbe, 0x42, 0x51, 0xa0, 0x37, 0x7d, 0x81, 0x02, 0x2d, 0xd0, 0x37, 0x68, 0x0b,
0xf4, 0xa2, 0x7b, 0xbb, 0xd7, 0x45, 0xb7, 0x40, 0x81, 0x5e, 0x6f, 0x1f, 0xa0, 0x28, 0x66, 0x48,
0xc9, 0x8c, 0x2c, 0x59, 0x8a, 0x9d, 0xde, 0x91, 0x67, 0xe6, 0x9c, 0xf9, 0xbe, 0x33, 0xdf, 0x9c,
0x39, 0x94, 0xe0, 0x5e, 0xdb, 0x75, 0xdb, 0x36, 0x29, 0x9a, 0xb6, 0x1b, 0x5a, 0xc5, 0x43, 0xda,
0x7e, 0x15, 0x12, 0xff, 0x4d, 0xd1, 0xc2, 0x01, 0x0e, 0x7c, 0xec, 0xb0, 0x16, 0xf1, 0x8b, 0xc7,
0xeb, 0xe2, 0x9d, 0xb9, 0xa1, 0x6f, 0x92, 0x82, 0xe7, 0xbb, 0x81, 0x8b, 0x3e, 0x8e, 0xfc, 0x0a,
0xc2, 0xaf, 0xd0, 0xf5, 0x2b, 0x24, 0xfd, 0x0a, 0xc7, 0xeb, 0xb9, 0xeb, 0x71, 0x78, 0xec, 0xd1,
0x22, 0x76, 0x1c, 0x37, 0xc0, 0x01, 0x75, 0x1d, 0x16, 0x05, 0xc9, 0x3d, 0x18, 0x7f, 0xf1, 0x5e,
0xd0, 0xc8, 0xf3, 0xee, 0x78, 0x9e, 0x7d, 0x5e, 0x1f, 0xc6, 0x5e, 0xe2, 0xed, 0x30, 0x6c, 0x15,
0xad, 0xd0, 0x17, 0x80, 0xe2, 0xf1, 0x6b, 0xfd, 0xe3, 0xa4, 0xe3, 0x05, 0x6f, 0xe2, 0xc1, 0xd5,
0xfe, 0xc1, 0x16, 0x25, 0xb6, 0x65, 0x74, 0x30, 0x3b, 0x8a, 0x67, 0xdc, 0xec, 0x9f, 0x11, 0xd0,
0x0e, 0x61, 0x01, 0xee, 0x78, 0xc3, 0xd6, 0x3f, 0xf1, 0xb1, 0xe7, 0x11, 0xbf, 0x9b, 0x8f, 0x2b,
0x89, 0x6c, 0x99, 0x36, 0x25, 0x4e, 0x10, 0x0d, 0xe4, 0xff, 0x23, 0x83, 0xac, 0x76, 0x3c, 0xd7,
0x0f, 0x88, 0x55, 0xc5, 0x01, 0x56, 0x9d, 0x96, 0x8b, 0x64, 0x98, 0x64, 0xaf, 0xec, 0xac, 0xb4,
0x2a, 0xad, 0xcd, 0x6a, 0xfc, 0x11, 0x7d, 0x1f, 0x96, 0x2d, 0xc2, 0x02, 0xea, 0x08, 0x52, 0x46,
0x80, 0x0f, 0x6d, 0x62, 0x50, 0x2b, 0x3b, 0x21, 0xa6, 0xa0, 0xc4, 0x98, 0xce, 0x87, 0x54, 0x0b,
0x6d, 0xc0, 0x8d, 0xb3, 0x1e, 0x16, 0x61, 0xa6, 0x4f, 0x3d, 0x6e, 0xc9, 0x82, 0x70, 0xbd, 0xd6,
0xef, 0x5a, 0x3d, 0x9d, 0x82, 0x5a, 0x00, 0x5d, 0xbf, 0x16, 0xcb, 0x4e, 0xae, 0x4e, 0xae, 0x65,
0x4a, 0x5b, 0x85, 0xb1, 0xf4, 0x51, 0xe8, 0x27, 0x55, 0x88, 0xa3, 0xb7, 0xa8, 0x43, 0x79, 0x70,
0x6d, 0x36, 0x88, 0x0d, 0x0c, 0xdd, 0x85, 0x95, 0x90, 0x11, 0x9f, 0x2f, 0x43, 0x1d, 0x62, 0x19,
0xad, 0xd0, 0x31, 0x85, 0x9a, 0xb2, 0x53, 0xab, 0x93, 0x6b, 0xb3, 0xda, 0x32, 0x1f, 0xad, 0x46,
0x83, 0x9b, 0xdd, 0x31, 0x64, 0xc1, 0xd2, 0x89, 0x4f, 0x03, 0x62, 0x58, 0x94, 0x79, 0x2e, 0x13,
0x51, 0xb3, 0xa9, 0x55, 0x69, 0x6d, 0xa1, 0x74, 0x7f, 0x4c, 0x90, 0xcf, 0xb8, 0x7f, 0xf5, 0xd4,
0x5d, 0x93, 0x4f, 0xfa, 0x2c, 0xb9, 0x7f, 0x4c, 0x42, 0x66, 0x93, 0xeb, 0xa1, 0x69, 0xbe, 0x20,
0x1d, 0x8c, 0x6e, 0x00, 0x44, 0xf2, 0x70, 0x70, 0x87, 0xc4, 0x5b, 0x34, 0x2b, 0x2c, 0x35, 0xdc,
0x21, 0xe8, 0x00, 0xa6, 0x82, 0x37, 0x1e, 0x11, 0x1b, 0xb3, 0x50, 0x7a, 0x72, 0xd1, 0x64, 0x25,
0x56, 0x2c, 0xe8, 0x6f, 0x3c, 0xa2, 0x89, 0xa8, 0xe8, 0x26, 0x64, 0x28, 0x33, 0x7c, 0xe2, 0x11,
0x1c, 0x10, 0x2b, 0x3b, 0xb9, 0x2a, 0xad, 0xa5, 0x35, 0xa0, 0x4c, 0x8b, 0x2d, 0x68, 0x15, 0x32,
0xc9, 0x3d, 0x9e, 0x12, 0xf0, 0x92, 0x26, 0x74, 0x00, 0x29, 0x26, 0xe2, 0x66, 0xa7, 0x57, 0xa5,
0xb5, 0x4c, 0xa9, 0x7a, 0x51, 0x88, 0x1a, 0x31, 0x5d, 0x3f, 0xc6, 0xa8, 0xc5, 0x31, 0xf3, 0x7f,
0x94, 0x60, 0x8a, 0xe3, 0x45, 0xcb, 0x20, 0xeb, 0xcf, 0x1b, 0x8a, 0xb1, 0x57, 0x6b, 0x36, 0x94,
0x8a, 0xba, 0xa9, 0x2a, 0x55, 0xf9, 0x3b, 0x08, 0x20, 0xd5, 0xd4, 0x35, 0xb5, 0xb6, 0x25, 0x4b,
0x28, 0x03, 0x33, 0x6a, 0x4d, 0x57, 0xb6, 0x14, 0x4d, 0x9e, 0x40, 0xb3, 0x30, 0xbd, 0xb9, 0x5d,
0x2f, 0xeb, 0xf2, 0x24, 0x9f, 0xa3, 0x29, 0x95, 0xba, 0x56, 0x95, 0xa7, 0xb8, 0x79, 0xe3, 0xb9,
0xae, 0x34, 0xe5, 0x69, 0x3e, 0x7d, 0xa3, 0x5e, 0xdf, 0x56, 0xca, 0x35, 0x39, 0x85, 0xe6, 0x61,
0x56, 0x57, 0x77, 0x94, 0xa6, 0x5e, 0xde, 0x69, 0xc8, 0x33, 0x28, 0x0d, 0x53, 0xd5, 0xb2, 0xae,
0xc8, 0x69, 0xfe, 0xc4, 0x07, 0xe4, 0x59, 0x34, 0x07, 0x69, 0x6e, 0x13, 0x6f, 0xc0, 0xbd, 0x6b,
0x7b, 0x3b, 0x8a, 0xa6, 0x56, 0xe4, 0x0c, 0xf7, 0xde, 0x52, 0xea, 0x5b, 0x5a, 0xb9, 0xf1, 0xe4,
0xb9, 0x3c, 0x97, 0x3b, 0x82, 0xb9, 0x24, 0x17, 0xf4, 0x39, 0xa4, 0xc4, 0x7e, 0xb2, 0xac, 0x24,
0x14, 0x5f, 0x79, 0x0f, 0x9b, 0xa8, 0xc5, 0x21, 0x73, 0x7f, 0x99, 0x81, 0xc5, 0xbe, 0x93, 0x80,
0xae, 0x42, 0xba, 0x77, 0xa0, 0x23, 0x41, 0xcd, 0x04, 0xf1, 0x29, 0xbe, 0x09, 0x99, 0xa8, 0x38,
0x1b, 0xa1, 0x4f, 0x59, 0x76, 0x42, 0x1c, 0x07, 0x88, 0x4c, 0x7b, 0x3e, 0x65, 0x68, 0x1f, 0x52,
0x2d, 0xd7, 0xef, 0xe0, 0x40, 0x88, 0x61, 0xa1, 0xf4, 0xe9, 0x85, 0xc1, 0x8a, 0x28, 0x5a, 0x1c,
0x0d, 0xdd, 0x86, 0xc5, 0x0e, 0x7e, 0x6d, 0x1c, 0x62, 0xcb, 0xf0, 0x45, 0x72, 0x98, 0x10, 0xd3,
0xb4, 0x36, 0xdf, 0xc1, 0xaf, 0x37, 0xb0, 0x15, 0x65, 0x8c, 0xa1, 0x03, 0x48, 0x13, 0xc7, 0x74,
0x2d, 0xea, 0xb4, 0x85, 0xa0, 0x16, 0x4a, 0x3f, 0xbe, 0x28, 0x02, 0x25, 0x8e, 0xa3, 0xf5, 0x22,
0x22, 0x06, 0x19, 0x93, 0x1d, 0x1b, 0xae, 0x17, 0x55, 0x83, 0x94, 0x50, 0xac, 0xf6, 0x9e, 0x2a,
0x50, 0xa1, 0xc2, 0x8e, 0xeb, 0x51, 0x64, 0x0d, 0xcc, 0xde, 0x73, 0xe2, 0x84, 0xcc, 0xbc, 0xff,
0x13, 0x82, 0x6a, 0xf0, 0x01, 0x6d, 0x3b, 0xae, 0x4f, 0x8c, 0xd0, 0x39, 0x72, 0xdc, 0x13, 0xc7,
0x38, 0xc6, 0x76, 0x48, 0x98, 0xa8, 0xc7, 0x99, 0x52, 0xae, 0xbb, 0x58, 0xf7, 0x26, 0x29, 0x6c,
0xb8, 0xae, 0xbd, 0xcf, 0xa7, 0x68, 0xdf, 0x8d, 0x1c, 0xf7, 0x22, 0x3f, 0x61, 0x63, 0xb9, 0x6f,
0x27, 0x00, 0x4e, 0x89, 0x20, 0x05, 0x16, 0xa3, 0xf2, 0x64, 0x11, 0x9b, 0x76, 0x68, 0x40, 0x7c,
0x21, 0xa9, 0x4c, 0xe9, 0xfa, 0x99, 0xc0, 0xcd, 0xc0, 0xa7, 0x4e, 0x3b, 0x0a, 0xbd, 0x20, 0x9c,
0xaa, 0x5d, 0x1f, 0x8e, 0x12, 0xdb, 0xb6, 0x7b, 0x62, 0xbc, 0x0a, 0xdd, 0x80, 0x58, 0x86, 0x43,
0x4e, 0x6c, 0xea, 0x10, 0x26, 0xea, 0xda, 0x08, 0x94, 0xc2, 0x71, 0x57, 0xf8, 0xd5, 0x62, 0x37,
0xf4, 0x18, 0x40, 0x44, 0x32, 0xcc, 0x17, 0xd8, 0x17, 0x52, 0x1d, 0x85, 0x68, 0x56, 0xcc, 0xaf,
0xbc, 0xc0, 0x3e, 0xda, 0x82, 0x25, 0x76, 0x44, 0x3d, 0xc3, 0x26, 0x98, 0xab, 0xc2, 0xf0, 0xdd,
0x93, 0x48, 0x8d, 0x99, 0xd2, 0xb5, 0x33, 0x31, 0x54, 0x27, 0xb8, 0x77, 0x37, 0x0a, 0xb1, 0xc8,
0xbd, 0xb6, 0x23, 0x27, 0xcd, 0x3d, 0x61, 0x68, 0x13, 0x96, 0x22, 0x56, 0x2f, 0x71, 0xbb, 0x4d,
0xac, 0x28, 0xd0, 0xf4, 0x48, 0x46, 0x8b, 0xc2, 0xe9, 0xa9, 0xf0, 0xe1, 0x71, 0xf2, 0xbf, 0x80,
0x54, 0x74, 0x5c, 0xd0, 0x0a, 0xa0, 0xcd, 0xba, 0xb6, 0x53, 0xd6, 0xfb, 0x0a, 0xdd, 0x0c, 0x4c,
0x56, 0x9a, 0xfb, 0xb2, 0xc4, 0x0b, 0xd2, 0xd3, 0x66, 0xbd, 0x26, 0x4f, 0xf0, 0xa7, 0xf2, 0xbe,
0x56, 0x97, 0x27, 0x79, 0x69, 0x8a, 0x2a, 0x9c, 0x5a, 0x97, 0xa7, 0xf8, 0x5b, 0xa5, 0xbe, 0xbd,
0xb7, 0x53, 0x53, 0xeb, 0xf2, 0x34, 0xaf, 0x4d, 0x95, 0x72, 0xa3, 0x5c, 0x51, 0xf5, 0xba, 0x26,
0xa7, 0x78, 0xdd, 0x6a, 0x94, 0xb5, 0xdd, 0x3d, 0x45, 0x97, 0x67, 0x78, 0xd0, 0xba, 0x56, 0x91,
0xd3, 0xf9, 0x4f, 0x21, 0xdd, 0x3d, 0x2c, 0x28, 0x0b, 0xcb, 0x4a, 0xad, 0x52, 0xaf, 0xaa, 0xb5,
0xad, 0x3e, 0x0c, 0x0b, 0x00, 0x6a, 0xb3, 0x6e, 0x3c, 0x78, 0xf0, 0xc3, 0x87, 0xc6, 0x7a, 0x04,
0x65, 0x4f, 0xdf, 0x7c, 0x20, 0x4f, 0xe4, 0xff, 0x20, 0x41, 0x76, 0xcf, 0xb3, 0x70, 0x40, 0xf4,
0x58, 0xbf, 0x5a, 0xe8, 0x68, 0xe4, 0x55, 0x48, 0x58, 0x80, 0xf6, 0x60, 0xae, 0xab, 0x6a, 0xc3,
0x0f, 0x9d, 0x58, 0x3e, 0xa5, 0x31, 0x0f, 0x41, 0x32, 0x60, 0x26, 0x38, 0x7d, 0x41, 0x8f, 0x21,
0x13, 0x8a, 0x25, 0x45, 0x5f, 0x35, 0x54, 0x47, 0xa2, 0x66, 0xee, 0x60, 0x76, 0xa4, 0x41, 0x34,
0x9d, 0x3f, 0xe7, 0x7f, 0x23, 0xc1, 0x8d, 0x6d, 0xb7, 0x9d, 0x08, 0xbe, 0x43, 0x18, 0xc3, 0x6d,
0xc2, 0xba, 0xa8, 0x11, 0x4c, 0x25, 0x2e, 0x64, 0xf1, 0x8c, 0x4c, 0x58, 0xea, 0x31, 0xe9, 0xc4,
0xf3, 0x45, 0x09, 0xcd, 0x94, 0xee, 0xbd, 0x23, 0x9d, 0x78, 0x39, 0x4d, 0x0e, 0xde, 0x36, 0xb0,
0xfc, 0x37, 0x12, 0x64, 0x9b, 0x01, 0xf6, 0x83, 0x0d, 0xda, 0xde, 0xe5, 0x31, 0x9e, 0xba, 0x87,
0xe7, 0xa2, 0x3a, 0x80, 0x79, 0x1a, 0x57, 0x0a, 0x83, 0x2f, 0x17, 0x23, 0xba, 0x7f, 0xc1, 0x2a,
0xa3, 0xcd, 0xd1, 0x84, 0x05, 0x7d, 0x02, 0xb2, 0x68, 0xa5, 0x4c, 0x9f, 0x58, 0xc4, 0x09, 0x28,
0xb6, 0x99, 0x38, 0x6e, 0x73, 0xda, 0x22, 0xb7, 0x57, 0x4e, 0xcd, 0xe8, 0x7b, 0x51, 0x89, 0xf7,
0xb0, 0x8f, 0x6d, 0x9b, 0xd8, 0x94, 0x75, 0xb2, 0x69, 0x51, 0xe2, 0x17, 0x3a, 0xf8, 0x75, 0xe3,
0xd4, 0x9a, 0xbf, 0x0d, 0xf2, 0x26, 0x75, 0x28, 0x7b, 0x91, 0x50, 0xc9, 0x00, 0x66, 0x5c, 0x56,
0xb7, 0x2a, 0x3e, 0x6f, 0x44, 0x38, 0x94, 0xa6, 0xb8, 0xa4, 0x12, 0x0d, 0x5f, 0xec, 0xb9, 0x02,
0x29, 0x0f, 0xfb, 0xc4, 0x09, 0x62, 0xdf, 0xf8, 0x0d, 0xbd, 0x82, 0x15, 0xce, 0xd5, 0x88, 0xef,
0x3b, 0xab, 0xe7, 0x18, 0x6b, 0xe5, 0xf1, 0x98, 0x09, 0x1a, 0xb8, 0xf6, 0xb2, 0x35, 0xc0, 0x9a,
0xff, 0x5a, 0x82, 0x5b, 0xd1, 0x39, 0x38, 0x0f, 0xf0, 0x70, 0x60, 0xd2, 0xff, 0x09, 0xd8, 0xe5,
0x0e, 0xcb, 0x7d, 0xb8, 0x55, 0x25, 0x36, 0x39, 0x9f, 0xd4, 0xa0, 0xfd, 0xbb, 0x0b, 0x1f, 0x6e,
0x91, 0xe0, 0x5d, 0xbd, 0x8e, 0x61, 0x75, 0x9b, 0xb2, 0x81, 0x6e, 0x6c, 0xd4, 0x9e, 0xdf, 0x00,
0xf0, 0x70, 0x9b, 0x18, 0x81, 0x7b, 0x44, 0x9c, 0xf8, 0x63, 0x66, 0x96, 0x5b, 0x74, 0x6e, 0x40,
0xd7, 0x40, 0xbc, 0x18, 0x8c, 0x7e, 0x41, 0x84, 0x8a, 0xa7, 0xb5, 0x34, 0x37, 0x34, 0xe9, 0x17,
0x24, 0xff, 0x27, 0x09, 0x6e, 0x9d, 0xb3, 0x30, 0xf3, 0x5c, 0x87, 0x11, 0xc4, 0xe0, 0xca, 0xe0,
0xcd, 0xeb, 0x76, 0x77, 0x97, 0xda, 0xbd, 0x0f, 0x06, 0xed, 0x1e, 0xe3, 0xcd, 0x93, 0x43, 0x5e,
0x07, 0xc6, 0x19, 0x6e, 0xf3, 0xdc, 0xdc, 0xe8, 0xf2, 0xcb, 0xff, 0x7d, 0x12, 0x96, 0x07, 0xc5,
0xed, 0xe5, 0xf9, 0x83, 0x44, 0xdd, 0xd0, 0x20, 0x93, 0x60, 0x12, 0x6b, 0x6f, 0xfd, 0x9d, 0xd1,
0x6b, 0x70, 0x8a, 0x19, 0x3d, 0x84, 0xab, 0xc9, 0x5a, 0x6f, 0x78, 0xe1, 0x21, 0x0b, 0x0f, 0x8d,
0xc0, 0xf5, 0xa8, 0x99, 0x9d, 0x17, 0x8b, 0xaf, 0x24, 0x8a, 0x78, 0x43, 0x0c, 0xeb, 0x7c, 0x14,
0x95, 0x61, 0x91, 0x7b, 0xf0, 0x0f, 0x61, 0xc3, 0x6d, 0xb5, 0x18, 0x09, 0xb2, 0xb2, 0x80, 0x74,
0xf5, 0x8c, 0x4c, 0xab, 0xf1, 0xb7, 0xb8, 0x36, 0xef, 0x87, 0x8e, 0x4e, 0x3b, 0xa4, 0x2e, 0xe6,
0xa3, 0x8f, 0x60, 0x9e, 0x85, 0x1e, 0x2f, 0x5e, 0x06, 0xe9, 0x60, 0x6a, 0x67, 0x57, 0xc4, 0x8a,
0x73, 0xb1, 0x51, 0xe1, 0x36, 0x5e, 0xa5, 0x18, 0xf1, 0x8f, 0xa9, 0x49, 0x0c, 0x6c, 0x9a, 0x6e,
0xe8, 0x04, 0x71, 0x2e, 0x17, 0x62, 0x73, 0x39, 0xb2, 0xa2, 0x1c, 0xa4, 0x2d, 0xca, 0x78, 0x8b,
0x67, 0x89, 0x3b, 0x3d, 0xad, 0xf5, 0xde, 0xd1, 0x67, 0x70, 0xbd, 0xc7, 0xd3, 0x74, 0x9d, 0x16,
0x6d, 0xbf, 0x4d, 0x75, 0x4e, 0x44, 0xec, 0xe5, 0xa2, 0x22, 0xa6, 0x24, 0xd9, 0xde, 0x85, 0x95,
0x18, 0x15, 0xb1, 0x0c, 0xdb, 0x35, 0xa3, 0x8f, 0x6a, 0x6a, 0xb1, 0xec, 0x95, 0xe8, 0x0b, 0xb5,
0x37, 0xba, 0x1d, 0x0f, 0xaa, 0x16, 0xbb, 0xf3, 0x39, 0xc8, 0xfd, 0x5f, 0x98, 0xe8, 0x16, 0xdc,
0x78, 0xa6, 0xa9, 0xba, 0x62, 0x54, 0xd5, 0x66, 0xa3, 0xde, 0x54, 0x75, 0xb5, 0x5e, 0xeb, 0xbb,
0xb8, 0x11, 0x2c, 0x44, 0x53, 0x74, 0x6d, 0xaf, 0x56, 0xe1, 0x1f, 0x36, 0x12, 0x92, 0x61, 0x2e,
0xb2, 0x95, 0x1b, 0x0d, 0xa5, 0x56, 0x95, 0x27, 0x4a, 0xbf, 0x95, 0x61, 0xe9, 0x74, 0x5b, 0x9b,
0x51, 0x32, 0xd0, 0xbf, 0x25, 0x58, 0x3a, 0x73, 0xb5, 0xa3, 0xcf, 0xc6, 0x94, 0xc9, 0xb0, 0xa6,
0x20, 0x77, 0x81, 0xeb, 0x3f, 0xff, 0xb3, 0x5f, 0x7e, 0xfd, 0xaf, 0x5f, 0x4d, 0x3c, 0x2b, 0x6d,
0x15, 0x8f, 0xd7, 0x8b, 0x3f, 0x4f, 0xea, 0xac, 0xc0, 0x05, 0xfd, 0x23, 0xcf, 0x77, 0x5f, 0x12,
0x33, 0x60, 0xc5, 0x3b, 0xc5, 0x6e, 0x46, 0xf9, 0xf3, 0xdb, 0x9b, 0xc0, 0x2d, 0x7e, 0xc8, 0x07,
0xbe, 0x7c, 0xf4, 0x56, 0x5f, 0x82, 0xfe, 0x26, 0xc1, 0xca, 0xe0, 0x9e, 0x00, 0x8d, 0xdb, 0xb1,
0x9f, 0xdb, 0x52, 0xe4, 0x56, 0xce, 0x08, 0x59, 0xe9, 0x78, 0xc1, 0x9b, 0xfc, 0xae, 0xe0, 0xf5,
0x93, 0xfc, 0xa6, 0xe0, 0x75, 0x31, 0x2a, 0xb6, 0xdb, 0xee, 0x2e, 0xf7, 0x48, 0xba, 0x83, 0xfe,
0x2a, 0xc1, 0xd2, 0x99, 0x26, 0x62, 0xec, 0x5d, 0x1b, 0xd6, 0x7e, 0x0c, 0x65, 0xf0, 0x4c, 0x30,
0xd8, 0xcd, 0x6f, 0x5f, 0x82, 0x01, 0xeb, 0x5f, 0x94, 0xf3, 0xf8, 0xbd, 0x04, 0xb3, 0xbd, 0x56,
0x01, 0x8d, 0xdb, 0xd2, 0xf4, 0x37, 0x17, 0x43, 0x71, 0xd7, 0x05, 0x6e, 0x35, 0x5f, 0xbd, 0x04,
0xee, 0x56, 0x77, 0x31, 0x8e, 0xf7, 0xbf, 0x12, 0xe4, 0x86, 0x77, 0x2c, 0x68, 0xdc, 0x9f, 0x6f,
0x46, 0x36, 0x3d, 0xb9, 0xcb, 0xdc, 0x32, 0xf9, 0xe7, 0x82, 0x76, 0x33, 0xff, 0x50, 0xd0, 0x8e,
0xae, 0xce, 0x21, 0xc4, 0xbf, 0x2c, 0x0e, 0xbc, 0x91, 0x1e, 0x0d, 0xe9, 0x60, 0xd0, 0xaf, 0x27,
0x20, 0x37, 0xbc, 0x03, 0x1a, 0x3b, 0x01, 0x23, 0x9b, 0xa8, 0xcb, 0x25, 0xc0, 0x13, 0x09, 0x78,
0x59, 0x6a, 0x88, 0x04, 0x0c, 0xa6, 0x72, 0x6e, 0x4d, 0x19, 0x98, 0x14, 0xae, 0x86, 0x61, 0x79,
0xf9, 0xb3, 0x04, 0xb9, 0xe1, 0x4d, 0xd4, 0xd8, 0x79, 0x19, 0xd9, 0x87, 0x0d, 0x95, 0x7a, 0x59,
0x50, 0x7e, 0x7c, 0xe7, 0xe1, 0x48, 0xa9, 0x0f, 0xe3, 0x86, 0xbe, 0x91, 0xe0, 0xca, 0x90, 0x7e,
0x0e, 0x29, 0x63, 0x12, 0x38, 0xbf, 0x1f, 0xbc, 0xdc, 0xae, 0xc6, 0x14, 0xd1, 0x25, 0x28, 0x7e,
0x2b, 0xc1, 0xd5, 0xa1, 0x3d, 0x20, 0x1a, 0xf7, 0xa7, 0xea, 0x51, 0xed, 0x6b, 0xee, 0xc9, 0xe5,
0x03, 0x45, 0xed, 0x68, 0x1f, 0xe7, 0x8b, 0x1c, 0xe5, 0xdc, 0xed, 0xaf, 0xca, 0x1f, 0x75, 0x11,
0xbc, 0x05, 0x20, 0xc2, 0x88, 0x3d, 0xca, 0x0a, 0xa6, 0xdb, 0xd9, 0xf8, 0xa7, 0x04, 0x9f, 0x98,
0x6e, 0x67, 0x3c, 0xe8, 0x1b, 0x8b, 0xa7, 0xb8, 0x1b, 0x5c, 0x89, 0x0d, 0xe9, 0xa7, 0xbb, 0xb1,
0x67, 0xdb, 0xb5, 0xb1, 0xd3, 0x2e, 0xb8, 0x7e, 0xbb, 0xd8, 0x26, 0x8e, 0xd0, 0x69, 0xf1, 0x74,
0xad, 0x11, 0x7f, 0xd4, 0x3c, 0x4e, 0xbe, 0xff, 0x6e, 0xe2, 0xe3, 0xad, 0x28, 0x66, 0x45, 0xa0,
0xe9, 0x5e, 0x1f, 0x42, 0x1f, 0xdd, 0x9b, 0xb8, 0xb0, 0xbf, 0xfe, 0x55, 0x77, 0xde, 0x81, 0x98,
0x77, 0xd0, 0x9d, 0x77, 0x90, 0x9c, 0x77, 0xb0, 0xbf, 0x7e, 0x98, 0x12, 0x68, 0x7e, 0xf0, 0xbf,
0x00, 0x00, 0x00, 0xff, 0xff, 0x86, 0x02, 0xf2, 0x9b, 0xec, 0x1a, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// DataSourceServiceClient is the client API for DataSourceService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DataSourceServiceClient interface {
// Update a transfer run. If successful, resets the
// data_source.update_deadline_seconds timer.
UpdateTransferRun(ctx context.Context, in *UpdateTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error)
// Log messages for a transfer run. If successful (at least one
// message), resets the data_source.update_deadline_seconds timer.
LogTransferRunMessages(ctx context.Context, in *LogTransferRunMessagesRequest, opts ...grpc.CallOption) (*empty.Empty, error)
// Notify the Data Transfer Service that data is ready for loading.
// The Data Transfer Service will start and monitor multiple BigQuery
// load jobs for a transfer run. Monitored jobs are retried
// automatically and produce log messages as they start and finish.
// This method can be called multiple times for the same transfer run.
StartBigQueryJobs(ctx context.Context, in *StartBigQueryJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error)
// Notify the Data Transfer Service that the data source is done
// processing the run. No further status updates or requests to
// start/monitor jobs will be accepted. The run will be finalized by
// the Data Transfer Service once all monitored jobs are completed.
// This method does not need to be called if the run is set to FAILED.
FinishRun(ctx context.Context, in *FinishRunRequest, opts ...grpc.CallOption) (*empty.Empty, error)
// Creates a data source definition. Calling this method will
// automatically use your credentials to create the following Google
// Cloud resources in YOUR Google Cloud project:
//   1. An OAuth client.
//   2. Pub/Sub topics and subscriptions in each of the
//      supported_location_ids, e.g.,
//      projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
// The data_source.client_id field should be left empty in the input
// request, as the API will create a new OAuth client on behalf of the
// caller. On the other hand, data_source.scopes usually needs to be
// set when there are OAuth scopes that need to be granted by end
// users.
// Note that this method needs a longer deadline because of the
// 60-second SLO on Pub/Sub admin operations; the same applies to
// updating and deleting a data source definition.
CreateDataSourceDefinition(ctx context.Context, in *CreateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
// Updates an existing data source definition. Changing
// supported_location_ids triggers the same effects as described for
// CreateDataSourceDefinition.
UpdateDataSourceDefinition(ctx context.Context, in *UpdateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
// Deletes a data source definition. All transfer configs associated
// with the data source definition (if any) must first be deleted by
// the user, in ALL regions, before the definition itself can be
// deleted. This method is primarily meant for deleting data sources
// created during the testing stage.
// If the data source is referenced by transfer configs in the region
// specified in the request URL, the method fails immediately. If the
// data source is not used by any transfer configs in the current
// region (e.g., US) but is used in another region (e.g., EU), the
// method succeeds in the US but fails when the deletion is replicated
// to the EU. The system then replicates the data source definition
// back from the EU to the US to bring all regions to consistency; the
// net effect is that the data source appears to be 'undeleted' in the
// US region.
DeleteDataSourceDefinition(ctx context.Context, in *DeleteDataSourceDefinitionRequest, opts ...grpc.CallOption) (*empty.Empty, error)
// Retrieves an existing data source definition.
GetDataSourceDefinition(ctx context.Context, in *GetDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error)
// Lists supported data source definitions.
ListDataSourceDefinitions(ctx context.Context, in *ListDataSourceDefinitionsRequest, opts ...grpc.CallOption) (*ListDataSourceDefinitionsResponse, error)
}
type dataSourceServiceClient struct {
cc grpc.ClientConnInterface
}
func NewDataSourceServiceClient(cc grpc.ClientConnInterface) DataSourceServiceClient {
return &dataSourceServiceClient{cc}
}
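// Example usage (an illustrative sketch added for documentation, not
// part of the generated code): dialing the service and driving a
// typical run lifecycle. The endpoint, the plaintext credentials, and
// the empty request values below are assumptions; real callers need
// TLS/OAuth credentials and populated request fields.
//
//	import (
//		"context"
//		"log"
//
//		"google.golang.org/grpc"
//	)
//
//	func runLifecycle() {
//		conn, err := grpc.Dial("bigquerydatatransfer.googleapis.com:443", grpc.WithInsecure())
//		if err != nil {
//			log.Fatalf("dial: %v", err)
//		}
//		defer conn.Close()
//		client := NewDataSourceServiceClient(conn)
//		ctx := context.Background()
//		// Tell the service that staged data is ready to load...
//		if _, err := client.StartBigQueryJobs(ctx, &StartBigQueryJobsRequest{}); err != nil {
//			log.Fatalf("StartBigQueryJobs: %v", err)
//		}
//		// ...then hand the run back once processing is done.
//		if _, err := client.FinishRun(ctx, &FinishRunRequest{}); err != nil {
//			log.Fatalf("FinishRun: %v", err)
//		}
//	}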
func (c *dataSourceServiceClient) UpdateTransferRun(ctx context.Context, in *UpdateTransferRunRequest, opts ...grpc.CallOption) (*TransferRun, error) {
out := new(TransferRun)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
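// Each generated client method follows this same shape: allocate the
// typed response, let cc.Invoke marshal the request, perform the RPC
// against the full method name, and unmarshal the reply into out.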
func (c *dataSourceServiceClient) LogTransferRunMessages(ctx context.Context, in *LogTransferRunMessagesRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
out := new(empty.Empty)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) StartBigQueryJobs(ctx context.Context, in *StartBigQueryJobsRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
out := new(empty.Empty)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) FinishRun(ctx context.Context, in *FinishRunRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
out := new(empty.Empty)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) CreateDataSourceDefinition(ctx context.Context, in *CreateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
out := new(DataSourceDefinition)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) UpdateDataSourceDefinition(ctx context.Context, in *UpdateDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
out := new(DataSourceDefinition)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) DeleteDataSourceDefinition(ctx context.Context, in *DeleteDataSourceDefinitionRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
out := new(empty.Empty)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) GetDataSourceDefinition(ctx context.Context, in *GetDataSourceDefinitionRequest, opts ...grpc.CallOption) (*DataSourceDefinition, error) {
out := new(DataSourceDefinition)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataSourceServiceClient) ListDataSourceDefinitions(ctx context.Context, in *ListDataSourceDefinitionsRequest, opts ...grpc.CallOption) (*ListDataSourceDefinitionsResponse, error) {
out := new(ListDataSourceDefinitionsResponse)
err := c.cc.Invoke(ctx, "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
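// The variadic grpc.CallOption parameters on each client method carry
// per-RPC behavior. A minimal sketch (whether to wait for readiness
// here is an assumption, not service guidance; required request
// fields are omitted):
//
//	def, err := client.GetDataSourceDefinition(
//		ctx,
//		&GetDataSourceDefinitionRequest{},
//		grpc.WaitForReady(true), // queue until the connection is ready instead of failing fast
//	)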
// DataSourceServiceServer is the server API for DataSourceService service.
type DataSourceServiceServer interface {
// Update a transfer run. If successful, resets the
// data_source.update_deadline_seconds timer.
UpdateTransferRun(context.Context, *UpdateTransferRunRequest) (*TransferRun, error)
// Log messages for a transfer run. If successful (at least one
// message), resets the data_source.update_deadline_seconds timer.
LogTransferRunMessages(context.Context, *LogTransferRunMessagesRequest) (*empty.Empty, error)
// Notify the Data Transfer Service that data is ready for loading.
// The Data Transfer Service will start and monitor multiple BigQuery
// load jobs for a transfer run. Monitored jobs are retried
// automatically and produce log messages as they start and finish.
// This method can be called multiple times for the same transfer run.
StartBigQueryJobs(context.Context, *StartBigQueryJobsRequest) (*empty.Empty, error)
// Notify the Data Transfer Service that the data source is done
// processing the run. No further status updates or requests to
// start/monitor jobs will be accepted. The run will be finalized by
// the Data Transfer Service once all monitored jobs are completed.
// This method does not need to be called if the run is set to FAILED.
FinishRun(context.Context, *FinishRunRequest) (*empty.Empty, error)
// Creates a data source definition. Calling this method will
// automatically use your credentials to create the following Google
// Cloud resources in YOUR Google Cloud project:
//   1. An OAuth client.
//   2. Pub/Sub topics and subscriptions in each of the
//      supported_location_ids, e.g.,
//      projects/{project_id}/{topics|subscriptions}/bigquerydatatransfer.{data_source_id}.{location_id}.run
// The data_source.client_id field should be left empty in the input
// request, as the API will create a new OAuth client on behalf of the
// caller. On the other hand, data_source.scopes usually needs to be
// set when there are OAuth scopes that need to be granted by end
// users.
// Note that this method needs a longer deadline because of the
// 60-second SLO on Pub/Sub admin operations; the same applies to
// updating and deleting a data source definition.
CreateDataSourceDefinition(context.Context, *CreateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
// Updates an existing data source definition. Changing
// supported_location_ids triggers the same effects as described for
// CreateDataSourceDefinition.
UpdateDataSourceDefinition(context.Context, *UpdateDataSourceDefinitionRequest) (*DataSourceDefinition, error)
// Deletes a data source definition. All transfer configs associated
// with the data source definition (if any) must first be deleted by
// the user, in ALL regions, before the definition itself can be
// deleted. This method is primarily meant for deleting data sources
// created during the testing stage.
// If the data source is referenced by transfer configs in the region
// specified in the request URL, the method fails immediately. If the
// data source is not used by any transfer configs in the current
// region (e.g., US) but is used in another region (e.g., EU), the
// method succeeds in the US but fails when the deletion is replicated
// to the EU. The system then replicates the data source definition
// back from the EU to the US to bring all regions to consistency; the
// net effect is that the data source appears to be 'undeleted' in the
// US region.
DeleteDataSourceDefinition(context.Context, *DeleteDataSourceDefinitionRequest) (*empty.Empty, error)
// Retrieves an existing data source definition.
GetDataSourceDefinition(context.Context, *GetDataSourceDefinitionRequest) (*DataSourceDefinition, error)
// Lists supported data source definitions.
ListDataSourceDefinitions(context.Context, *ListDataSourceDefinitionsRequest) (*ListDataSourceDefinitionsResponse, error)
}
// UnimplementedDataSourceServiceServer can be embedded to have forward-compatible implementations.
type UnimplementedDataSourceServiceServer struct {
}
func (*UnimplementedDataSourceServiceServer) UpdateTransferRun(ctx context.Context, req *UpdateTransferRunRequest) (*TransferRun, error) {
return nil, status.Errorf(codes.Unimplemented, "method UpdateTransferRun not implemented")
}
func (*UnimplementedDataSourceServiceServer) LogTransferRunMessages(ctx context.Context, req *LogTransferRunMessagesRequest) (*empty.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method LogTransferRunMessages not implemented")
}
func (*UnimplementedDataSourceServiceServer) StartBigQueryJobs(ctx context.Context, req *StartBigQueryJobsRequest) (*empty.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method StartBigQueryJobs not implemented")
}
func (*UnimplementedDataSourceServiceServer) FinishRun(ctx context.Context, req *FinishRunRequest) (*empty.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method FinishRun not implemented")
}
func (*UnimplementedDataSourceServiceServer) CreateDataSourceDefinition(ctx context.Context, req *CreateDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
return nil, status.Errorf(codes.Unimplemented, "method CreateDataSourceDefinition not implemented")
}
func (*UnimplementedDataSourceServiceServer) UpdateDataSourceDefinition(ctx context.Context, req *UpdateDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
return nil, status.Errorf(codes.Unimplemented, "method UpdateDataSourceDefinition not implemented")
}
func (*UnimplementedDataSourceServiceServer) DeleteDataSourceDefinition(ctx context.Context, req *DeleteDataSourceDefinitionRequest) (*empty.Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeleteDataSourceDefinition not implemented")
}
func (*UnimplementedDataSourceServiceServer) GetDataSourceDefinition(ctx context.Context, req *GetDataSourceDefinitionRequest) (*DataSourceDefinition, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetDataSourceDefinition not implemented")
}
func (*UnimplementedDataSourceServiceServer) ListDataSourceDefinitions(ctx context.Context, req *ListDataSourceDefinitionsRequest) (*ListDataSourceDefinitionsResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListDataSourceDefinitions not implemented")
}
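// A minimal server sketch (the struct name and the FinishRun behavior
// are assumptions): embed UnimplementedDataSourceServiceServer so that
// methods added to the service later do not break compilation, and
// override only the RPCs you actually support; the embedded type
// answers everything else with codes.Unimplemented.
//
//	type exampleDataSourceServer struct {
//		UnimplementedDataSourceServiceServer
//	}
//
//	func (s *exampleDataSourceServer) FinishRun(ctx context.Context, req *FinishRunRequest) (*empty.Empty, error) {
//		// Acknowledge the run; a real implementation would finalize state here.
//		return &empty.Empty{}, nil
//	}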
func RegisterDataSourceServiceServer(s *grpc.Server, srv DataSourceServiceServer) {
s.RegisterService(&_DataSourceService_serviceDesc, srv)
}
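// Wiring the implementation into a grpc.Server, sketched under the
// assumption of a local TCP listener and the hypothetical
// exampleDataSourceServer type above:
//
//	import (
//		"log"
//		"net"
//
//		"google.golang.org/grpc"
//	)
//
//	func serve() {
//		lis, err := net.Listen("tcp", ":8080")
//		if err != nil {
//			log.Fatalf("listen: %v", err)
//		}
//		s := grpc.NewServer()
//		RegisterDataSourceServiceServer(s, &exampleDataSourceServer{})
//		if err := s.Serve(lis); err != nil {
//			log.Fatalf("serve: %v", err)
//		}
//	}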
func _DataSourceService_UpdateTransferRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(UpdateTransferRunRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).UpdateTransferRun(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateTransferRun",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).UpdateTransferRun(ctx, req.(*UpdateTransferRunRequest))
}
return interceptor(ctx, in, info, handler)
}
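// Every generated handler follows the pattern above: decode the
// request, then either call the service method directly or route it
// through the unary interceptor installed on the server. A minimal
// logging interceptor sketch (illustrative only; assumes the standard
// library log package is imported):
//
//	func loggingInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
//		log.Printf("-> %s", info.FullMethod)
//		resp, err := handler(ctx, req) // invokes the generated handler's closure
//		log.Printf("<- %s err=%v", info.FullMethod, err)
//		return resp, err
//	}
//
// It is installed when constructing the server:
//
//	s := grpc.NewServer(grpc.UnaryInterceptor(loggingInterceptor))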
func _DataSourceService_LogTransferRunMessages_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(LogTransferRunMessagesRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).LogTransferRunMessages(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/LogTransferRunMessages",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).LogTransferRunMessages(ctx, req.(*LogTransferRunMessagesRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_StartBigQueryJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(StartBigQueryJobsRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).StartBigQueryJobs(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/StartBigQueryJobs",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).StartBigQueryJobs(ctx, req.(*StartBigQueryJobsRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_FinishRun_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(FinishRunRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).FinishRun(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/FinishRun",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).FinishRun(ctx, req.(*FinishRunRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_CreateDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(CreateDataSourceDefinitionRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).CreateDataSourceDefinition(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/CreateDataSourceDefinition",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).CreateDataSourceDefinition(ctx, req.(*CreateDataSourceDefinitionRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_UpdateDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(UpdateDataSourceDefinitionRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).UpdateDataSourceDefinition(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/UpdateDataSourceDefinition",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).UpdateDataSourceDefinition(ctx, req.(*UpdateDataSourceDefinitionRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_DeleteDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(DeleteDataSourceDefinitionRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).DeleteDataSourceDefinition(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/DeleteDataSourceDefinition",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).DeleteDataSourceDefinition(ctx, req.(*DeleteDataSourceDefinitionRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_GetDataSourceDefinition_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetDataSourceDefinitionRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).GetDataSourceDefinition(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/GetDataSourceDefinition",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).GetDataSourceDefinition(ctx, req.(*GetDataSourceDefinitionRequest))
}
return interceptor(ctx, in, info, handler)
}
func _DataSourceService_ListDataSourceDefinitions_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListDataSourceDefinitionsRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataSourceServiceServer).ListDataSourceDefinitions(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/google.cloud.bigquery.datatransfer.v1.DataSourceService/ListDataSourceDefinitions",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataSourceServiceServer).ListDataSourceDefinitions(ctx, req.(*ListDataSourceDefinitionsRequest))
}
return interceptor(ctx, in, info, handler)
}
var _DataSourceService_serviceDesc = grpc.ServiceDesc{
ServiceName: "google.cloud.bigquery.datatransfer.v1.DataSourceService",
HandlerType: (*DataSourceServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "UpdateTransferRun",
Handler: _DataSourceService_UpdateTransferRun_Handler,
},
{
MethodName: "LogTransferRunMessages",
Handler: _DataSourceService_LogTransferRunMessages_Handler,
},
{
MethodName: "StartBigQueryJobs",
Handler: _DataSourceService_StartBigQueryJobs_Handler,
},
{
MethodName: "FinishRun",
Handler: _DataSourceService_FinishRun_Handler,
},
{
MethodName: "CreateDataSourceDefinition",
Handler: _DataSourceService_CreateDataSourceDefinition_Handler,
},
{
MethodName: "UpdateDataSourceDefinition",
Handler: _DataSourceService_UpdateDataSourceDefinition_Handler,
},
{
MethodName: "DeleteDataSourceDefinition",
Handler: _DataSourceService_DeleteDataSourceDefinition_Handler,
},
{
MethodName: "GetDataSourceDefinition",
Handler: _DataSourceService_GetDataSourceDefinition_Handler,
},
{
MethodName: "ListDataSourceDefinitions",
Handler: _DataSourceService_ListDataSourceDefinitions_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "google/cloud/bigquery/datatransfer/v1/datasource.proto",
}
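// RegisterDataSourceServiceServer above simply passes this descriptor
// to (*grpc.Server).RegisterService, so registering directly against
// the descriptor is equivalent (sketch, reusing the hypothetical
// exampleDataSourceServer):
//
//	s.RegisterService(&_DataSourceService_serviceDesc, &exampleDataSourceServer{})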