| // Copyright 2024 Google LLC |
| // |
| // Licensed under the Apache License, Version 2.0 (the "License"); |
| // you may not use this file except in compliance with the License. |
| // You may obtain a copy of the License at |
| // |
| // http://www.apache.org/licenses/LICENSE-2.0 |
| // |
| // Unless required by applicable law or agreed to in writing, software |
| // distributed under the License is distributed on an "AS IS" BASIS, |
| // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| // See the License for the specific language governing permissions and |
| // limitations under the License. |
| |
| // Code generated by protoc-gen-go. DO NOT EDIT. |
| // versions: |
| // protoc-gen-go v1.34.1 |
| // protoc v4.25.3 |
| // source: google/ai/generativelanguage/v1beta/discuss_service.proto |
| |
| package generativelanguagepb |
| |
| import ( |
| context "context" |
| reflect "reflect" |
| sync "sync" |
| |
| _ "google.golang.org/genproto/googleapis/api/annotations" |
| grpc "google.golang.org/grpc" |
| codes "google.golang.org/grpc/codes" |
| status "google.golang.org/grpc/status" |
| protoreflect "google.golang.org/protobuf/reflect/protoreflect" |
| protoimpl "google.golang.org/protobuf/runtime/protoimpl" |
| ) |
| |
| const ( |
| // Verify that this generated code is sufficiently up-to-date. |
| _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) |
| // Verify that runtime/protoimpl is sufficiently up-to-date. |
| _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) |
| ) |
| |
| // Request to generate a message response from the model. |
| type GenerateMessageRequest struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
	// Required. The name of the model to use.
	//
	// Format: `models/{model}`.
| Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` |
	// Required. The structured textual input given to the model as a prompt.
	//
	// Given a prompt, the model will return what it predicts is the next
	// message in the discussion.
| Prompt *MessagePrompt `protobuf:"bytes,2,opt,name=prompt,proto3" json:"prompt,omitempty"` |
	// Optional. Controls the randomness of the output.
	//
	// Values can range over `[0.0, 1.0]`, inclusive. A value closer to `1.0`
	// will produce responses that are more varied, while a value closer to
	// `0.0` will typically result in less surprising responses from the model.
| Temperature *float32 `protobuf:"fixed32,3,opt,name=temperature,proto3,oneof" json:"temperature,omitempty"` |
	// Optional. The number of generated response messages to return.
	//
	// This value must be in the range `[1, 8]`, inclusive. If unset, this
	// will default to `1`.
| CandidateCount *int32 `protobuf:"varint,4,opt,name=candidate_count,json=candidateCount,proto3,oneof" json:"candidate_count,omitempty"` |
| // Optional. The maximum cumulative probability of tokens to consider when |
| // sampling. |
| // |
| // The model uses combined Top-k and nucleus sampling. |
| // |
| // Nucleus sampling considers the smallest set of tokens whose probability |
| // sum is at least `top_p`. |
| TopP *float32 `protobuf:"fixed32,5,opt,name=top_p,json=topP,proto3,oneof" json:"top_p,omitempty"` |
| // Optional. The maximum number of tokens to consider when sampling. |
| // |
| // The model uses combined Top-k and nucleus sampling. |
| // |
| // Top-k sampling considers the set of `top_k` most probable tokens. |
| TopK *int32 `protobuf:"varint,6,opt,name=top_k,json=topK,proto3,oneof" json:"top_k,omitempty"` |
| } |
| |
| func (x *GenerateMessageRequest) Reset() { |
| *x = GenerateMessageRequest{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[0] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *GenerateMessageRequest) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*GenerateMessageRequest) ProtoMessage() {} |
| |
| func (x *GenerateMessageRequest) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[0] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use GenerateMessageRequest.ProtoReflect.Descriptor instead. |
| func (*GenerateMessageRequest) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{0} |
| } |
| |
| func (x *GenerateMessageRequest) GetModel() string { |
| if x != nil { |
| return x.Model |
| } |
| return "" |
| } |
| |
| func (x *GenerateMessageRequest) GetPrompt() *MessagePrompt { |
| if x != nil { |
| return x.Prompt |
| } |
| return nil |
| } |
| |
| func (x *GenerateMessageRequest) GetTemperature() float32 { |
| if x != nil && x.Temperature != nil { |
| return *x.Temperature |
| } |
| return 0 |
| } |
| |
| func (x *GenerateMessageRequest) GetCandidateCount() int32 { |
| if x != nil && x.CandidateCount != nil { |
| return *x.CandidateCount |
| } |
| return 0 |
| } |
| |
| func (x *GenerateMessageRequest) GetTopP() float32 { |
| if x != nil && x.TopP != nil { |
| return *x.TopP |
| } |
| return 0 |
| } |
| |
| func (x *GenerateMessageRequest) GetTopK() int32 { |
| if x != nil && x.TopK != nil { |
| return *x.TopK |
| } |
| return 0 |
| } |
| |
// The response from the model.
//
// This includes candidate messages and conversation history in the form of
// chronologically-ordered messages.
| type GenerateMessageResponse struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // Candidate response messages from the model. |
| Candidates []*Message `protobuf:"bytes,1,rep,name=candidates,proto3" json:"candidates,omitempty"` |
| // The conversation history used by the model. |
| Messages []*Message `protobuf:"bytes,2,rep,name=messages,proto3" json:"messages,omitempty"` |
	// A set of content filtering metadata for the prompt and response text.
	//
	// This indicates which `SafetyCategory`(s) blocked a candidate from this
	// response, the lowest `HarmProbability` that triggered a block, and the
	// `HarmThreshold` setting for that category.
| Filters []*ContentFilter `protobuf:"bytes,3,rep,name=filters,proto3" json:"filters,omitempty"` |
| } |
| |
| func (x *GenerateMessageResponse) Reset() { |
| *x = GenerateMessageResponse{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[1] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *GenerateMessageResponse) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*GenerateMessageResponse) ProtoMessage() {} |
| |
| func (x *GenerateMessageResponse) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[1] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use GenerateMessageResponse.ProtoReflect.Descriptor instead. |
| func (*GenerateMessageResponse) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{1} |
| } |
| |
| func (x *GenerateMessageResponse) GetCandidates() []*Message { |
| if x != nil { |
| return x.Candidates |
| } |
| return nil |
| } |
| |
| func (x *GenerateMessageResponse) GetMessages() []*Message { |
| if x != nil { |
| return x.Messages |
| } |
| return nil |
| } |
| |
| func (x *GenerateMessageResponse) GetFilters() []*ContentFilter { |
| if x != nil { |
| return x.Filters |
| } |
| return nil |
| } |
| |
| // The base unit of structured text. |
| // |
| // A `Message` includes an `author` and the `content` of |
| // the `Message`. |
| // |
| // The `author` is used to tag messages when they are fed to the |
| // model as text. |
| type Message struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // Optional. The author of this Message. |
| // |
| // This serves as a key for tagging |
| // the content of this Message when it is fed to the model as text. |
| // |
| // The author can be any alphanumeric string. |
| Author string `protobuf:"bytes,1,opt,name=author,proto3" json:"author,omitempty"` |
| // Required. The text content of the structured `Message`. |
| Content string `protobuf:"bytes,2,opt,name=content,proto3" json:"content,omitempty"` |
| // Output only. Citation information for model-generated `content` in this |
| // `Message`. |
| // |
| // If this `Message` was generated as output from the model, this field may be |
| // populated with attribution information for any text included in the |
| // `content`. This field is used only on output. |
| CitationMetadata *CitationMetadata `protobuf:"bytes,3,opt,name=citation_metadata,json=citationMetadata,proto3,oneof" json:"citation_metadata,omitempty"` |
| } |
| |
| func (x *Message) Reset() { |
| *x = Message{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[2] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *Message) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*Message) ProtoMessage() {} |
| |
| func (x *Message) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[2] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use Message.ProtoReflect.Descriptor instead. |
| func (*Message) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{2} |
| } |
| |
| func (x *Message) GetAuthor() string { |
| if x != nil { |
| return x.Author |
| } |
| return "" |
| } |
| |
| func (x *Message) GetContent() string { |
| if x != nil { |
| return x.Content |
| } |
| return "" |
| } |
| |
| func (x *Message) GetCitationMetadata() *CitationMetadata { |
| if x != nil { |
| return x.CitationMetadata |
| } |
| return nil |
| } |
| |
| // All of the structured input text passed to the model as a prompt. |
| // |
| // A `MessagePrompt` contains a structured set of fields that provide context |
| // for the conversation, examples of user input/model output message pairs that |
| // prime the model to respond in different ways, and the conversation history |
| // or list of messages representing the alternating turns of the conversation |
| // between the user and the model. |
| type MessagePrompt struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // Optional. Text that should be provided to the model first to ground the |
| // response. |
| // |
	// If not empty, this `context` will be given to the model before the
	// `examples` and `messages`. When using a `context`, be sure to provide
	// it with every request to maintain continuity.
| // |
| // This field can be a description of your prompt to the model to help provide |
| // context and guide the responses. Examples: "Translate the phrase from |
| // English to French." or "Given a statement, classify the sentiment as happy, |
| // sad or neutral." |
| // |
| // Anything included in this field will take precedence over message history |
| // if the total input size exceeds the model's `input_token_limit` and the |
| // input request is truncated. |
| Context string `protobuf:"bytes,1,opt,name=context,proto3" json:"context,omitempty"` |
| // Optional. Examples of what the model should generate. |
| // |
| // This includes both user input and the response that the model should |
| // emulate. |
| // |
| // These `examples` are treated identically to conversation messages except |
| // that they take precedence over the history in `messages`: |
| // If the total input size exceeds the model's `input_token_limit` the input |
| // will be truncated. Items will be dropped from `messages` before `examples`. |
| Examples []*Example `protobuf:"bytes,2,rep,name=examples,proto3" json:"examples,omitempty"` |
| // Required. A snapshot of the recent conversation history sorted |
| // chronologically. |
| // |
| // Turns alternate between two authors. |
| // |
| // If the total input size exceeds the model's `input_token_limit` the input |
| // will be truncated: The oldest items will be dropped from `messages`. |
| Messages []*Message `protobuf:"bytes,3,rep,name=messages,proto3" json:"messages,omitempty"` |
| } |
| |
| func (x *MessagePrompt) Reset() { |
| *x = MessagePrompt{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[3] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *MessagePrompt) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*MessagePrompt) ProtoMessage() {} |
| |
| func (x *MessagePrompt) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[3] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use MessagePrompt.ProtoReflect.Descriptor instead. |
| func (*MessagePrompt) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{3} |
| } |
| |
| func (x *MessagePrompt) GetContext() string { |
| if x != nil { |
| return x.Context |
| } |
| return "" |
| } |
| |
| func (x *MessagePrompt) GetExamples() []*Example { |
| if x != nil { |
| return x.Examples |
| } |
| return nil |
| } |
| |
| func (x *MessagePrompt) GetMessages() []*Message { |
| if x != nil { |
| return x.Messages |
| } |
| return nil |
| } |
| |
| // An input/output example used to instruct the Model. |
| // |
| // It demonstrates how the model should respond or format its response. |
| type Example struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // Required. An example of an input `Message` from the user. |
| Input *Message `protobuf:"bytes,1,opt,name=input,proto3" json:"input,omitempty"` |
| // Required. An example of what the model should output given the input. |
| Output *Message `protobuf:"bytes,2,opt,name=output,proto3" json:"output,omitempty"` |
| } |
| |
| func (x *Example) Reset() { |
| *x = Example{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[4] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *Example) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*Example) ProtoMessage() {} |
| |
| func (x *Example) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[4] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use Example.ProtoReflect.Descriptor instead. |
| func (*Example) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{4} |
| } |
| |
| func (x *Example) GetInput() *Message { |
| if x != nil { |
| return x.Input |
| } |
| return nil |
| } |
| |
| func (x *Example) GetOutput() *Message { |
| if x != nil { |
| return x.Output |
| } |
| return nil |
| } |
| |
// Request to count the number of tokens in the `prompt` sent to a model.
| // |
| // Models may tokenize text differently, so each model may return a different |
| // `token_count`. |
| type CountMessageTokensRequest struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // Required. The model's resource name. This serves as an ID for the Model to |
| // use. |
| // |
| // This name should match a model name returned by the `ListModels` method. |
| // |
| // Format: `models/{model}` |
| Model string `protobuf:"bytes,1,opt,name=model,proto3" json:"model,omitempty"` |
| // Required. The prompt, whose token count is to be returned. |
| Prompt *MessagePrompt `protobuf:"bytes,2,opt,name=prompt,proto3" json:"prompt,omitempty"` |
| } |
| |
| func (x *CountMessageTokensRequest) Reset() { |
| *x = CountMessageTokensRequest{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[5] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *CountMessageTokensRequest) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*CountMessageTokensRequest) ProtoMessage() {} |
| |
| func (x *CountMessageTokensRequest) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[5] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use CountMessageTokensRequest.ProtoReflect.Descriptor instead. |
| func (*CountMessageTokensRequest) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{5} |
| } |
| |
| func (x *CountMessageTokensRequest) GetModel() string { |
| if x != nil { |
| return x.Model |
| } |
| return "" |
| } |
| |
| func (x *CountMessageTokensRequest) GetPrompt() *MessagePrompt { |
| if x != nil { |
| return x.Prompt |
| } |
| return nil |
| } |
| |
| // A response from `CountMessageTokens`. |
| // |
| // It returns the model's `token_count` for the `prompt`. |
| type CountMessageTokensResponse struct { |
| state protoimpl.MessageState |
| sizeCache protoimpl.SizeCache |
| unknownFields protoimpl.UnknownFields |
| |
| // The number of tokens that the `model` tokenizes the `prompt` into. |
| // |
| // Always non-negative. |
| TokenCount int32 `protobuf:"varint,1,opt,name=token_count,json=tokenCount,proto3" json:"token_count,omitempty"` |
| } |
| |
| func (x *CountMessageTokensResponse) Reset() { |
| *x = CountMessageTokensResponse{} |
| if protoimpl.UnsafeEnabled { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[6] |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| ms.StoreMessageInfo(mi) |
| } |
| } |
| |
| func (x *CountMessageTokensResponse) String() string { |
| return protoimpl.X.MessageStringOf(x) |
| } |
| |
| func (*CountMessageTokensResponse) ProtoMessage() {} |
| |
| func (x *CountMessageTokensResponse) ProtoReflect() protoreflect.Message { |
| mi := &file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[6] |
| if protoimpl.UnsafeEnabled && x != nil { |
| ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) |
| if ms.LoadMessageInfo() == nil { |
| ms.StoreMessageInfo(mi) |
| } |
| return ms |
| } |
| return mi.MessageOf(x) |
| } |
| |
| // Deprecated: Use CountMessageTokensResponse.ProtoReflect.Descriptor instead. |
| func (*CountMessageTokensResponse) Descriptor() ([]byte, []int) { |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP(), []int{6} |
| } |
| |
| func (x *CountMessageTokensResponse) GetTokenCount() int32 { |
| if x != nil { |
| return x.TokenCount |
| } |
| return 0 |
| } |
| |
| var File_google_ai_generativelanguage_v1beta_discuss_service_proto protoreflect.FileDescriptor |
| |
| var file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDesc = []byte{ |
| 0x0a, 0x39, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, |
| 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x64, 0x69, 0x73, 0x63, 0x75, 0x73, 0x73, 0x5f, 0x73, 0x65, |
| 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x23, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, |
| 0x1a, 0x32, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, |
| 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2f, 0x76, |
| 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, |
| 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x30, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x69, 0x2f, |
| 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, |
| 0x67, 0x65, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x73, 0x61, 0x66, 0x65, 0x74, 0x79, |
| 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, |
| 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, |
| 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, |
| 0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, |
| 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, |
| 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, |
| 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x72, 0x65, 0x73, 0x6f, 0x75, |
| 0x72, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x85, 0x03, 0x0a, 0x16, 0x47, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x71, |
| 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, |
| 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, |
| 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, |
| 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, |
| 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x12, 0x4f, 0x0a, 0x06, 0x70, |
| 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x6f, |
| 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, |
| 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, |
| 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x42, |
| 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x12, 0x2a, 0x0a, 0x0b, |
| 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, |
| 0x02, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x00, 0x52, 0x0b, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, |
| 0x61, 0x74, 0x75, 0x72, 0x65, 0x88, 0x01, 0x01, 0x12, 0x31, 0x0a, 0x0f, 0x63, 0x61, 0x6e, 0x64, |
| 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, |
| 0x05, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x01, 0x52, 0x0e, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, |
| 0x61, 0x74, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x05, 0x74, |
| 0x6f, 0x70, 0x5f, 0x70, 0x18, 0x05, 0x20, 0x01, 0x28, 0x02, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, |
| 0x02, 0x52, 0x04, 0x74, 0x6f, 0x70, 0x50, 0x88, 0x01, 0x01, 0x12, 0x1d, 0x0a, 0x05, 0x74, 0x6f, |
| 0x70, 0x5f, 0x6b, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x48, 0x03, |
| 0x52, 0x04, 0x74, 0x6f, 0x70, 0x4b, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x65, |
| 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x12, 0x0a, 0x10, 0x5f, 0x63, 0x61, |
| 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x08, 0x0a, |
| 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x74, 0x6f, 0x70, 0x5f, |
| 0x6b, 0x22, 0xff, 0x01, 0x0a, 0x17, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x4d, 0x65, |
| 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, |
| 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, |
| 0x0b, 0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, |
| 0x0a, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x73, 0x12, 0x48, 0x0a, 0x08, 0x6d, |
| 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2c, 0x2e, |
| 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, |
| 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, |
| 0x65, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x08, 0x6d, 0x65, 0x73, |
| 0x73, 0x61, 0x67, 0x65, 0x73, 0x12, 0x4c, 0x0a, 0x07, 0x66, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x73, |
| 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, |
| 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, |
| 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x43, 0x6f, 0x6e, |
| 0x74, 0x65, 0x6e, 0x74, 0x46, 0x69, 0x6c, 0x74, 0x65, 0x72, 0x52, 0x07, 0x66, 0x69, 0x6c, 0x74, |
| 0x65, 0x72, 0x73, 0x22, 0xc9, 0x01, 0x0a, 0x07, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, |
| 0x1b, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, |
| 0x03, 0xe0, 0x41, 0x01, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x1d, 0x0a, 0x07, |
| 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, |
| 0x41, 0x02, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x6c, 0x0a, 0x11, 0x63, |
| 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, |
| 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, |
| 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, |
| 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x43, 0x69, 0x74, |
| 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x03, 0xe0, |
| 0x41, 0x03, 0x48, 0x00, 0x52, 0x10, 0x63, 0x69, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x65, |
| 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x42, 0x14, 0x0a, 0x12, 0x5f, 0x63, 0x69, |
| 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, |
| 0xcc, 0x01, 0x0a, 0x0d, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6d, 0x70, |
| 0x74, 0x12, 0x1d, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x01, 0x20, 0x01, |
| 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x01, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, |
| 0x12, 0x4d, 0x0a, 0x08, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, |
| 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, |
| 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, |
| 0x42, 0x03, 0xe0, 0x41, 0x01, 0x52, 0x08, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x12, |
| 0x4d, 0x0a, 0x08, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, |
| 0x0b, 0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, |
| 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, |
| 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x73, 0x22, 0x9d, |
| 0x01, 0x0a, 0x07, 0x45, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x12, 0x47, 0x0a, 0x05, 0x69, 0x6e, |
| 0x70, 0x75, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, |
| 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, |
| 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, |
| 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x05, 0x69, 0x6e, |
| 0x70, 0x75, 0x74, 0x12, 0x49, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, |
| 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, |
| 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, |
| 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, |
| 0x65, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0xb3, |
| 0x01, 0x0a, 0x19, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, |
| 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x45, 0x0a, 0x05, |
| 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x2f, 0xe0, 0x41, 0x02, |
| 0xfa, 0x41, 0x29, 0x0a, 0x27, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, |
| 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x52, 0x05, 0x6d, 0x6f, |
| 0x64, 0x65, 0x6c, 0x12, 0x4f, 0x0a, 0x06, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x18, 0x02, 0x20, |
| 0x01, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, |
| 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, |
| 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, |
| 0x65, 0x50, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x70, 0x72, |
| 0x6f, 0x6d, 0x70, 0x74, 0x22, 0x3d, 0x0a, 0x1a, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, |
| 0x73, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, |
| 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x63, 0x6f, 0x75, 0x6e, |
| 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0a, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x6f, |
| 0x75, 0x6e, 0x74, 0x32, 0x90, 0x04, 0x0a, 0x0e, 0x44, 0x69, 0x73, 0x63, 0x75, 0x73, 0x73, 0x53, |
| 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0xf8, 0x01, 0x0a, 0x0f, 0x47, 0x65, 0x6e, 0x65, 0x72, |
| 0x61, 0x74, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x3b, 0x2e, 0x67, 0x6f, 0x6f, |
| 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, |
| 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, |
| 0x2e, 0x47, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, |
| 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, |
| 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, |
| 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x47, 0x65, |
| 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x52, 0x65, 0x73, |
| 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6a, 0xda, 0x41, 0x34, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2c, |
| 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x2c, 0x74, 0x65, 0x6d, 0x70, 0x65, 0x72, 0x61, 0x74, 0x75, |
| 0x72, 0x65, 0x2c, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x6f, 0x75, |
| 0x6e, 0x74, 0x2c, 0x74, 0x6f, 0x70, 0x5f, 0x70, 0x2c, 0x74, 0x6f, 0x70, 0x5f, 0x6b, 0x82, 0xd3, |
| 0xe4, 0x93, 0x02, 0x2d, 0x3a, 0x01, 0x2a, 0x22, 0x28, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, |
| 0x2f, 0x7b, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f, 0x2a, |
| 0x7d, 0x3a, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, |
| 0x65, 0x12, 0xdc, 0x01, 0x0a, 0x12, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, |
| 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x3e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, |
| 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x43, |
| 0x6f, 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, |
| 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x3f, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, |
| 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2e, 0x43, |
| 0x6f, 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, |
| 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x45, 0xda, 0x41, 0x0c, 0x6d, 0x6f, |
| 0x64, 0x65, 0x6c, 0x2c, 0x70, 0x72, 0x6f, 0x6d, 0x70, 0x74, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, |
| 0x3a, 0x01, 0x2a, 0x22, 0x2b, 0x2f, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x7b, 0x6d, 0x6f, |
| 0x64, 0x65, 0x6c, 0x3d, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x73, 0x2f, 0x2a, 0x7d, 0x3a, 0x63, 0x6f, |
| 0x75, 0x6e, 0x74, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, |
| 0x1a, 0x24, 0xca, 0x41, 0x21, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, |
| 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x61, 0x70, |
| 0x69, 0x73, 0x2e, 0x63, 0x6f, 0x6d, 0x42, 0x9f, 0x01, 0x0a, 0x27, 0x63, 0x6f, 0x6d, 0x2e, 0x67, |
| 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x61, 0x69, 0x2e, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, |
| 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x2e, 0x76, 0x31, 0x62, 0x65, |
| 0x74, 0x61, 0x42, 0x13, 0x44, 0x69, 0x73, 0x63, 0x75, 0x73, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, |
| 0x63, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x5d, 0x63, 0x6c, 0x6f, 0x75, 0x64, |
| 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x6f, 0x2f, 0x61, |
| 0x69, 0x2f, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, |
| 0x75, 0x61, 0x67, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x76, 0x31, 0x62, 0x65, 0x74, 0x61, 0x2f, 0x67, |
| 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, |
| 0x65, 0x70, 0x62, 0x3b, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x69, 0x76, 0x65, 0x6c, 0x61, |
| 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, |
| } |
| |
| var ( |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescOnce sync.Once |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescData = file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDesc |
| ) |
| |
| func file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescGZIP() []byte { |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescOnce.Do(func() { |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescData) |
| }) |
| return file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDescData |
| } |
| |
| var file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes = make([]protoimpl.MessageInfo, 7) |
| var file_google_ai_generativelanguage_v1beta_discuss_service_proto_goTypes = []interface{}{ |
| (*GenerateMessageRequest)(nil), // 0: google.ai.generativelanguage.v1beta.GenerateMessageRequest |
| (*GenerateMessageResponse)(nil), // 1: google.ai.generativelanguage.v1beta.GenerateMessageResponse |
| (*Message)(nil), // 2: google.ai.generativelanguage.v1beta.Message |
| (*MessagePrompt)(nil), // 3: google.ai.generativelanguage.v1beta.MessagePrompt |
| (*Example)(nil), // 4: google.ai.generativelanguage.v1beta.Example |
| (*CountMessageTokensRequest)(nil), // 5: google.ai.generativelanguage.v1beta.CountMessageTokensRequest |
| (*CountMessageTokensResponse)(nil), // 6: google.ai.generativelanguage.v1beta.CountMessageTokensResponse |
| (*ContentFilter)(nil), // 7: google.ai.generativelanguage.v1beta.ContentFilter |
| (*CitationMetadata)(nil), // 8: google.ai.generativelanguage.v1beta.CitationMetadata |
| } |
| var file_google_ai_generativelanguage_v1beta_discuss_service_proto_depIdxs = []int32{ |
| 3, // 0: google.ai.generativelanguage.v1beta.GenerateMessageRequest.prompt:type_name -> google.ai.generativelanguage.v1beta.MessagePrompt |
| 2, // 1: google.ai.generativelanguage.v1beta.GenerateMessageResponse.candidates:type_name -> google.ai.generativelanguage.v1beta.Message |
| 2, // 2: google.ai.generativelanguage.v1beta.GenerateMessageResponse.messages:type_name -> google.ai.generativelanguage.v1beta.Message |
| 7, // 3: google.ai.generativelanguage.v1beta.GenerateMessageResponse.filters:type_name -> google.ai.generativelanguage.v1beta.ContentFilter |
| 8, // 4: google.ai.generativelanguage.v1beta.Message.citation_metadata:type_name -> google.ai.generativelanguage.v1beta.CitationMetadata |
| 4, // 5: google.ai.generativelanguage.v1beta.MessagePrompt.examples:type_name -> google.ai.generativelanguage.v1beta.Example |
| 2, // 6: google.ai.generativelanguage.v1beta.MessagePrompt.messages:type_name -> google.ai.generativelanguage.v1beta.Message |
| 2, // 7: google.ai.generativelanguage.v1beta.Example.input:type_name -> google.ai.generativelanguage.v1beta.Message |
| 2, // 8: google.ai.generativelanguage.v1beta.Example.output:type_name -> google.ai.generativelanguage.v1beta.Message |
| 3, // 9: google.ai.generativelanguage.v1beta.CountMessageTokensRequest.prompt:type_name -> google.ai.generativelanguage.v1beta.MessagePrompt |
| 0, // 10: google.ai.generativelanguage.v1beta.DiscussService.GenerateMessage:input_type -> google.ai.generativelanguage.v1beta.GenerateMessageRequest |
| 5, // 11: google.ai.generativelanguage.v1beta.DiscussService.CountMessageTokens:input_type -> google.ai.generativelanguage.v1beta.CountMessageTokensRequest |
| 1, // 12: google.ai.generativelanguage.v1beta.DiscussService.GenerateMessage:output_type -> google.ai.generativelanguage.v1beta.GenerateMessageResponse |
| 6, // 13: google.ai.generativelanguage.v1beta.DiscussService.CountMessageTokens:output_type -> google.ai.generativelanguage.v1beta.CountMessageTokensResponse |
| 12, // [12:14] is the sub-list for method output_type |
| 10, // [10:12] is the sub-list for method input_type |
| 10, // [10:10] is the sub-list for extension type_name |
| 10, // [10:10] is the sub-list for extension extendee |
| 0, // [0:10] is the sub-list for field type_name |
| } |
| |
| func init() { file_google_ai_generativelanguage_v1beta_discuss_service_proto_init() } |
| func file_google_ai_generativelanguage_v1beta_discuss_service_proto_init() { |
| if File_google_ai_generativelanguage_v1beta_discuss_service_proto != nil { |
| return |
| } |
| file_google_ai_generativelanguage_v1beta_citation_proto_init() |
| file_google_ai_generativelanguage_v1beta_safety_proto_init() |
| if !protoimpl.UnsafeEnabled { |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*GenerateMessageRequest); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*GenerateMessageResponse); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*Message); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*MessagePrompt); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*Example); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*CountMessageTokensRequest); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { |
| switch v := v.(*CountMessageTokensResponse); i { |
| case 0: |
| return &v.state |
| case 1: |
| return &v.sizeCache |
| case 2: |
| return &v.unknownFields |
| default: |
| return nil |
| } |
| } |
| } |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[0].OneofWrappers = []interface{}{} |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes[2].OneofWrappers = []interface{}{} |
| type x struct{} |
| out := protoimpl.TypeBuilder{ |
| File: protoimpl.DescBuilder{ |
| GoPackagePath: reflect.TypeOf(x{}).PkgPath(), |
| RawDescriptor: file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDesc, |
| NumEnums: 0, |
| NumMessages: 7, |
| NumExtensions: 0, |
| NumServices: 1, |
| }, |
| GoTypes: file_google_ai_generativelanguage_v1beta_discuss_service_proto_goTypes, |
| DependencyIndexes: file_google_ai_generativelanguage_v1beta_discuss_service_proto_depIdxs, |
| MessageInfos: file_google_ai_generativelanguage_v1beta_discuss_service_proto_msgTypes, |
| }.Build() |
| File_google_ai_generativelanguage_v1beta_discuss_service_proto = out.File |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_rawDesc = nil |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_goTypes = nil |
| file_google_ai_generativelanguage_v1beta_discuss_service_proto_depIdxs = nil |
| } |
| |
| // Reference imports to suppress errors if they are not otherwise used. |
| var _ context.Context |
| var _ grpc.ClientConnInterface |
| |
| // This is a compile-time assertion to ensure that this generated file |
| // is compatible with the grpc package it is being compiled against. |
| const _ = grpc.SupportPackageIsVersion6 |
| |
| // DiscussServiceClient is the client API for DiscussService service. |
| // |
| // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. |
| type DiscussServiceClient interface { |
| // Generates a response from the model given an input `MessagePrompt`. |
| GenerateMessage(ctx context.Context, in *GenerateMessageRequest, opts ...grpc.CallOption) (*GenerateMessageResponse, error) |
| // Runs a model's tokenizer on a string and returns the token count. |
| CountMessageTokens(ctx context.Context, in *CountMessageTokensRequest, opts ...grpc.CallOption) (*CountMessageTokensResponse, error) |
| } |
| |
| type discussServiceClient struct { |
| cc grpc.ClientConnInterface |
| } |
| |
| func NewDiscussServiceClient(cc grpc.ClientConnInterface) DiscussServiceClient { |
| return &discussServiceClient{cc} |
| } |
| |
| func (c *discussServiceClient) GenerateMessage(ctx context.Context, in *GenerateMessageRequest, opts ...grpc.CallOption) (*GenerateMessageResponse, error) { |
| out := new(GenerateMessageResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1beta.DiscussService/GenerateMessage", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
| func (c *discussServiceClient) CountMessageTokens(ctx context.Context, in *CountMessageTokensRequest, opts ...grpc.CallOption) (*CountMessageTokensResponse, error) { |
| out := new(CountMessageTokensResponse) |
| err := c.cc.Invoke(ctx, "/google.ai.generativelanguage.v1beta.DiscussService/CountMessageTokens", in, out, opts...) |
| if err != nil { |
| return nil, err |
| } |
| return out, nil |
| } |
| |
| // DiscussServiceServer is the server API for DiscussService service. |
| type DiscussServiceServer interface { |
| // Generates a response from the model given an input `MessagePrompt`. |
| GenerateMessage(context.Context, *GenerateMessageRequest) (*GenerateMessageResponse, error) |
| // Runs a model's tokenizer on a string and returns the token count. |
| CountMessageTokens(context.Context, *CountMessageTokensRequest) (*CountMessageTokensResponse, error) |
| } |
| |
// UnimplementedDiscussServiceServer can be embedded to have forward-compatible implementations.
| type UnimplementedDiscussServiceServer struct { |
| } |
| |
| func (*UnimplementedDiscussServiceServer) GenerateMessage(context.Context, *GenerateMessageRequest) (*GenerateMessageResponse, error) { |
| return nil, status.Errorf(codes.Unimplemented, "method GenerateMessage not implemented") |
| } |
| func (*UnimplementedDiscussServiceServer) CountMessageTokens(context.Context, *CountMessageTokensRequest) (*CountMessageTokensResponse, error) { |
| return nil, status.Errorf(codes.Unimplemented, "method CountMessageTokens not implemented") |
| } |
| |
| func RegisterDiscussServiceServer(s *grpc.Server, srv DiscussServiceServer) { |
| s.RegisterService(&_DiscussService_serviceDesc, srv) |
| } |
| |
| func _DiscussService_GenerateMessage_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(GenerateMessageRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(DiscussServiceServer).GenerateMessage(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1beta.DiscussService/GenerateMessage", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(DiscussServiceServer).GenerateMessage(ctx, req.(*GenerateMessageRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
| func _DiscussService_CountMessageTokens_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { |
| in := new(CountMessageTokensRequest) |
| if err := dec(in); err != nil { |
| return nil, err |
| } |
| if interceptor == nil { |
| return srv.(DiscussServiceServer).CountMessageTokens(ctx, in) |
| } |
| info := &grpc.UnaryServerInfo{ |
| Server: srv, |
| FullMethod: "/google.ai.generativelanguage.v1beta.DiscussService/CountMessageTokens", |
| } |
| handler := func(ctx context.Context, req interface{}) (interface{}, error) { |
| return srv.(DiscussServiceServer).CountMessageTokens(ctx, req.(*CountMessageTokensRequest)) |
| } |
| return interceptor(ctx, in, info, handler) |
| } |
| |
| var _DiscussService_serviceDesc = grpc.ServiceDesc{ |
| ServiceName: "google.ai.generativelanguage.v1beta.DiscussService", |
| HandlerType: (*DiscussServiceServer)(nil), |
| Methods: []grpc.MethodDesc{ |
| { |
| MethodName: "GenerateMessage", |
| Handler: _DiscussService_GenerateMessage_Handler, |
| }, |
| { |
| MethodName: "CountMessageTokens", |
| Handler: _DiscussService_CountMessageTokens_Handler, |
| }, |
| }, |
| Streams: []grpc.StreamDesc{}, |
| Metadata: "google/ai/generativelanguage/v1beta/discuss_service.proto", |
| } |