blob: 96d84675c4ac1dca3f336e761dedfac3d70e609d [file] [log] [blame]
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/dialogflow/v2/session.proto
package dialogflow
import (
context "context"
fmt "fmt"
math "math"
proto "github.com/golang/protobuf/proto"
duration "github.com/golang/protobuf/ptypes/duration"
_struct "github.com/golang/protobuf/ptypes/struct"
_ "google.golang.org/genproto/googleapis/api/annotations"
status "google.golang.org/genproto/googleapis/rpc/status"
latlng "google.golang.org/genproto/googleapis/type/latlng"
field_mask "google.golang.org/genproto/protobuf/field_mask"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status1 "google.golang.org/grpc/status"
)
// Reference imports to suppress errors if they are not otherwise used.
// (Generated files import every package the .proto might need; these
// blank assignments keep the compiler quiet when a package ends up unused.)
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
// Type of the response message.
type StreamingRecognitionResult_MessageType int32

const (
	// Not specified. Should never be used.
	StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED StreamingRecognitionResult_MessageType = 0
	// Message contains a (possibly partial) transcript.
	StreamingRecognitionResult_TRANSCRIPT StreamingRecognitionResult_MessageType = 1
	// Event indicates that the server has detected the end of the user's speech
	// utterance and expects no additional inputs.
	// Therefore, the server will not process additional audio (although it may subsequently return additional results). The
	// client should stop sending additional audio data, half-close the gRPC
	// connection, and wait for any additional results until the server closes
	// the gRPC connection. This message is only sent if `single_utterance` was
	// set to `true`, and is not used otherwise.
	StreamingRecognitionResult_END_OF_SINGLE_UTTERANCE StreamingRecognitionResult_MessageType = 2
)

// Lookup tables between the enum's numeric values and their proto names,
// used by proto.EnumName and the generated (de)serialization machinery.
var StreamingRecognitionResult_MessageType_name = map[int32]string{
	0: "MESSAGE_TYPE_UNSPECIFIED",
	1: "TRANSCRIPT",
	2: "END_OF_SINGLE_UTTERANCE",
}

var StreamingRecognitionResult_MessageType_value = map[string]int32{
	"MESSAGE_TYPE_UNSPECIFIED": 0,
	"TRANSCRIPT":               1,
	"END_OF_SINGLE_UTTERANCE":  2,
}

// String returns the proto name of the enum value (e.g. "TRANSCRIPT").
func (x StreamingRecognitionResult_MessageType) String() string {
	return proto.EnumName(StreamingRecognitionResult_MessageType_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{7, 0}) identifying this enum within that descriptor.
func (StreamingRecognitionResult_MessageType) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{7, 0}
}
// Requests and responses for custom methods.
// The request to detect user's intent.
type DetectIntentRequest struct {
	// Required. The name of the session this query is sent to. Format:
	// `projects/<Project ID>/agent/sessions/<Session ID>`, or
	// `projects/<Project ID>/agent/environments/<Environment ID>/users/<User
	// ID>/sessions/<Session ID>`. If `Environment ID` is not specified, we assume
	// default 'draft' environment. If `User ID` is not specified, we are using
	// "-". It's up to the API caller to choose an appropriate `Session ID` and
	// `User Id`. They can be a random number or some type of user and session
	// identifiers (preferably hashed). The length of the `Session ID` and
	// `User ID` must not exceed 36 characters.
	Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"`
	// The parameters of this query.
	QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"`
	// Required. The input specification. It can be set to:
	//
	// 1. an audio config
	// which instructs the speech recognizer how to process the speech audio,
	//
	// 2. a conversational query in the form of text, or
	//
	// 3. an event that specifies which intent to trigger.
	QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"`
	// Instructs the speech synthesizer how to generate the output
	// audio. If this field is not set and agent-level speech synthesizer is not
	// configured, no output audio is generated.
	OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,4,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"`
	// Mask for [output_audio_config][google.cloud.dialogflow.v2.DetectIntentRequest.output_audio_config] indicating which settings in this
	// request-level config should override speech synthesizer settings defined at
	// agent-level.
	//
	// If unspecified or empty, [output_audio_config][google.cloud.dialogflow.v2.DetectIntentRequest.output_audio_config] replaces the agent-level
	// config in its entirety.
	OutputAudioConfigMask *field_mask.FieldMask `protobuf:"bytes,7,opt,name=output_audio_config_mask,json=outputAudioConfigMask,proto3" json:"output_audio_config_mask,omitempty"`
	// The natural language speech audio to be processed. This field
	// should be populated iff `query_input` is set to an input audio config.
	// A single request can contain up to 1 minute of speech audio data.
	InputAudio []byte `protobuf:"bytes,5,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *DetectIntentRequest) Reset()         { *m = DetectIntentRequest{} }
func (m *DetectIntentRequest) String() string { return proto.CompactTextString(m) }
func (*DetectIntentRequest) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{0}) within that descriptor.
func (*DetectIntentRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{0}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *DetectIntentRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DetectIntentRequest.Unmarshal(m, b)
}
func (m *DetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DetectIntentRequest.Marshal(b, m, deterministic)
}
func (m *DetectIntentRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DetectIntentRequest.Merge(m, src)
}
func (m *DetectIntentRequest) XXX_Size() int {
	return xxx_messageInfo_DetectIntentRequest.Size(m)
}
func (m *DetectIntentRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_DetectIntentRequest.DiscardUnknown(m)
}

var xxx_messageInfo_DetectIntentRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: calling them on a nil
// *DetectIntentRequest returns the field's zero value.

func (m *DetectIntentRequest) GetSession() string {
	if m != nil {
		return m.Session
	}
	return ""
}

func (m *DetectIntentRequest) GetQueryParams() *QueryParameters {
	if m != nil {
		return m.QueryParams
	}
	return nil
}

func (m *DetectIntentRequest) GetQueryInput() *QueryInput {
	if m != nil {
		return m.QueryInput
	}
	return nil
}

func (m *DetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig {
	if m != nil {
		return m.OutputAudioConfig
	}
	return nil
}

func (m *DetectIntentRequest) GetOutputAudioConfigMask() *field_mask.FieldMask {
	if m != nil {
		return m.OutputAudioConfigMask
	}
	return nil
}

func (m *DetectIntentRequest) GetInputAudio() []byte {
	if m != nil {
		return m.InputAudio
	}
	return nil
}
// The message returned from the DetectIntent method.
type DetectIntentResponse struct {
	// The unique identifier of the response. It can be used to
	// locate a response in the training example set or for reporting issues.
	ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"`
	// The selected results of the conversational query or event processing.
	// See `alternative_query_results` for additional potential results.
	QueryResult *QueryResult `protobuf:"bytes,2,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"`
	// Specifies the status of the webhook request.
	WebhookStatus *status.Status `protobuf:"bytes,3,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"`
	// The audio data bytes encoded as specified in the request.
	// Note: The output audio is generated based on the values of default platform
	// text responses found in the `query_result.fulfillment_messages` field. If
	// multiple default text responses exist, they will be concatenated when
	// generating audio. If no default platform text responses exist, the
	// generated audio content will be empty.
	OutputAudio []byte `protobuf:"bytes,4,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"`
	// The config used by the speech synthesizer to generate the output audio.
	OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *DetectIntentResponse) Reset()         { *m = DetectIntentResponse{} }
func (m *DetectIntentResponse) String() string { return proto.CompactTextString(m) }
func (*DetectIntentResponse) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{1}) within that descriptor.
func (*DetectIntentResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{1}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *DetectIntentResponse) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DetectIntentResponse.Unmarshal(m, b)
}
func (m *DetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DetectIntentResponse.Marshal(b, m, deterministic)
}
func (m *DetectIntentResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DetectIntentResponse.Merge(m, src)
}
func (m *DetectIntentResponse) XXX_Size() int {
	return xxx_messageInfo_DetectIntentResponse.Size(m)
}
func (m *DetectIntentResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_DetectIntentResponse.DiscardUnknown(m)
}

var xxx_messageInfo_DetectIntentResponse proto.InternalMessageInfo

// The getters below are nil-receiver safe: calling them on a nil
// *DetectIntentResponse returns the field's zero value.

func (m *DetectIntentResponse) GetResponseId() string {
	if m != nil {
		return m.ResponseId
	}
	return ""
}

func (m *DetectIntentResponse) GetQueryResult() *QueryResult {
	if m != nil {
		return m.QueryResult
	}
	return nil
}

func (m *DetectIntentResponse) GetWebhookStatus() *status.Status {
	if m != nil {
		return m.WebhookStatus
	}
	return nil
}

func (m *DetectIntentResponse) GetOutputAudio() []byte {
	if m != nil {
		return m.OutputAudio
	}
	return nil
}

func (m *DetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig {
	if m != nil {
		return m.OutputAudioConfig
	}
	return nil
}
// Represents the parameters of the conversational query.
type QueryParameters struct {
	// The time zone of this conversational query from the
	// [time zone database](https://www.iana.org/time-zones), e.g.,
	// America/New_York, Europe/Paris. If not provided, the time zone specified in
	// agent settings is used.
	TimeZone string `protobuf:"bytes,1,opt,name=time_zone,json=timeZone,proto3" json:"time_zone,omitempty"`
	// The geo location of this conversational query.
	GeoLocation *latlng.LatLng `protobuf:"bytes,2,opt,name=geo_location,json=geoLocation,proto3" json:"geo_location,omitempty"`
	// The collection of contexts to be activated before this query is
	// executed.
	Contexts []*Context `protobuf:"bytes,3,rep,name=contexts,proto3" json:"contexts,omitempty"`
	// Specifies whether to delete all contexts in the current session
	// before the new ones are activated.
	ResetContexts bool `protobuf:"varint,4,opt,name=reset_contexts,json=resetContexts,proto3" json:"reset_contexts,omitempty"`
	// Additional session entity types to replace or extend developer
	// entity types with. The entity synonyms apply to all languages and persist
	// for the session of this query.
	SessionEntityTypes []*SessionEntityType `protobuf:"bytes,5,rep,name=session_entity_types,json=sessionEntityTypes,proto3" json:"session_entity_types,omitempty"`
	// This field can be used to pass custom data to your webhook.
	// Arbitrary JSON objects are supported.
	// If supplied, the value is used to populate the
	// `WebhookRequest.original_detect_intent_request.payload`
	// field sent to your webhook.
	Payload *_struct.Struct `protobuf:"bytes,6,opt,name=payload,proto3" json:"payload,omitempty"`
	// Configures the type of sentiment analysis to perform. If not
	// provided, sentiment analysis is not performed.
	SentimentAnalysisRequestConfig *SentimentAnalysisRequestConfig `protobuf:"bytes,10,opt,name=sentiment_analysis_request_config,json=sentimentAnalysisRequestConfig,proto3" json:"sentiment_analysis_request_config,omitempty"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *QueryParameters) Reset()         { *m = QueryParameters{} }
func (m *QueryParameters) String() string { return proto.CompactTextString(m) }
func (*QueryParameters) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{2}) within that descriptor.
func (*QueryParameters) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{2}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *QueryParameters) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_QueryParameters.Unmarshal(m, b)
}
func (m *QueryParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_QueryParameters.Marshal(b, m, deterministic)
}
func (m *QueryParameters) XXX_Merge(src proto.Message) {
	xxx_messageInfo_QueryParameters.Merge(m, src)
}
func (m *QueryParameters) XXX_Size() int {
	return xxx_messageInfo_QueryParameters.Size(m)
}
func (m *QueryParameters) XXX_DiscardUnknown() {
	xxx_messageInfo_QueryParameters.DiscardUnknown(m)
}

var xxx_messageInfo_QueryParameters proto.InternalMessageInfo

// The getters below are nil-receiver safe: calling them on a nil
// *QueryParameters returns the field's zero value.

func (m *QueryParameters) GetTimeZone() string {
	if m != nil {
		return m.TimeZone
	}
	return ""
}

func (m *QueryParameters) GetGeoLocation() *latlng.LatLng {
	if m != nil {
		return m.GeoLocation
	}
	return nil
}

func (m *QueryParameters) GetContexts() []*Context {
	if m != nil {
		return m.Contexts
	}
	return nil
}

func (m *QueryParameters) GetResetContexts() bool {
	if m != nil {
		return m.ResetContexts
	}
	return false
}

func (m *QueryParameters) GetSessionEntityTypes() []*SessionEntityType {
	if m != nil {
		return m.SessionEntityTypes
	}
	return nil
}

func (m *QueryParameters) GetPayload() *_struct.Struct {
	if m != nil {
		return m.Payload
	}
	return nil
}

func (m *QueryParameters) GetSentimentAnalysisRequestConfig() *SentimentAnalysisRequestConfig {
	if m != nil {
		return m.SentimentAnalysisRequestConfig
	}
	return nil
}
// Represents the query input. It can contain either:
//
// 1. An audio config which
// instructs the speech recognizer how to process the speech audio.
//
// 2. A conversational query in the form of text,.
//
// 3. An event that specifies which intent to trigger.
type QueryInput struct {
	// Required. The input specification.
	//
	// Types that are valid to be assigned to Input:
	// *QueryInput_AudioConfig
	// *QueryInput_Text
	// *QueryInput_Event
	Input isQueryInput_Input `protobuf_oneof:"input"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *QueryInput) Reset()         { *m = QueryInput{} }
func (m *QueryInput) String() string { return proto.CompactTextString(m) }
func (*QueryInput) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{3}) within that descriptor.
func (*QueryInput) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{3}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *QueryInput) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_QueryInput.Unmarshal(m, b)
}
func (m *QueryInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_QueryInput.Marshal(b, m, deterministic)
}
func (m *QueryInput) XXX_Merge(src proto.Message) {
	xxx_messageInfo_QueryInput.Merge(m, src)
}
func (m *QueryInput) XXX_Size() int {
	return xxx_messageInfo_QueryInput.Size(m)
}
func (m *QueryInput) XXX_DiscardUnknown() {
	xxx_messageInfo_QueryInput.DiscardUnknown(m)
}

var xxx_messageInfo_QueryInput proto.InternalMessageInfo

// isQueryInput_Input is the sealed interface implemented by each wrapper
// type of the `input` oneof; exactly one wrapper is set at a time.
type isQueryInput_Input interface {
	isQueryInput_Input()
}

// Wrapper types, one per oneof case.
type QueryInput_AudioConfig struct {
	AudioConfig *InputAudioConfig `protobuf:"bytes,1,opt,name=audio_config,json=audioConfig,proto3,oneof"`
}
type QueryInput_Text struct {
	Text *TextInput `protobuf:"bytes,2,opt,name=text,proto3,oneof"`
}
type QueryInput_Event struct {
	Event *EventInput `protobuf:"bytes,3,opt,name=event,proto3,oneof"`
}

func (*QueryInput_AudioConfig) isQueryInput_Input() {}
func (*QueryInput_Text) isQueryInput_Input()        {}
func (*QueryInput_Event) isQueryInput_Input()       {}

// GetInput returns the populated oneof wrapper, or nil if none is set.
func (m *QueryInput) GetInput() isQueryInput_Input {
	if m != nil {
		return m.Input
	}
	return nil
}

// The case-specific getters below return the contained value when their
// wrapper is the active oneof case, and nil otherwise (including nil m).

func (m *QueryInput) GetAudioConfig() *InputAudioConfig {
	if x, ok := m.GetInput().(*QueryInput_AudioConfig); ok {
		return x.AudioConfig
	}
	return nil
}

func (m *QueryInput) GetText() *TextInput {
	if x, ok := m.GetInput().(*QueryInput_Text); ok {
		return x.Text
	}
	return nil
}

func (m *QueryInput) GetEvent() *EventInput {
	if x, ok := m.GetInput().(*QueryInput_Event); ok {
		return x.Event
	}
	return nil
}

// XXX_OneofWrappers is for the internal use of the proto package.
func (*QueryInput) XXX_OneofWrappers() []interface{} {
	return []interface{}{
		(*QueryInput_AudioConfig)(nil),
		(*QueryInput_Text)(nil),
		(*QueryInput_Event)(nil),
	}
}
// Represents the result of conversational query or event processing.
type QueryResult struct {
	// The original conversational query text:
	//
	// - If natural language text was provided as input, `query_text` contains
	// a copy of the input.
	// - If natural language speech audio was provided as input, `query_text`
	// contains the speech recognition result. If speech recognizer produced
	// multiple alternatives, a particular one is picked.
	// - If automatic spell correction is enabled, `query_text` will contain the
	// corrected user input.
	QueryText string `protobuf:"bytes,1,opt,name=query_text,json=queryText,proto3" json:"query_text,omitempty"`
	// The language that was triggered during intent detection.
	// See [Language
	// Support](https://cloud.google.com/dialogflow/docs/reference/language)
	// for a list of the currently supported language codes.
	LanguageCode string `protobuf:"bytes,15,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"`
	// The Speech recognition confidence between 0.0 and 1.0. A higher number
	// indicates an estimated greater likelihood that the recognized words are
	// correct. The default of 0.0 is a sentinel value indicating that confidence
	// was not set.
	//
	// This field is not guaranteed to be accurate or set. In particular this
	// field isn't set for StreamingDetectIntent since the streaming endpoint has
	// separate confidence estimates per portion of the audio in
	// StreamingRecognitionResult.
	SpeechRecognitionConfidence float32 `protobuf:"fixed32,2,opt,name=speech_recognition_confidence,json=speechRecognitionConfidence,proto3" json:"speech_recognition_confidence,omitempty"`
	// The action name from the matched intent.
	Action string `protobuf:"bytes,3,opt,name=action,proto3" json:"action,omitempty"`
	// The collection of extracted parameters.
	Parameters *_struct.Struct `protobuf:"bytes,4,opt,name=parameters,proto3" json:"parameters,omitempty"`
	// This field is set to:
	//
	// - `false` if the matched intent has required parameters and not all of
	// the required parameter values have been collected.
	// - `true` if all required parameter values have been collected, or if the
	// matched intent doesn't contain any required parameters.
	AllRequiredParamsPresent bool `protobuf:"varint,5,opt,name=all_required_params_present,json=allRequiredParamsPresent,proto3" json:"all_required_params_present,omitempty"`
	// The text to be pronounced to the user or shown on the screen.
	// Note: This is a legacy field, `fulfillment_messages` should be preferred.
	FulfillmentText string `protobuf:"bytes,6,opt,name=fulfillment_text,json=fulfillmentText,proto3" json:"fulfillment_text,omitempty"`
	// The collection of rich messages to present to the user.
	FulfillmentMessages []*Intent_Message `protobuf:"bytes,7,rep,name=fulfillment_messages,json=fulfillmentMessages,proto3" json:"fulfillment_messages,omitempty"`
	// If the query was fulfilled by a webhook call, this field is set to the
	// value of the `source` field returned in the webhook response.
	WebhookSource string `protobuf:"bytes,8,opt,name=webhook_source,json=webhookSource,proto3" json:"webhook_source,omitempty"`
	// If the query was fulfilled by a webhook call, this field is set to the
	// value of the `payload` field returned in the webhook response.
	WebhookPayload *_struct.Struct `protobuf:"bytes,9,opt,name=webhook_payload,json=webhookPayload,proto3" json:"webhook_payload,omitempty"`
	// The collection of output contexts. If applicable,
	// `output_contexts.parameters` contains entries with name
	// `<parameter name>.original` containing the original parameter values
	// before the query.
	OutputContexts []*Context `protobuf:"bytes,10,rep,name=output_contexts,json=outputContexts,proto3" json:"output_contexts,omitempty"`
	// The intent that matched the conversational query. Some, not
	// all fields are filled in this message, including but not limited to:
	// `name`, `display_name`, `end_interaction` and `is_fallback`.
	Intent *Intent `protobuf:"bytes,11,opt,name=intent,proto3" json:"intent,omitempty"`
	// The intent detection confidence. Values range from 0.0
	// (completely uncertain) to 1.0 (completely certain).
	// This value is for informational purpose only and is only used to
	// help match the best intent within the classification threshold.
	// This value may change for the same end-user expression at any time due to a
	// model retraining or change in implementation.
	// If there are `multiple knowledge_answers` messages, this value is set to
	// the greatest `knowledgeAnswers.match_confidence` value in the list.
	IntentDetectionConfidence float32 `protobuf:"fixed32,12,opt,name=intent_detection_confidence,json=intentDetectionConfidence,proto3" json:"intent_detection_confidence,omitempty"`
	// Free-form diagnostic information for the associated detect intent request.
	// The fields of this data can change without notice, so you should not write
	// code that depends on its structure.
	// The data may contain:
	//
	// - webhook call latency
	// - webhook errors
	DiagnosticInfo *_struct.Struct `protobuf:"bytes,14,opt,name=diagnostic_info,json=diagnosticInfo,proto3" json:"diagnostic_info,omitempty"`
	// The sentiment analysis result, which depends on the
	// `sentiment_analysis_request_config` specified in the request.
	SentimentAnalysisResult *SentimentAnalysisResult `protobuf:"bytes,17,opt,name=sentiment_analysis_result,json=sentimentAnalysisResult,proto3" json:"sentiment_analysis_result,omitempty"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *QueryResult) Reset()         { *m = QueryResult{} }
func (m *QueryResult) String() string { return proto.CompactTextString(m) }
func (*QueryResult) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{4}) within that descriptor.
func (*QueryResult) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{4}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *QueryResult) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_QueryResult.Unmarshal(m, b)
}
func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_QueryResult.Marshal(b, m, deterministic)
}
func (m *QueryResult) XXX_Merge(src proto.Message) {
	xxx_messageInfo_QueryResult.Merge(m, src)
}
func (m *QueryResult) XXX_Size() int {
	return xxx_messageInfo_QueryResult.Size(m)
}
func (m *QueryResult) XXX_DiscardUnknown() {
	xxx_messageInfo_QueryResult.DiscardUnknown(m)
}

var xxx_messageInfo_QueryResult proto.InternalMessageInfo

// The getters below are nil-receiver safe: calling them on a nil
// *QueryResult returns the field's zero value.

func (m *QueryResult) GetQueryText() string {
	if m != nil {
		return m.QueryText
	}
	return ""
}

func (m *QueryResult) GetLanguageCode() string {
	if m != nil {
		return m.LanguageCode
	}
	return ""
}

func (m *QueryResult) GetSpeechRecognitionConfidence() float32 {
	if m != nil {
		return m.SpeechRecognitionConfidence
	}
	return 0
}

func (m *QueryResult) GetAction() string {
	if m != nil {
		return m.Action
	}
	return ""
}

func (m *QueryResult) GetParameters() *_struct.Struct {
	if m != nil {
		return m.Parameters
	}
	return nil
}

func (m *QueryResult) GetAllRequiredParamsPresent() bool {
	if m != nil {
		return m.AllRequiredParamsPresent
	}
	return false
}

func (m *QueryResult) GetFulfillmentText() string {
	if m != nil {
		return m.FulfillmentText
	}
	return ""
}

func (m *QueryResult) GetFulfillmentMessages() []*Intent_Message {
	if m != nil {
		return m.FulfillmentMessages
	}
	return nil
}

func (m *QueryResult) GetWebhookSource() string {
	if m != nil {
		return m.WebhookSource
	}
	return ""
}

func (m *QueryResult) GetWebhookPayload() *_struct.Struct {
	if m != nil {
		return m.WebhookPayload
	}
	return nil
}

func (m *QueryResult) GetOutputContexts() []*Context {
	if m != nil {
		return m.OutputContexts
	}
	return nil
}

func (m *QueryResult) GetIntent() *Intent {
	if m != nil {
		return m.Intent
	}
	return nil
}

func (m *QueryResult) GetIntentDetectionConfidence() float32 {
	if m != nil {
		return m.IntentDetectionConfidence
	}
	return 0
}

func (m *QueryResult) GetDiagnosticInfo() *_struct.Struct {
	if m != nil {
		return m.DiagnosticInfo
	}
	return nil
}

func (m *QueryResult) GetSentimentAnalysisResult() *SentimentAnalysisResult {
	if m != nil {
		return m.SentimentAnalysisResult
	}
	return nil
}
// The top-level message sent by the client to the
// [Sessions.StreamingDetectIntent][google.cloud.dialogflow.v2.Sessions.StreamingDetectIntent] method.
//
// Multiple request messages should be sent in order:
//
// 1. The first message must contain
// [session][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.session],
// [query_input][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.query_input] plus optionally
// [query_params][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.query_params]. If the client
// wants to receive an audio response, it should also contain
// [output_audio_config][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.output_audio_config].
// The message must not contain
// [input_audio][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.input_audio].
// 2. If [query_input][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.query_input] was set to
// [query_input.audio_config][google.cloud.dialogflow.v2.InputAudioConfig], all subsequent
// messages must contain
// [input_audio][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.input_audio] to continue with
// Speech recognition.
// If you decide to rather detect an intent from text input after you
// already started Speech recognition, please send a message with
// [query_input.text][google.cloud.dialogflow.v2.QueryInput.text].
//
// However, note that:
//
// * Dialogflow will bill you for the audio duration so far.
// * Dialogflow discards all Speech recognition results in favor of the
// input text.
// * Dialogflow will use the language code from the first message.
//
// After you sent all input, you must half-close or abort the request stream.
type StreamingDetectIntentRequest struct {
	// Required. The name of the session the query is sent to.
	// Format of the session name:
	// `projects/<Project ID>/agent/sessions/<Session ID>`, or
	// `projects/<Project ID>/agent/environments/<Environment ID>/users/<User
	// ID>/sessions/<Session ID>`. If `Environment ID` is not specified, we assume
	// default 'draft' environment. If `User ID` is not specified, we are using
	// "-". It's up to the API caller to choose an appropriate `Session ID` and
	// `User Id`. They can be a random number or some type of user and session
	// identifiers (preferably hashed). The length of the `Session ID` and
	// `User ID` must not exceed 36 characters.
	Session string `protobuf:"bytes,1,opt,name=session,proto3" json:"session,omitempty"`
	// The parameters of this query.
	QueryParams *QueryParameters `protobuf:"bytes,2,opt,name=query_params,json=queryParams,proto3" json:"query_params,omitempty"`
	// Required. The input specification. It can be set to:
	//
	// 1. an audio config which instructs the speech recognizer how to process
	// the speech audio,
	//
	// 2. a conversational query in the form of text, or
	//
	// 3. an event that specifies which intent to trigger.
	QueryInput *QueryInput `protobuf:"bytes,3,opt,name=query_input,json=queryInput,proto3" json:"query_input,omitempty"`
	// Please use [InputAudioConfig.single_utterance][google.cloud.dialogflow.v2.InputAudioConfig.single_utterance] instead.
	// If `false` (default), recognition does not cease until
	// the client closes the stream. If `true`, the recognizer will detect a
	// single spoken utterance in input audio. Recognition ceases when it detects
	// the audio's voice has stopped or paused. In this case, once a detected
	// intent is received, the client should close the stream and start a new
	// request with a new stream as needed.
	// This setting is ignored when `query_input` is a piece of text or an event.
	SingleUtterance bool `protobuf:"varint,4,opt,name=single_utterance,json=singleUtterance,proto3" json:"single_utterance,omitempty"` // Deprecated: Do not use.
	// Instructs the speech synthesizer how to generate the output
	// audio. If this field is not set and agent-level speech synthesizer is not
	// configured, no output audio is generated.
	OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,5,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"`
	// Mask for [output_audio_config][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.output_audio_config] indicating which settings in this
	// request-level config should override speech synthesizer settings defined at
	// agent-level.
	//
	// If unspecified or empty, [output_audio_config][google.cloud.dialogflow.v2.StreamingDetectIntentRequest.output_audio_config] replaces the agent-level
	// config in its entirety.
	OutputAudioConfigMask *field_mask.FieldMask `protobuf:"bytes,7,opt,name=output_audio_config_mask,json=outputAudioConfigMask,proto3" json:"output_audio_config_mask,omitempty"`
	// The input audio content to be recognized. Must be sent if
	// `query_input` was set to a streaming input audio config. The complete audio
	// over all streaming messages must not exceed 1 minute.
	InputAudio []byte `protobuf:"bytes,6,opt,name=input_audio,json=inputAudio,proto3" json:"input_audio,omitempty"`
	// Internal bookkeeping for the proto runtime; excluded from JSON output.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Standard proto.Message plumbing generated by protoc-gen-go.
func (m *StreamingDetectIntentRequest) Reset()         { *m = StreamingDetectIntentRequest{} }
func (m *StreamingDetectIntentRequest) String() string { return proto.CompactTextString(m) }
func (*StreamingDetectIntentRequest) ProtoMessage()    {}

// Descriptor returns the compressed file descriptor bytes and this
// message's index ([]int{5}) within that descriptor.
func (*StreamingDetectIntentRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_2f8892c06d516e44, []int{5}
}

// The XXX_* methods delegate (un)marshaling, merging, and size accounting
// to the table-driven implementation in the proto package.
func (m *StreamingDetectIntentRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_StreamingDetectIntentRequest.Unmarshal(m, b)
}
func (m *StreamingDetectIntentRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_StreamingDetectIntentRequest.Marshal(b, m, deterministic)
}
func (m *StreamingDetectIntentRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_StreamingDetectIntentRequest.Merge(m, src)
}
func (m *StreamingDetectIntentRequest) XXX_Size() int {
	return xxx_messageInfo_StreamingDetectIntentRequest.Size(m)
}
func (m *StreamingDetectIntentRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_StreamingDetectIntentRequest.DiscardUnknown(m)
}

var xxx_messageInfo_StreamingDetectIntentRequest proto.InternalMessageInfo

// The getters below are nil-receiver safe: calling them on a nil
// *StreamingDetectIntentRequest returns the field's zero value.

func (m *StreamingDetectIntentRequest) GetSession() string {
	if m != nil {
		return m.Session
	}
	return ""
}

func (m *StreamingDetectIntentRequest) GetQueryParams() *QueryParameters {
	if m != nil {
		return m.QueryParams
	}
	return nil
}

func (m *StreamingDetectIntentRequest) GetQueryInput() *QueryInput {
	if m != nil {
		return m.QueryInput
	}
	return nil
}

// Deprecated: Do not use.
func (m *StreamingDetectIntentRequest) GetSingleUtterance() bool {
	if m != nil {
		return m.SingleUtterance
	}
	return false
}

func (m *StreamingDetectIntentRequest) GetOutputAudioConfig() *OutputAudioConfig {
	if m != nil {
		return m.OutputAudioConfig
	}
	return nil
}

func (m *StreamingDetectIntentRequest) GetOutputAudioConfigMask() *field_mask.FieldMask {
	if m != nil {
		return m.OutputAudioConfigMask
	}
	return nil
}

func (m *StreamingDetectIntentRequest) GetInputAudio() []byte {
	if m != nil {
		return m.InputAudio
	}
	return nil
}
// The top-level message returned from the
// `StreamingDetectIntent` method.
//
// Multiple response messages can be returned in order:
//
// 1. If the input was set to streaming audio, the first one or more messages
// contain `recognition_result`. Each `recognition_result` represents a more
// complete transcript of what the user said. The last `recognition_result`
// has `is_final` set to `true`.
//
// 2. The next message contains `response_id`, `query_result`
// and optionally `webhook_status` if a WebHook was called.
type StreamingDetectIntentResponse struct {
// The unique identifier of the response. It can be used to
// locate a response in the training example set or for reporting issues.
ResponseId string `protobuf:"bytes,1,opt,name=response_id,json=responseId,proto3" json:"response_id,omitempty"`
// The result of speech recognition.
RecognitionResult *StreamingRecognitionResult `protobuf:"bytes,2,opt,name=recognition_result,json=recognitionResult,proto3" json:"recognition_result,omitempty"`
// The result of the conversational query or event processing.
QueryResult *QueryResult `protobuf:"bytes,3,opt,name=query_result,json=queryResult,proto3" json:"query_result,omitempty"`
// Specifies the status of the webhook request.
WebhookStatus *status.Status `protobuf:"bytes,4,opt,name=webhook_status,json=webhookStatus,proto3" json:"webhook_status,omitempty"`
// The audio data bytes encoded as specified in the request.
// Note: The output audio is generated based on the values of default platform
// text responses found in the `query_result.fulfillment_messages` field. If
// multiple default text responses exist, they will be concatenated when
// generating audio. If no default platform text responses exist, the
// generated audio content will be empty.
OutputAudio []byte `protobuf:"bytes,5,opt,name=output_audio,json=outputAudio,proto3" json:"output_audio,omitempty"`
// The config used by the speech synthesizer to generate the output audio.
OutputAudioConfig *OutputAudioConfig `protobuf:"bytes,6,opt,name=output_audio_config,json=outputAudioConfig,proto3" json:"output_audio_config,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for StreamingDetectIntentResponse: identity
// methods plus XXX_* hooks into the table-driven proto runtime.
func (m *StreamingDetectIntentResponse) Reset() { *m = StreamingDetectIntentResponse{} }
func (m *StreamingDetectIntentResponse) String() string { return proto.CompactTextString(m) }
func (*StreamingDetectIntentResponse) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*StreamingDetectIntentResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{6}
}
func (m *StreamingDetectIntentResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StreamingDetectIntentResponse.Unmarshal(m, b)
}
func (m *StreamingDetectIntentResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_StreamingDetectIntentResponse.Marshal(b, m, deterministic)
}
func (m *StreamingDetectIntentResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_StreamingDetectIntentResponse.Merge(m, src)
}
func (m *StreamingDetectIntentResponse) XXX_Size() int {
return xxx_messageInfo_StreamingDetectIntentResponse.Size(m)
}
func (m *StreamingDetectIntentResponse) XXX_DiscardUnknown() {
xxx_messageInfo_StreamingDetectIntentResponse.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_StreamingDetectIntentResponse proto.InternalMessageInfo
// GetResponseId returns the unique response identifier, or "" when m is nil.
func (m *StreamingDetectIntentResponse) GetResponseId() string {
	if m == nil {
		return ""
	}
	return m.ResponseId
}

// GetRecognitionResult returns the speech recognition result, or nil when m is nil.
func (m *StreamingDetectIntentResponse) GetRecognitionResult() *StreamingRecognitionResult {
	if m == nil {
		return nil
	}
	return m.RecognitionResult
}

// GetQueryResult returns the query result, or nil when m is nil.
func (m *StreamingDetectIntentResponse) GetQueryResult() *QueryResult {
	if m == nil {
		return nil
	}
	return m.QueryResult
}

// GetWebhookStatus returns the webhook status, or nil when m is nil.
func (m *StreamingDetectIntentResponse) GetWebhookStatus() *status.Status {
	if m == nil {
		return nil
	}
	return m.WebhookStatus
}

// GetOutputAudio returns the synthesized audio bytes, or nil when m is nil.
func (m *StreamingDetectIntentResponse) GetOutputAudio() []byte {
	if m == nil {
		return nil
	}
	return m.OutputAudio
}

// GetOutputAudioConfig returns the output audio config, or nil when m is nil.
func (m *StreamingDetectIntentResponse) GetOutputAudioConfig() *OutputAudioConfig {
	if m == nil {
		return nil
	}
	return m.OutputAudioConfig
}
// Contains a speech recognition result corresponding to a portion of the audio
// that is currently being processed or an indication that this is the end
// of the single requested utterance.
//
// Example:
//
// 1. transcript: "tube"
//
// 2. transcript: "to be a"
//
// 3. transcript: "to be"
//
// 4. transcript: "to be or not to be"
// is_final: true
//
// 5. transcript: " that's"
//
// 6. transcript: " that is"
//
// 7. message_type: `END_OF_SINGLE_UTTERANCE`
//
// 8. transcript: " that is the question"
// is_final: true
//
// Only two of the responses contain final results (#4 and #8 indicated by
// `is_final: true`). Concatenating these generates the full transcript: "to be
// or not to be that is the question".
//
// In each response we populate:
//
// * for `TRANSCRIPT`: `transcript` and possibly `is_final`.
//
// * for `END_OF_SINGLE_UTTERANCE`: only `message_type`.
type StreamingRecognitionResult struct {
// Type of the result message.
MessageType StreamingRecognitionResult_MessageType `protobuf:"varint,1,opt,name=message_type,json=messageType,proto3,enum=google.cloud.dialogflow.v2.StreamingRecognitionResult_MessageType" json:"message_type,omitempty"`
// Transcript text representing the words that the user spoke.
// Populated if and only if `message_type` = `TRANSCRIPT`.
Transcript string `protobuf:"bytes,2,opt,name=transcript,proto3" json:"transcript,omitempty"`
// If `false`, the `StreamingRecognitionResult` represents an
// interim result that may change. If `true`, the recognizer will not return
// any further hypotheses about this piece of the audio. May only be populated
// for `message_type` = `TRANSCRIPT`.
IsFinal bool `protobuf:"varint,3,opt,name=is_final,json=isFinal,proto3" json:"is_final,omitempty"`
// The Speech confidence between 0.0 and 1.0 for the current portion of audio.
// A higher number indicates an estimated greater likelihood that the
// recognized words are correct. The default of 0.0 is a sentinel value
// indicating that confidence was not set.
//
// This field is typically only provided if `is_final` is true and you should
// not rely on it being accurate or even set.
Confidence float32 `protobuf:"fixed32,4,opt,name=confidence,proto3" json:"confidence,omitempty"`
// Word-specific information for the words recognized by Speech in
// [transcript][google.cloud.dialogflow.v2.StreamingRecognitionResult.transcript]. Populated if and only if `message_type` = `TRANSCRIPT` and
// [InputAudioConfig.enable_word_info] is set.
SpeechWordInfo []*SpeechWordInfo `protobuf:"bytes,7,rep,name=speech_word_info,json=speechWordInfo,proto3" json:"speech_word_info,omitempty"`
// Time offset of the end of this Speech recognition result relative to the
// beginning of the audio. Only populated for `message_type` = `TRANSCRIPT`.
SpeechEndOffset *duration.Duration `protobuf:"bytes,8,opt,name=speech_end_offset,json=speechEndOffset,proto3" json:"speech_end_offset,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for StreamingRecognitionResult: identity
// methods plus XXX_* hooks into the table-driven proto runtime.
func (m *StreamingRecognitionResult) Reset() { *m = StreamingRecognitionResult{} }
func (m *StreamingRecognitionResult) String() string { return proto.CompactTextString(m) }
func (*StreamingRecognitionResult) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*StreamingRecognitionResult) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{7}
}
func (m *StreamingRecognitionResult) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StreamingRecognitionResult.Unmarshal(m, b)
}
func (m *StreamingRecognitionResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_StreamingRecognitionResult.Marshal(b, m, deterministic)
}
func (m *StreamingRecognitionResult) XXX_Merge(src proto.Message) {
xxx_messageInfo_StreamingRecognitionResult.Merge(m, src)
}
func (m *StreamingRecognitionResult) XXX_Size() int {
return xxx_messageInfo_StreamingRecognitionResult.Size(m)
}
func (m *StreamingRecognitionResult) XXX_DiscardUnknown() {
xxx_messageInfo_StreamingRecognitionResult.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_StreamingRecognitionResult proto.InternalMessageInfo
// GetMessageType returns the result message type; the UNSPECIFIED enum value
// is returned when m is nil.
func (m *StreamingRecognitionResult) GetMessageType() StreamingRecognitionResult_MessageType {
	if m == nil {
		return StreamingRecognitionResult_MESSAGE_TYPE_UNSPECIFIED
	}
	return m.MessageType
}

// GetTranscript returns the transcript text, or "" when m is nil.
func (m *StreamingRecognitionResult) GetTranscript() string {
	if m == nil {
		return ""
	}
	return m.Transcript
}

// GetIsFinal reports whether this is a final result; false when m is nil.
func (m *StreamingRecognitionResult) GetIsFinal() bool {
	if m == nil {
		return false
	}
	return m.IsFinal
}

// GetConfidence returns the recognition confidence, or 0 when m is nil.
func (m *StreamingRecognitionResult) GetConfidence() float32 {
	if m == nil {
		return 0
	}
	return m.Confidence
}

// GetSpeechWordInfo returns per-word recognition info, or nil when m is nil.
func (m *StreamingRecognitionResult) GetSpeechWordInfo() []*SpeechWordInfo {
	if m == nil {
		return nil
	}
	return m.SpeechWordInfo
}

// GetSpeechEndOffset returns the end-of-result time offset, or nil when m is nil.
func (m *StreamingRecognitionResult) GetSpeechEndOffset() *duration.Duration {
	if m == nil {
		return nil
	}
	return m.SpeechEndOffset
}
// Represents the natural language text to be processed.
type TextInput struct {
// Required. The UTF-8 encoded natural language text to be processed.
// Text length must not exceed 256 characters.
Text string `protobuf:"bytes,1,opt,name=text,proto3" json:"text,omitempty"`
// Required. The language of this conversational query. See [Language
// Support](https://cloud.google.com/dialogflow/docs/reference/language)
// for a list of the currently supported language codes. Note that queries in
// the same session do not necessarily need to specify the same language.
LanguageCode string `protobuf:"bytes,2,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for TextInput: identity methods plus XXX_*
// hooks into the table-driven proto runtime.
func (m *TextInput) Reset() { *m = TextInput{} }
func (m *TextInput) String() string { return proto.CompactTextString(m) }
func (*TextInput) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*TextInput) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{8}
}
func (m *TextInput) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_TextInput.Unmarshal(m, b)
}
func (m *TextInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_TextInput.Marshal(b, m, deterministic)
}
func (m *TextInput) XXX_Merge(src proto.Message) {
xxx_messageInfo_TextInput.Merge(m, src)
}
func (m *TextInput) XXX_Size() int {
return xxx_messageInfo_TextInput.Size(m)
}
func (m *TextInput) XXX_DiscardUnknown() {
xxx_messageInfo_TextInput.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_TextInput proto.InternalMessageInfo
// GetText returns the query text, or "" when m is nil.
func (m *TextInput) GetText() string {
	if m == nil {
		return ""
	}
	return m.Text
}

// GetLanguageCode returns the query language code, or "" when m is nil.
func (m *TextInput) GetLanguageCode() string {
	if m == nil {
		return ""
	}
	return m.LanguageCode
}
// Events allow for matching intents by event name instead of the natural
// language input. For instance, input `<event: { name: "welcome_event",
// parameters: { name: "Sam" } }>` can trigger a personalized welcome response.
// The parameter `name` may be used by the agent in the response:
// `"Hello #welcome_event.name! What can I do for you today?"`.
type EventInput struct {
// Required. The unique identifier of the event.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The collection of parameters associated with the event.
Parameters *_struct.Struct `protobuf:"bytes,2,opt,name=parameters,proto3" json:"parameters,omitempty"`
// Required. The language of this query. See [Language
// Support](https://cloud.google.com/dialogflow/docs/reference/language)
// for a list of the currently supported language codes. Note that queries in
// the same session do not necessarily need to specify the same language.
LanguageCode string `protobuf:"bytes,3,opt,name=language_code,json=languageCode,proto3" json:"language_code,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for EventInput: identity methods plus XXX_*
// hooks into the table-driven proto runtime.
func (m *EventInput) Reset() { *m = EventInput{} }
func (m *EventInput) String() string { return proto.CompactTextString(m) }
func (*EventInput) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*EventInput) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{9}
}
func (m *EventInput) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_EventInput.Unmarshal(m, b)
}
func (m *EventInput) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_EventInput.Marshal(b, m, deterministic)
}
func (m *EventInput) XXX_Merge(src proto.Message) {
xxx_messageInfo_EventInput.Merge(m, src)
}
func (m *EventInput) XXX_Size() int {
return xxx_messageInfo_EventInput.Size(m)
}
func (m *EventInput) XXX_DiscardUnknown() {
xxx_messageInfo_EventInput.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_EventInput proto.InternalMessageInfo
// GetName returns the event identifier, or "" when m is nil.
func (m *EventInput) GetName() string {
	if m == nil {
		return ""
	}
	return m.Name
}

// GetParameters returns the event parameters, or nil when m is nil.
func (m *EventInput) GetParameters() *_struct.Struct {
	if m == nil {
		return nil
	}
	return m.Parameters
}

// GetLanguageCode returns the query language code, or "" when m is nil.
func (m *EventInput) GetLanguageCode() string {
	if m == nil {
		return ""
	}
	return m.LanguageCode
}
// Configures the types of sentiment analysis to perform.
type SentimentAnalysisRequestConfig struct {
// Instructs the service to perform sentiment analysis on
// `query_text`. If not provided, sentiment analysis is not performed on
// `query_text`.
AnalyzeQueryTextSentiment bool `protobuf:"varint,1,opt,name=analyze_query_text_sentiment,json=analyzeQueryTextSentiment,proto3" json:"analyze_query_text_sentiment,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for SentimentAnalysisRequestConfig: identity
// methods plus XXX_* hooks into the table-driven proto runtime.
func (m *SentimentAnalysisRequestConfig) Reset() { *m = SentimentAnalysisRequestConfig{} }
func (m *SentimentAnalysisRequestConfig) String() string { return proto.CompactTextString(m) }
func (*SentimentAnalysisRequestConfig) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*SentimentAnalysisRequestConfig) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{10}
}
func (m *SentimentAnalysisRequestConfig) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SentimentAnalysisRequestConfig.Unmarshal(m, b)
}
func (m *SentimentAnalysisRequestConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SentimentAnalysisRequestConfig.Marshal(b, m, deterministic)
}
func (m *SentimentAnalysisRequestConfig) XXX_Merge(src proto.Message) {
xxx_messageInfo_SentimentAnalysisRequestConfig.Merge(m, src)
}
func (m *SentimentAnalysisRequestConfig) XXX_Size() int {
return xxx_messageInfo_SentimentAnalysisRequestConfig.Size(m)
}
func (m *SentimentAnalysisRequestConfig) XXX_DiscardUnknown() {
xxx_messageInfo_SentimentAnalysisRequestConfig.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_SentimentAnalysisRequestConfig proto.InternalMessageInfo
// GetAnalyzeQueryTextSentiment reports whether sentiment analysis of the query
// text was requested; false when m is nil.
func (m *SentimentAnalysisRequestConfig) GetAnalyzeQueryTextSentiment() bool {
	if m == nil {
		return false
	}
	return m.AnalyzeQueryTextSentiment
}
// The result of sentiment analysis as configured by
// `sentiment_analysis_request_config`.
type SentimentAnalysisResult struct {
// The sentiment analysis result for `query_text`.
QueryTextSentiment *Sentiment `protobuf:"bytes,1,opt,name=query_text_sentiment,json=queryTextSentiment,proto3" json:"query_text_sentiment,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for SentimentAnalysisResult: identity
// methods plus XXX_* hooks into the table-driven proto runtime.
func (m *SentimentAnalysisResult) Reset() { *m = SentimentAnalysisResult{} }
func (m *SentimentAnalysisResult) String() string { return proto.CompactTextString(m) }
func (*SentimentAnalysisResult) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*SentimentAnalysisResult) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{11}
}
func (m *SentimentAnalysisResult) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SentimentAnalysisResult.Unmarshal(m, b)
}
func (m *SentimentAnalysisResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_SentimentAnalysisResult.Marshal(b, m, deterministic)
}
func (m *SentimentAnalysisResult) XXX_Merge(src proto.Message) {
xxx_messageInfo_SentimentAnalysisResult.Merge(m, src)
}
func (m *SentimentAnalysisResult) XXX_Size() int {
return xxx_messageInfo_SentimentAnalysisResult.Size(m)
}
func (m *SentimentAnalysisResult) XXX_DiscardUnknown() {
xxx_messageInfo_SentimentAnalysisResult.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_SentimentAnalysisResult proto.InternalMessageInfo
// GetQueryTextSentiment returns the sentiment computed for the query text, or
// nil when m is nil.
func (m *SentimentAnalysisResult) GetQueryTextSentiment() *Sentiment {
	if m == nil {
		return nil
	}
	return m.QueryTextSentiment
}
// The sentiment, such as positive/negative feeling or association, for a unit
// of analysis, such as the query text.
type Sentiment struct {
// Sentiment score between -1.0 (negative sentiment) and 1.0 (positive
// sentiment).
Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"`
// A non-negative number in the [0, +inf) range, which represents the absolute
// magnitude of sentiment, regardless of score (positive or negative).
Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude,proto3" json:"magnitude,omitempty"`
// Internal protobuf bookkeeping; excluded from JSON output via `json:"-"`.
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
// Standard protoc-gen-go plumbing for Sentiment: identity methods plus XXX_*
// hooks into the table-driven proto runtime.
func (m *Sentiment) Reset() { *m = Sentiment{} }
func (m *Sentiment) String() string { return proto.CompactTextString(m) }
func (*Sentiment) ProtoMessage() {}
// Descriptor returns the gzipped file descriptor and this message's index path within it.
func (*Sentiment) Descriptor() ([]byte, []int) {
return fileDescriptor_2f8892c06d516e44, []int{12}
}
func (m *Sentiment) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Sentiment.Unmarshal(m, b)
}
func (m *Sentiment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Sentiment.Marshal(b, m, deterministic)
}
func (m *Sentiment) XXX_Merge(src proto.Message) {
xxx_messageInfo_Sentiment.Merge(m, src)
}
func (m *Sentiment) XXX_Size() int {
return xxx_messageInfo_Sentiment.Size(m)
}
func (m *Sentiment) XXX_DiscardUnknown() {
xxx_messageInfo_Sentiment.DiscardUnknown(m)
}
// Message-type metadata shared by the XXX_* methods above.
var xxx_messageInfo_Sentiment proto.InternalMessageInfo
// GetScore returns the sentiment score, or 0 when m is nil.
func (m *Sentiment) GetScore() float32 {
	if m == nil {
		return 0
	}
	return m.Score
}

// GetMagnitude returns the sentiment magnitude, or 0 when m is nil.
func (m *Sentiment) GetMagnitude() float32 {
	if m == nil {
		return 0
	}
	return m.Magnitude
}
// init registers the enum and every message type with the proto runtime under
// their fully-qualified proto names so they can be resolved by name at runtime.
func init() {
proto.RegisterEnum("google.cloud.dialogflow.v2.StreamingRecognitionResult_MessageType", StreamingRecognitionResult_MessageType_name, StreamingRecognitionResult_MessageType_value)
proto.RegisterType((*DetectIntentRequest)(nil), "google.cloud.dialogflow.v2.DetectIntentRequest")
proto.RegisterType((*DetectIntentResponse)(nil), "google.cloud.dialogflow.v2.DetectIntentResponse")
proto.RegisterType((*QueryParameters)(nil), "google.cloud.dialogflow.v2.QueryParameters")
proto.RegisterType((*QueryInput)(nil), "google.cloud.dialogflow.v2.QueryInput")
proto.RegisterType((*QueryResult)(nil), "google.cloud.dialogflow.v2.QueryResult")
proto.RegisterType((*StreamingDetectIntentRequest)(nil), "google.cloud.dialogflow.v2.StreamingDetectIntentRequest")
proto.RegisterType((*StreamingDetectIntentResponse)(nil), "google.cloud.dialogflow.v2.StreamingDetectIntentResponse")
proto.RegisterType((*StreamingRecognitionResult)(nil), "google.cloud.dialogflow.v2.StreamingRecognitionResult")
proto.RegisterType((*TextInput)(nil), "google.cloud.dialogflow.v2.TextInput")
proto.RegisterType((*EventInput)(nil), "google.cloud.dialogflow.v2.EventInput")
proto.RegisterType((*SentimentAnalysisRequestConfig)(nil), "google.cloud.dialogflow.v2.SentimentAnalysisRequestConfig")
proto.RegisterType((*SentimentAnalysisResult)(nil), "google.cloud.dialogflow.v2.SentimentAnalysisResult")
proto.RegisterType((*Sentiment)(nil), "google.cloud.dialogflow.v2.Sentiment")
}
// init registers the compressed file descriptor under the proto file path.
func init() {
proto.RegisterFile("google/cloud/dialogflow/v2/session.proto", fileDescriptor_2f8892c06d516e44)
}
// fileDescriptor_2f8892c06d516e44 holds the gzip-compressed
// FileDescriptorProto for google/cloud/dialogflow/v2/session.proto.
// Opaque generated data: never edit by hand; regenerate with protoc-gen-go.
var fileDescriptor_2f8892c06d516e44 = []byte{
// 1869 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x58, 0x4f, 0x6f, 0x1b, 0xc7,
0x15, 0xf7, 0x92, 0xfa, 0xfb, 0x48, 0x4b, 0xf2, 0x48, 0xa9, 0xa8, 0x3f, 0x56, 0x6c, 0x1a, 0x69,
0x14, 0xc7, 0xe6, 0xa6, 0x4a, 0xe1, 0x36, 0x0a, 0x12, 0x87, 0xa2, 0x28, 0x47, 0x85, 0x2c, 0xcb,
0x43, 0xb9, 0x69, 0x0c, 0x04, 0x8b, 0xf1, 0xee, 0x90, 0xda, 0x7a, 0x39, 0xb3, 0xda, 0x99, 0x95,
0xac, 0x18, 0xba, 0x14, 0x08, 0xd0, 0x5b, 0x81, 0x16, 0xbd, 0x17, 0x05, 0x7a, 0x29, 0x7a, 0xe8,
0x07, 0xe8, 0xa1, 0x87, 0x9e, 0x0a, 0xf4, 0xd2, 0xa2, 0x97, 0x9e, 0x72, 0x68, 0x2f, 0xf9, 0x04,
0x45, 0x4f, 0xc5, 0xce, 0xcc, 0x92, 0x2b, 0x8a, 0xa4, 0x54, 0x23, 0x28, 0x50, 0xf4, 0x24, 0xce,
0x9b, 0xdf, 0x7b, 0xf3, 0xe6, 0xbd, 0x37, 0xbf, 0xf7, 0xb4, 0xb0, 0xda, 0xe2, 0xbc, 0x15, 0x50,
0xdb, 0x0d, 0x78, 0xec, 0xd9, 0x9e, 0x4f, 0x02, 0xde, 0x6a, 0x06, 0xfc, 0xd8, 0x3e, 0x5a, 0xb3,
0x05, 0x15, 0xc2, 0xe7, 0xac, 0x12, 0x46, 0x5c, 0x72, 0xb4, 0xa8, 0x91, 0x15, 0x85, 0xac, 0x74,
0x91, 0x95, 0xa3, 0xb5, 0xc5, 0x65, 0x63, 0x85, 0x84, 0xbe, 0x4d, 0x18, 0xe3, 0x92, 0x48, 0x9f,
0x33, 0xa1, 0x35, 0x17, 0xe7, 0x33, 0xbb, 0x6e, 0xe0, 0x53, 0x26, 0xcd, 0xc6, 0xeb, 0x99, 0x8d,
0xa6, 0x4f, 0x03, 0xcf, 0x79, 0x46, 0x0f, 0xc8, 0x91, 0xcf, 0x23, 0x03, 0x58, 0xc8, 0x00, 0x22,
0x2a, 0x78, 0x1c, 0xb9, 0xd4, 0x6c, 0xdd, 0x1d, 0xe2, 0x38, 0x89, 0x3d, 0x9f, 0x3b, 0x2e, 0x67,
0x4d, 0xbf, 0x65, 0xe0, 0xc3, 0xee, 0xe9, 0x72, 0x26, 0xe9, 0x8b, 0xd4, 0xa9, 0x37, 0x87, 0x20,
0x7d, 0x26, 0xbb, 0xde, 0x7f, 0xfb, 0xe2, 0xd0, 0x39, 0x94, 0x49, 0x5f, 0x9e, 0x38, 0xf2, 0x24,
0x4c, 0xfd, 0x5e, 0x31, 0x5a, 0x6a, 0xf5, 0x2c, 0x6e, 0xda, 0x5e, 0x1c, 0xa9, 0x68, 0x99, 0xfd,
0x1b, 0xbd, 0xfb, 0x3a, 0x30, 0x6d, 0x22, 0x9e, 0x1b, 0xc4, 0x72, 0x2f, 0x42, 0xc8, 0x28, 0x76,
0x65, 0x4f, 0xb0, 0xa3, 0xd0, 0xb5, 0x85, 0x24, 0x32, 0x4e, 0xb3, 0x50, 0x32, 0x1b, 0x89, 0x2f,
0x76, 0x40, 0x64, 0xc0, 0x4c, 0x6c, 0xca, 0x7f, 0xca, 0xc3, 0xec, 0x26, 0x95, 0xd4, 0x95, 0xdb,
0xea, 0x7e, 0x98, 0x1e, 0xc6, 0x54, 0x48, 0x54, 0x83, 0x71, 0x73, 0x8f, 0x92, 0x75, 0xc3, 0x5a,
0x9d, 0xdc, 0x78, 0xeb, 0xcb, 0x6a, 0xee, 0x5f, 0xd5, 0x5b, 0x70, 0x33, 0x93, 0x7d, 0x6d, 0x95,
0x84, 0xbe, 0xa8, 0xb8, 0xbc, 0x6d, 0x37, 0xb4, 0x02, 0x4e, 0x35, 0xd1, 0x2e, 0x14, 0x0f, 0x63,
0x1a, 0x9d, 0x38, 0x21, 0x89, 0x48, 0x5b, 0x94, 0x72, 0x37, 0xac, 0xd5, 0xc2, 0xda, 0xdb, 0x95,
0xc1, 0xd5, 0x54, 0x79, 0x9c, 0xe0, 0xf7, 0x12, 0x38, 0x95, 0x34, 0x12, 0xb8, 0x70, 0xd8, 0x11,
0x08, 0xb4, 0x03, 0x7a, 0xe9, 0xf8, 0x2c, 0x8c, 0x65, 0x29, 0xaf, 0xcc, 0x7d, 0xf3, 0x42, 0x73,
0xdb, 0x09, 0x7a, 0x23, 0xff, 0x65, 0x35, 0x87, 0xe1, 0xb0, 0x23, 0x40, 0x9f, 0xc1, 0x2c, 0x8f,
0x65, 0x18, 0x4b, 0x27, 0x5b, 0x33, 0xa5, 0x11, 0x65, 0xf5, 0xee, 0x30, 0xab, 0x8f, 0x94, 0x5a,
0x35, 0xd1, 0xaa, 0x29, 0x25, 0x7c, 0x8d, 0xf7, 0x8a, 0x50, 0x03, 0x4a, 0x7d, 0xcc, 0xab, 0x64,
0x96, 0xc6, 0xd5, 0x19, 0x8b, 0xe9, 0x19, 0x69, 0x36, 0x2b, 0x5b, 0x49, 0xbe, 0x1f, 0x12, 0xf1,
0x1c, 0xbf, 0x76, 0xce, 0x60, 0x22, 0x46, 0xaf, 0x43, 0x41, 0xdd, 0x5d, 0xdb, 0x2c, 0x8d, 0xde,
0xb0, 0x56, 0x8b, 0x18, 0x94, 0x48, 0x41, 0xcb, 0xbf, 0xcf, 0xc1, 0xdc, 0xd9, 0x7c, 0x8a, 0x90,
0x33, 0x41, 0x13, 0xcd, 0xc8, 0xfc, 0x76, 0x7c, 0x4f, 0x27, 0x15, 0x43, 0x2a, 0xda, 0xf6, 0xd0,
0xf7, 0xd2, 0x64, 0x45, 0x54, 0xc4, 0x81, 0x34, 0xc9, 0x7a, 0xf3, 0xc2, 0xe8, 0x62, 0x05, 0x37,
0x89, 0xd2, 0x0b, 0xf4, 0x1e, 0x4c, 0x1d, 0xd3, 0x67, 0x07, 0x9c, 0x3f, 0x77, 0x74, 0x1d, 0x9a,
0x5c, 0xa1, 0xd4, 0x5a, 0x14, 0xba, 0x95, 0x86, 0xda, 0xc1, 0x57, 0x0d, 0x52, 0x2f, 0xd1, 0x4d,
0x28, 0x66, 0xc3, 0xa6, 0xd2, 0x51, 0xc4, 0x85, 0x4c, 0x38, 0x06, 0x25, 0x6e, 0xec, 0xeb, 0x49,
0x5c, 0xf9, 0xab, 0x3c, 0x4c, 0xf7, 0x94, 0x21, 0x5a, 0x82, 0x49, 0xe9, 0xb7, 0xa9, 0xf3, 0x39,
0x67, 0xd4, 0xc4, 0x6e, 0x22, 0x11, 0x3c, 0xe5, 0x8c, 0xa2, 0x7b, 0x50, 0x6c, 0x51, 0xee, 0x04,
0xdc, 0x55, 0x8f, 0xd9, 0x44, 0x6e, 0x36, 0x75, 0x44, 0x11, 0xc0, 0x0e, 0x91, 0x3b, 0xac, 0x85,
0x0b, 0x2d, 0xca, 0x77, 0x0c, 0x0e, 0xdd, 0x87, 0x09, 0x43, 0x3f, 0x49, 0x7c, 0xf2, 0xab, 0x85,
0xb5, 0x5b, 0xc3, 0x9c, 0xaf, 0x69, 0x2c, 0xee, 0x28, 0xa1, 0x37, 0x60, 0x2a, 0xa2, 0x82, 0x4a,
0xa7, 0x63, 0x26, 0x89, 0xd6, 0x04, 0xbe, 0xaa, 0xa4, 0xb5, 0x14, 0xe6, 0xc0, 0x5c, 0x1f, 0x4e,
0x12, 0xa5, 0x51, 0x75, 0xe6, 0xd0, 0x80, 0x99, 0x27, 0x5d, 0x57, 0x6a, 0xfb, 0x27, 0x21, 0xc5,
0x48, 0xf4, 0x8a, 0x04, 0xfa, 0x16, 0x8c, 0x87, 0xe4, 0x24, 0xe0, 0xc4, 0x33, 0x49, 0x98, 0x3f,
0x57, 0xd9, 0x0d, 0xc5, 0x53, 0x38, 0xc5, 0xa1, 0x2f, 0x2c, 0xb8, 0x29, 0x12, 0x6f, 0xda, 0x94,
0x49, 0x87, 0x30, 0x12, 0x9c, 0x08, 0x5f, 0x38, 0x91, 0xa6, 0x9f, 0x34, 0xa5, 0xa0, 0xac, 0xad,
0x0f, 0xf7, 0xd0, 0x18, 0xa9, 0x1a, 0x1b, 0x86, 0xc1, 0x4c, 0x7e, 0x57, 0xc4, 0xd0, 0xfd, 0xf2,
0x3f, 0x2c, 0x80, 0x2e, 0x49, 0xa0, 0xc7, 0x50, 0x3c, 0x53, 0x53, 0x96, 0x72, 0xe0, 0xce, 0x30,
0x07, 0xb6, 0xd9, 0xd9, 0xfa, 0xf9, 0xf8, 0x0a, 0x2e, 0x90, 0x0c, 0x0f, 0xbc, 0x0f, 0x23, 0x49,
0x1a, 0x4c, 0x55, 0xbc, 0x31, 0xcc, 0xd4, 0x3e, 0x7d, 0x21, 0x95, 0xb9, 0x8f, 0xaf, 0x60, 0xa5,
0x84, 0x3e, 0x84, 0x51, 0x7a, 0x44, 0xd9, 0xa5, 0xb8, 0xae, 0x9e, 0x00, 0x53, 0x75, 0xad, 0xb6,
0x31, 0x0e, 0xa3, 0x8a, 0x1c, 0xca, 0x3f, 0x1f, 0x87, 0x42, 0xe6, 0xb9, 0xa2, 0xeb, 0xa0, 0xa9,
0xd0, 0x51, 0xbe, 0xe9, 0x8a, 0x9e, 0x54, 0x92, 0xc4, 0x09, 0x74, 0x0b, 0xae, 0x06, 0x84, 0xb5,
0x62, 0xd2, 0xa2, 0x8e, 0xcb, 0x3d, 0x5a, 0x9a, 0x56, 0x88, 0x62, 0x2a, 0xac, 0x71, 0x8f, 0xa2,
0x0d, 0xb8, 0x2e, 0x42, 0x4a, 0xdd, 0x03, 0x27, 0xa2, 0x2e, 0x6f, 0x31, 0x3f, 0xa9, 0x6a, 0x1d,
0x39, 0x8f, 0x32, 0x97, 0xaa, 0x2b, 0xe7, 0xf0, 0x92, 0x06, 0xe1, 0x2e, 0xa6, 0xd6, 0x81, 0xa0,
0x6f, 0xc0, 0x18, 0x71, 0xd5, 0xab, 0xc9, 0xab, 0x13, 0xcc, 0x0a, 0x7d, 0x07, 0x20, 0xec, 0x3c,
0x3f, 0xc3, 0xc9, 0x03, 0xab, 0x2a, 0x03, 0x45, 0x1f, 0xc0, 0x12, 0x09, 0x02, 0x55, 0x48, 0x7e,
0x44, 0x3d, 0xd3, 0x7a, 0x9c, 0x30, 0x79, 0x12, 0x4c, 0x2a, 0xc6, 0x9c, 0xc0, 0x25, 0x12, 0x04,
0xd8, 0x20, 0x74, 0x6f, 0xd9, 0xd3, 0xfb, 0xe8, 0x2d, 0x98, 0x69, 0xc6, 0x41, 0xd3, 0x0f, 0x02,
0x55, 0x98, 0x2a, 0x3a, 0x63, 0xca, 0xb3, 0xe9, 0x8c, 0x5c, 0xc5, 0xe8, 0x33, 0x98, 0xcb, 0x42,
0xdb, 0x54, 0x08, 0xd2, 0xa2, 0xa2, 0x34, 0xae, 0x9e, 0xd5, 0xed, 0xe1, 0x35, 0xa3, 0x66, 0x89,
0x87, 0x5a, 0x05, 0xcf, 0x66, 0xec, 0x18, 0x99, 0x7a, 0xdc, 0x1d, 0x0e, 0x55, 0xc3, 0x4f, 0x69,
0x42, 0xf9, 0xd1, 0xe1, 0x4b, 0x25, 0x44, 0x1f, 0xc1, 0x74, 0x0a, 0x4b, 0xdf, 0xe0, 0xe4, 0xf0,
0x68, 0xa5, 0x66, 0xf7, 0xcc, 0x53, 0xdc, 0x81, 0x69, 0x43, 0xa7, 0x1d, 0x1a, 0x81, 0xcb, 0xb3,
0xd1, 0x94, 0xd6, 0xed, 0x90, 0xcd, 0x3a, 0x8c, 0xe9, 0x49, 0xa9, 0x54, 0x50, 0x6e, 0x94, 0x2f,
0x8e, 0x03, 0x36, 0x1a, 0xe8, 0x43, 0x58, 0xd2, 0xbf, 0x1c, 0x4f, 0xb5, 0xb0, 0x9e, 0x72, 0x2a,
0xaa, 0x72, 0x5a, 0xd0, 0x90, 0xcd, 0x14, 0x91, 0x29, 0xa6, 0x8f, 0x60, 0xda, 0xf3, 0x49, 0x8b,
0x71, 0x21, 0x7d, 0xd7, 0xf1, 0x59, 0x93, 0x97, 0xa6, 0x2e, 0x88, 0x45, 0x17, 0xbf, 0xcd, 0x9a,
0x1c, 0x71, 0x58, 0xe8, 0xcb, 0x4a, 0xaa, 0x23, 0x5e, 0x53, 0xb6, 0xde, 0xfd, 0x0f, 0xd9, 0x48,
0x75, 0xc7, 0x79, 0xd1, 0x7f, 0xa3, 0xfc, 0xc5, 0x08, 0x2c, 0x37, 0x64, 0x44, 0x49, 0xdb, 0x67,
0xad, 0xff, 0xd3, 0x41, 0xec, 0x2e, 0xcc, 0x08, 0x9f, 0xb5, 0x02, 0xea, 0xc4, 0x52, 0xd2, 0x88,
0x24, 0xb9, 0x56, 0x8d, 0x6c, 0x23, 0x57, 0xb2, 0xf0, 0xb4, 0xde, 0x7b, 0x92, 0x6e, 0x0d, 0x6a,
0xff, 0xa3, 0xff, 0x43, 0x73, 0xdb, 0xd8, 0xb9, 0xb9, 0xed, 0xb7, 0x79, 0xb8, 0x3e, 0xa0, 0x0e,
0x2e, 0x3b, 0xc0, 0x51, 0x40, 0x59, 0x1e, 0x3e, 0x33, 0xc6, 0xdd, 0x1b, 0x5a, 0xb4, 0xe9, 0xb9,
0x19, 0x8a, 0x36, 0x75, 0x7b, 0x2d, 0xea, 0x15, 0x9d, 0x9b, 0x13, 0xf3, 0x5f, 0xeb, 0x9c, 0x38,
0xf2, 0xaa, 0x73, 0xe2, 0xe8, 0x7f, 0x7d, 0x4e, 0xfc, 0x6b, 0x1e, 0x16, 0x07, 0x87, 0x0e, 0x51,
0x28, 0x9a, 0x96, 0xa0, 0xc6, 0x2d, 0x95, 0xb0, 0xa9, 0xb5, 0x8d, 0x57, 0x4b, 0x44, 0xda, 0x2a,
0xd4, 0x08, 0x56, 0x68, 0x77, 0x17, 0x68, 0x05, 0x40, 0x46, 0x84, 0x09, 0x37, 0xf2, 0x43, 0x9d,
0xed, 0x49, 0x9c, 0x91, 0xa0, 0x05, 0x98, 0xf0, 0x85, 0xd3, 0xf4, 0x19, 0x09, 0x54, 0xaa, 0x26,
0xf0, 0xb8, 0x2f, 0xb6, 0x92, 0x65, 0xa2, 0x9a, 0x61, 0xd7, 0x11, 0xc5, 0xae, 0x19, 0x09, 0xda,
0x87, 0x19, 0xd3, 0xdf, 0x8f, 0x79, 0xe4, 0x69, 0x3e, 0xbd, 0x44, 0x73, 0x6b, 0x28, 0x9d, 0x4f,
0x78, 0xe4, 0x25, 0x94, 0x8a, 0xa7, 0xc4, 0x99, 0x35, 0xaa, 0xc3, 0x35, 0x63, 0x95, 0x32, 0xcf,
0xe1, 0xcd, 0xa6, 0xa0, 0x52, 0xb5, 0xb6, 0xc2, 0xda, 0xc2, 0xb9, 0x87, 0xb5, 0x69, 0xfe, 0x41,
0xc6, 0xd3, 0x5a, 0xa7, 0xce, 0xbc, 0x47, 0x4a, 0xa3, 0xfc, 0x03, 0x28, 0x64, 0x62, 0x82, 0x96,
0xa1, 0xf4, 0xb0, 0xde, 0x68, 0x54, 0x1f, 0xd4, 0x9d, 0xfd, 0x4f, 0xf7, 0xea, 0xce, 0x93, 0xdd,
0xc6, 0x5e, 0xbd, 0xb6, 0xbd, 0xb5, 0x5d, 0xdf, 0x9c, 0xb9, 0x82, 0xa6, 0x00, 0xf6, 0x71, 0x75,
0xb7, 0x51, 0xc3, 0xdb, 0x7b, 0xfb, 0x33, 0x16, 0x5a, 0x82, 0xf9, 0xfa, 0xee, 0xa6, 0xf3, 0x68,
0xcb, 0x69, 0x6c, 0xef, 0x3e, 0xd8, 0xa9, 0x3b, 0x4f, 0xf6, 0xf7, 0xeb, 0xb8, 0xba, 0x5b, 0xab,
0xcf, 0xe4, 0xca, 0xbb, 0x30, 0xd9, 0x19, 0xc4, 0xd0, 0xbc, 0x99, 0xde, 0x34, 0xf7, 0x2a, 0xea,
0xd2, 0x93, 0xd9, 0x6a, 0xef, 0x84, 0x94, 0xeb, 0x22, 0xce, 0x8c, 0x49, 0xe5, 0x1f, 0x5b, 0x00,
0xdd, 0xd9, 0x2c, 0xb1, 0xc8, 0x48, 0x9b, 0x9e, 0xb1, 0x98, 0x08, 0x7a, 0x46, 0x9e, 0xdc, 0xe5,
0x47, 0x9e, 0x73, 0xae, 0xe4, 0x07, 0xb9, 0x42, 0x60, 0x65, 0xf8, 0xbc, 0x8c, 0xee, 0xc3, 0xb2,
0x6a, 0x7b, 0x9f, 0x53, 0xa7, 0x3b, 0x1f, 0x3a, 0x9d, 0xee, 0xa5, 0xbc, 0x9e, 0xc0, 0x0b, 0x06,
0xf3, 0x38, 0x1d, 0x18, 0x3b, 0x56, 0xcb, 0x11, 0xcc, 0x0f, 0x68, 0x82, 0xe8, 0x13, 0x98, 0x1b,
0x68, 0xf3, 0x82, 0xc9, 0xb8, 0x63, 0x12, 0xa3, 0xc3, 0xf3, 0x67, 0xde, 0x87, 0xc9, 0xce, 0x02,
0xcd, 0xc1, 0xa8, 0x70, 0x79, 0xa4, 0x03, 0x9c, 0xc3, 0x7a, 0x81, 0x96, 0x61, 0xb2, 0x4d, 0x92,
0x27, 0x15, 0x7b, 0xe9, 0x5c, 0xda, 0x15, 0xac, 0xfd, 0x6e, 0x04, 0x26, 0x4c, 0xd3, 0x14, 0xe8,
0x17, 0x39, 0x28, 0x66, 0x19, 0x18, 0xd9, 0xc3, 0x3c, 0xeb, 0xd3, 0xb3, 0x17, 0xdf, 0xb9, 0xbc,
0x82, 0x66, 0xef, 0xf2, 0x6f, 0xac, 0xbf, 0x55, 0x67, 0x4d, 0xb7, 0xbe, 0x93, 0x69, 0xad, 0x3f,
0xfa, 0xcb, 0xdf, 0x7f, 0x96, 0xfb, 0x89, 0x55, 0xbe, 0x67, 0x1f, 0xad, 0xd9, 0x2f, 0x0d, 0xe0,
0x83, 0x30, 0xe2, 0x3f, 0xa4, 0xae, 0x14, 0xf6, 0x6d, 0x9b, 0xb4, 0x28, 0x93, 0xe9, 0x27, 0x27,
0x61, 0xdf, 0x3e, 0x5d, 0xf7, 0x32, 0xf6, 0xd7, 0xad, 0xdb, 0x4f, 0x71, 0xf9, 0xe1, 0x70, 0x65,
0xca, 0x8e, 0xfc, 0x88, 0xb3, 0x24, 0x78, 0x89, 0x30, 0x16, 0x34, 0x4a, 0xfe, 0x0e, 0xb1, 0x89,
0x7e, 0x6a, 0xc1, 0x6b, 0x7d, 0xbb, 0x15, 0xfa, 0xee, 0xa5, 0xf8, 0xad, 0x5f, 0xd0, 0xde, 0x7b,
0x05, 0x4d, 0x13, 0xbd, 0x2b, 0xab, 0xd6, 0x3b, 0xd6, 0xe2, 0x8b, 0x3f, 0x56, 0x17, 0x06, 0xce,
0x44, 0x7f, 0xae, 0x7e, 0x7a, 0x20, 0x65, 0x28, 0xd6, 0x6d, 0xfb, 0xf8, 0xf8, 0xdc, 0xc0, 0x44,
0x62, 0x79, 0xa0, 0xbf, 0xe5, 0xdd, 0x0d, 0x03, 0x22, 0x9b, 0x3c, 0x6a, 0xdf, 0xb9, 0x08, 0xde,
0x3d, 0x6a, 0xe3, 0x0f, 0xb9, 0xaf, 0xaa, 0xbf, 0xb2, 0x2e, 0x31, 0x91, 0xa1, 0xb7, 0x3b, 0xd1,
0x7f, 0x69, 0x7e, 0x9d, 0xf6, 0xa6, 0x30, 0x4d, 0xd4, 0x29, 0x6a, 0x0c, 0x04, 0x9f, 0x49, 0xd9,
0xcb, 0xcc, 0xea, 0xd4, 0xa4, 0xef, 0x65, 0xf2, 0xe7, 0xb4, 0x8f, 0x51, 0x58, 0x71, 0x79, 0x7b,
0x48, 0xa4, 0x37, 0x8a, 0xc6, 0xd5, 0xbd, 0x84, 0x70, 0xf6, 0xac, 0xa7, 0x9b, 0x06, 0xdb, 0xe2,
0x09, 0x93, 0x54, 0x78, 0xd4, 0xb2, 0x5b, 0x94, 0x29, 0x3a, 0xb2, 0xbb, 0x97, 0xec, 0xf7, 0x21,
0xf4, 0xfd, 0xee, 0xea, 0x9f, 0x96, 0xf5, 0xcb, 0x5c, 0x6e, 0x73, 0xeb, 0xd7, 0xb9, 0xc5, 0x07,
0xda, 0x5c, 0x4d, 0x1d, 0xbd, 0xd9, 0x3d, 0xfa, 0xfb, 0x6b, 0xcf, 0xc6, 0x94, 0xd5, 0x77, 0xff,
0x1d, 0x00, 0x00, 0xff, 0xff, 0x5a, 0x6f, 0xa1, 0x96, 0x98, 0x16, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
// (Blank identifiers keep the grpc/context imports live without affecting the binary.)
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// SessionsClient is the client API for Sessions service.
// Obtain an instance with NewSessionsClient.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type SessionsClient interface {
	// Processes a natural language query and returns structured, actionable data
	// as a result. This method is not idempotent, because it may cause contexts
	// and session entity types to be updated, which in turn might affect
	// results of future queries.
	DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error)
	// Processes a natural language query in audio format in a streaming fashion
	// and returns structured, actionable data as a result. This method is only
	// available via the gRPC API (not REST).
	StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error)
}
// sessionsClient is the concrete implementation of SessionsClient.
// It holds only the connection interface over which RPCs are issued.
type sessionsClient struct {
	cc grpc.ClientConnInterface
}
// NewSessionsClient returns a SessionsClient that issues all of its
// RPCs over the supplied connection interface.
func NewSessionsClient(cc grpc.ClientConnInterface) SessionsClient {
	return &sessionsClient{cc: cc}
}
// DetectIntent issues the unary DetectIntent RPC over the wrapped
// connection and decodes the reply into a fresh DetectIntentResponse.
// Transport and server errors are returned unchanged.
func (c *sessionsClient) DetectIntent(ctx context.Context, in *DetectIntentRequest, opts ...grpc.CallOption) (*DetectIntentResponse, error) {
	resp := new(DetectIntentResponse)
	if err := c.cc.Invoke(ctx, "/google.cloud.dialogflow.v2.Sessions/DetectIntent", in, resp, opts...); err != nil {
		return nil, err
	}
	return resp, nil
}
// StreamingDetectIntent opens the bidirectional StreamingDetectIntent
// stream (the first entry in the service descriptor's Streams slice)
// and wraps it in the typed client-stream adapter.
func (c *sessionsClient) StreamingDetectIntent(ctx context.Context, opts ...grpc.CallOption) (Sessions_StreamingDetectIntentClient, error) {
	stream, err := c.cc.NewStream(ctx, &_Sessions_serviceDesc.Streams[0], "/google.cloud.dialogflow.v2.Sessions/StreamingDetectIntent", opts...)
	if err != nil {
		return nil, err
	}
	return &sessionsStreamingDetectIntentClient{stream}, nil
}
// Sessions_StreamingDetectIntentClient is the client-side view of the
// bidirectional StreamingDetectIntent stream: callers Send requests and
// Recv responses; the embedded grpc.ClientStream supplies the generic
// stream operations.
type Sessions_StreamingDetectIntentClient interface {
	Send(*StreamingDetectIntentRequest) error
	Recv() (*StreamingDetectIntentResponse, error)
	grpc.ClientStream
}
// sessionsStreamingDetectIntentClient implements
// Sessions_StreamingDetectIntentClient by layering typed Send/Recv
// methods over the embedded untyped grpc.ClientStream.
type sessionsStreamingDetectIntentClient struct {
	grpc.ClientStream
}
// Send marshals one StreamingDetectIntentRequest onto the underlying
// client stream.
func (x *sessionsStreamingDetectIntentClient) Send(req *StreamingDetectIntentRequest) error {
	return x.ClientStream.SendMsg(req)
}
// Recv blocks until the next StreamingDetectIntentResponse arrives on
// the underlying stream; any receive error is returned unchanged.
func (x *sessionsStreamingDetectIntentClient) Recv() (*StreamingDetectIntentResponse, error) {
	resp := new(StreamingDetectIntentResponse)
	err := x.ClientStream.RecvMsg(resp)
	if err != nil {
		return nil, err
	}
	return resp, nil
}
// SessionsServer is the server API for Sessions service.
// Implementations may embed UnimplementedSessionsServer to stay
// forward compatible as methods are added.
type SessionsServer interface {
	// Processes a natural language query and returns structured, actionable data
	// as a result. This method is not idempotent, because it may cause contexts
	// and session entity types to be updated, which in turn might affect
	// results of future queries.
	DetectIntent(context.Context, *DetectIntentRequest) (*DetectIntentResponse, error)
	// Processes a natural language query in audio format in a streaming fashion
	// and returns structured, actionable data as a result. This method is only
	// available via the gRPC API (not REST).
	StreamingDetectIntent(Sessions_StreamingDetectIntentServer) error
}
// UnimplementedSessionsServer can be embedded to have forward compatible implementations.
// Any SessionsServer method not overridden by the embedding type
// reports codes.Unimplemented to the caller.
type UnimplementedSessionsServer struct {
}
// DetectIntent always reports codes.Unimplemented; embedders override
// it to supply a real implementation.
func (*UnimplementedSessionsServer) DetectIntent(context.Context, *DetectIntentRequest) (*DetectIntentResponse, error) {
	return nil, status1.Errorf(codes.Unimplemented, "method DetectIntent not implemented")
}
// StreamingDetectIntent always reports codes.Unimplemented; embedders
// override it to supply a real implementation.
func (*UnimplementedSessionsServer) StreamingDetectIntent(Sessions_StreamingDetectIntentServer) error {
	return status1.Errorf(codes.Unimplemented, "method StreamingDetectIntent not implemented")
}
// RegisterSessionsServer registers srv and the Sessions service
// descriptor with s so incoming Sessions RPCs are routed to srv.
func RegisterSessionsServer(s *grpc.Server, srv SessionsServer) {
	s.RegisterService(&_Sessions_serviceDesc, srv)
}
// _Sessions_DetectIntent_Handler is the grpc runtime adapter for the
// unary DetectIntent method: it decodes the wire payload into a typed
// request, then either invokes the service directly or routes the call
// through the server's unary interceptor.
func _Sessions_DetectIntent_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(DetectIntentRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(SessionsServer).DetectIntent(ctx, req)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dialogflow.v2.Sessions/DetectIntent",
	}
	// The interceptor receives an untyped handler; re-assert the
	// request type when it eventually calls through.
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(SessionsServer).DetectIntent(ctx, r.(*DetectIntentRequest))
	}
	return interceptor(ctx, req, info, handler)
}
// _Sessions_StreamingDetectIntent_Handler adapts the raw grpc server
// stream to the typed Sessions_StreamingDetectIntentServer interface
// before handing control to the service implementation.
func _Sessions_StreamingDetectIntent_Handler(srv interface{}, stream grpc.ServerStream) error {
	wrapped := &sessionsStreamingDetectIntentServer{stream}
	return srv.(SessionsServer).StreamingDetectIntent(wrapped)
}
// Sessions_StreamingDetectIntentServer is the server-side view of the
// bidirectional StreamingDetectIntent stream: handlers Recv requests
// from the client and Send responses back; the embedded
// grpc.ServerStream supplies the generic stream operations.
type Sessions_StreamingDetectIntentServer interface {
	Send(*StreamingDetectIntentResponse) error
	Recv() (*StreamingDetectIntentRequest, error)
	grpc.ServerStream
}
// sessionsStreamingDetectIntentServer implements
// Sessions_StreamingDetectIntentServer by layering typed Send/Recv
// methods over the embedded untyped grpc.ServerStream.
type sessionsStreamingDetectIntentServer struct {
	grpc.ServerStream
}
// Send marshals one StreamingDetectIntentResponse onto the underlying
// server stream.
func (x *sessionsStreamingDetectIntentServer) Send(resp *StreamingDetectIntentResponse) error {
	return x.ServerStream.SendMsg(resp)
}
// Recv blocks until the next StreamingDetectIntentRequest arrives from
// the client; any receive error is returned unchanged.
func (x *sessionsStreamingDetectIntentServer) Recv() (*StreamingDetectIntentRequest, error) {
	req := new(StreamingDetectIntentRequest)
	err := x.ServerStream.RecvMsg(req)
	if err != nil {
		return nil, err
	}
	return req, nil
}
// _Sessions_serviceDesc describes the Sessions service to the grpc
// runtime: the fully-qualified service name, the handler adapters for
// its one unary and one streaming method, and the .proto file this
// code was generated from.
var _Sessions_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.dialogflow.v2.Sessions",
	HandlerType: (*SessionsServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "DetectIntent",
			Handler:    _Sessions_DetectIntent_Handler,
		},
	},
	Streams: []grpc.StreamDesc{
		{
			StreamName: "StreamingDetectIntent",
			Handler:    _Sessions_StreamingDetectIntent_Handler,
			// Both directions stream: StreamingDetectIntent is bidirectional.
			ServerStreams: true,
			ClientStreams: true,
		},
	},
	Metadata: "google/cloud/dialogflow/v2/session.proto",
}