participant.proto

// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.dialogflow.v2beta1;

import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/dialogflow/v2beta1/audio_config.proto";
import "google/cloud/dialogflow/v2beta1/session.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.Dialogflow.V2beta1";
option go_package = "google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1;dialogflow";
option java_multiple_files = true;
option java_outer_classname = "ParticipantProto";
option java_package = "com.google.cloud.dialogflow.v2beta1";
option objc_class_prefix = "DF";

// Service for managing [Participants][google.cloud.dialogflow.v2beta1.Participant].
service Participants {
  option (google.api.default_host) = "dialogflow.googleapis.com";
  option (google.api.oauth_scopes) =
      "https://www.googleapis.com/auth/cloud-platform,"
      "https://www.googleapis.com/auth/dialogflow";

  // Creates a new participant in a conversation.
  rpc CreateParticipant(CreateParticipantRequest) returns (Participant) {
    option (google.api.http) = {
      post: "/v2beta1/{parent=projects/*/conversations/*}/participants"
      body: "participant"
      additional_bindings {
        post: "/v2beta1/{parent=projects/*/locations/*/conversations/*}/participants"
        body: "participant"
      }
    };
    option (google.api.method_signature) = "parent,participant";
  }

  // Retrieves a conversation participant.
  rpc GetParticipant(GetParticipantRequest) returns (Participant) {
    option (google.api.http) = {
      get: "/v2beta1/{name=projects/*/conversations/*/participants/*}"
      additional_bindings {
        get: "/v2beta1/{name=projects/*/locations/*/conversations/*/participants/*}"
      }
    };
    option (google.api.method_signature) = "name";
  }

  // Returns the list of all participants in the specified conversation.
  rpc ListParticipants(ListParticipantsRequest) returns (ListParticipantsResponse) {
    option (google.api.http) = {
      get: "/v2beta1/{parent=projects/*/conversations/*}/participants"
      additional_bindings {
        get: "/v2beta1/{parent=projects/*/locations/*/conversations/*}/participants"
      }
    };
    option (google.api.method_signature) = "parent";
  }

  // Updates the specified participant.
  rpc UpdateParticipant(UpdateParticipantRequest) returns (Participant) {
    option (google.api.http) = {
      patch: "/v2beta1/{participant.name=projects/*/conversations/*/participants/*}"
      body: "participant"
      additional_bindings {
        patch: "/v2beta1/{participant.name=projects/*/locations/*/conversations/*/participants/*}"
        body: "participant"
      }
    };
    option (google.api.method_signature) = "participant,update_mask";
  }

  // Adds a text (chat, for example), or audio (phone recording, for example)
  // message from a participant into the conversation.
  //
  // Note: Always use agent versions for production traffic
  // sent to virtual agents. See [Versions and
  // environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
  rpc AnalyzeContent(AnalyzeContentRequest) returns (AnalyzeContentResponse) {
    option (google.api.http) = {
      post: "/v2beta1/{participant=projects/*/conversations/*/participants/*}:analyzeContent"
      body: "*"
      additional_bindings {
        post: "/v2beta1/{participant=projects/*/locations/*/conversations/*/participants/*}:analyzeContent"
        body: "*"
      }
    };
    option (google.api.method_signature) = "participant,text_input";
    option (google.api.method_signature) = "participant,audio_input";
    option (google.api.method_signature) = "participant,event_input";
  }
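
  // NOTE (editor): The sketch below is illustrative only and is not part of
  // this API definition. It assumes the Python client library
  // `google-cloud-dialogflow` (module `google.cloud.dialogflow_v2beta1`) and a
  // hypothetical, already-created participant resource name.
  //
  //   from google.cloud import dialogflow_v2beta1 as dialogflow
  //
  //   client = dialogflow.ParticipantsClient()
  //   participant = (  # hypothetical resource name
  //       "projects/my-project/conversations/my-conversation/participants/end-user"
  //   )
  //   response = client.analyze_content(
  //       participant=participant,
  //       text_input=dialogflow.TextInput(text="I need help with my bill",
  //                                       language_code="en-US"),
  //   )
  //   print(response.reply_text)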

  // Adds a text (e.g., chat) or audio (e.g., phone recording) message from a
  // participant into the conversation.
  // Note: This method is only available through the gRPC API (not REST).
  //
  // The top-level message sent to the client by the server is
  // `StreamingAnalyzeContentResponse`. Multiple response messages can be
  // returned in order. The first one or more messages contain the
  // `recognition_result` field. Each result represents a more complete
  // transcript of what the user said. The next message contains the
  // `reply_text` field, and potentially the `reply_audio` and/or the
  // `automated_agent_reply` fields.
  //
  // Note: Always use agent versions for production traffic
  // sent to virtual agents. See [Versions and
  // environments](https://cloud.google.com/dialogflow/es/docs/agents-versions).
  rpc StreamingAnalyzeContent(stream StreamingAnalyzeContentRequest) returns (stream StreamingAnalyzeContentResponse) {
  }
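
  // NOTE (editor): Illustrative sketch of the request/response flow described
  // above; not part of this API definition. It assumes the
  // `google-cloud-dialogflow` Python client, a hypothetical participant name,
  // and placeholder audio bytes. The first request carries only the
  // participant and config; later requests carry audio; the stream is
  // half-closed when the generator is exhausted.
  //
  //   from google.cloud import dialogflow_v2beta1 as dialogflow
  //
  //   PARTICIPANT = (  # hypothetical resource name
  //       "projects/my-project/conversations/my-conversation/participants/end-user"
  //   )
  //
  //   def requests(audio_chunks):
  //       yield dialogflow.StreamingAnalyzeContentRequest(
  //           participant=PARTICIPANT,
  //           audio_config=dialogflow.InputAudioConfig(
  //               audio_encoding=dialogflow.AudioEncoding.AUDIO_ENCODING_LINEAR_16,
  //               sample_rate_hertz=16000,
  //               language_code="en-US",
  //           ),
  //       )
  //       for chunk in audio_chunks:
  //           yield dialogflow.StreamingAnalyzeContentRequest(input_audio=chunk)
  //
  //   client = dialogflow.ParticipantsClient()
  //   chunks = [b"\x00" * 3200]  # placeholder audio; replace with real PCM data
  //   for response in client.streaming_analyze_content(requests=requests(chunks)):
  //       if response.recognition_result.transcript:
  //           print("transcript:", response.recognition_result.transcript)
  //       if response.reply_text:
  //           print("reply:", response.reply_text)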

  // Gets suggested articles for a participant based on specific historical
  // messages.
  //
  // Note that [ListSuggestions][google.cloud.dialogflow.v2beta1.Participants.ListSuggestions] will only list the auto-generated
  // suggestions, while [CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion] will try to compile suggestion
  // based on the provided conversation context in the real time.
  rpc SuggestArticles(SuggestArticlesRequest) returns (SuggestArticlesResponse) {
    option (google.api.http) = {
      post: "/v2beta1/{parent=projects/*/conversations/*/participants/*}/suggestions:suggestArticles"
      body: "*"
      additional_bindings {
        post: "/v2beta1/{parent=projects/*/locations/*/conversations/*/participants/*}/suggestions:suggestArticles"
        body: "*"
      }
    };
    option (google.api.method_signature) = "parent";
  }

  // Gets suggested faq answers for a participant based on specific historical
  // messages.
  rpc SuggestFaqAnswers(SuggestFaqAnswersRequest) returns (SuggestFaqAnswersResponse) {
    option (google.api.http) = {
      post: "/v2beta1/{parent=projects/*/conversations/*/participants/*}/suggestions:suggestFaqAnswers"
      body: "*"
      additional_bindings {
        post: "/v2beta1/{parent=projects/*/locations/*/conversations/*/participants/*}/suggestions:suggestFaqAnswers"
        body: "*"
      }
    };
    option (google.api.method_signature) = "parent";
  }

  // Gets smart replies for a participant based on specific historical
  // messages.
  rpc SuggestSmartReplies(SuggestSmartRepliesRequest) returns (SuggestSmartRepliesResponse) {
    option (google.api.http) = {
      post: "/v2beta1/{parent=projects/*/conversations/*/participants/*}/suggestions:suggestSmartReplies"
      body: "*"
      additional_bindings {
        post: "/v2beta1/{parent=projects/*/locations/*/conversations/*/participants/*}/suggestions:suggestSmartReplies"
        body: "*"
      }
    };
    option (google.api.method_signature) = "parent";
  }

  // Deprecated: Use inline suggestion, event based suggestion or
  // Suggestion* API instead.
  // See [HumanAgentAssistantConfig.name][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.name] for more
  // details.
  // Removal Date: 2020-09-01.
  //
  // Retrieves suggestions for live agents.
  //
  // This method should be used by human agent client software to fetch auto
  // generated suggestions in real-time, while the conversation with an end user
  // is in progress. The functionality is implemented in terms of the
  // [list
  // pagination](https://cloud.google.com/apis/design/design_patterns#list_pagination)
  // design pattern. The client app should use the `next_page_token` field
  // to fetch the next batch of suggestions. `suggestions` are sorted by
  // `create_time` in descending order.
  // To fetch latest suggestion, just set `page_size` to 1.
  // To fetch new suggestions without duplication, send request with filter
  // `create_time_epoch_microseconds > [first item's create_time of previous
  // request]` and empty page_token.
  rpc ListSuggestions(ListSuggestionsRequest) returns (ListSuggestionsResponse) {
    option deprecated = true;
    option (google.api.http) = {
      get: "/v2beta1/{parent=projects/*/conversations/*/participants/*}/suggestions"
    };
  }

  // Deprecated. use [SuggestArticles][google.cloud.dialogflow.v2beta1.Participants.SuggestArticles] and [SuggestFaqAnswers][google.cloud.dialogflow.v2beta1.Participants.SuggestFaqAnswers] instead.
  //
  // Gets suggestions for a participant based on specific historical
  // messages.
  //
  // Note that [ListSuggestions][google.cloud.dialogflow.v2beta1.Participants.ListSuggestions] will only list the auto-generated
  // suggestions, while [CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion] will try to compile suggestion
  // based on the provided conversation context in the real time.
  rpc CompileSuggestion(CompileSuggestionRequest) returns (CompileSuggestionResponse) {
    option deprecated = true;
    option (google.api.http) = {
      post: "/v2beta1/{parent=projects/*/conversations/*/participants/*}/suggestions:compile"
      body: "*"
    };
  }
}

// Represents a conversation participant (human agent, virtual agent, end-user).
message Participant {
  option (google.api.resource) = {
    type: "dialogflow.googleapis.com/Participant"
    pattern: "projects/{project}/conversations/{conversation}/participants/{participant}"
    pattern: "projects/{project}/locations/{location}/conversations/{conversation}/participants/{participant}"
  };

  // Enumeration of the roles a participant can play in a conversation.
  enum Role {
    // Participant role not set.
    ROLE_UNSPECIFIED = 0;

    // Participant is a human agent.
    HUMAN_AGENT = 1;

    // Participant is an automated agent, such as a Dialogflow agent.
    AUTOMATED_AGENT = 2;

    // Participant is an end user that has called or chatted with
    // Dialogflow services.
    END_USER = 3;
  }

  // Optional. The unique identifier of this participant.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string name = 1 [(google.api.field_behavior) = OPTIONAL];

  // Immutable. The role this participant plays in the conversation. This field must be set
  // during participant creation and is then immutable.
  Role role = 2 [(google.api.field_behavior) = IMMUTABLE];

  // Optional. Obfuscated user id that should be associated with the created participant.
  //
  // You can specify a user id as follows:
  //
  // 1. If you set this field in
  //    [CreateParticipantRequest][google.cloud.dialogflow.v2beta1.CreateParticipantRequest.participant] or
  //    [UpdateParticipantRequest][google.cloud.dialogflow.v2beta1.UpdateParticipantRequest.participant],
  //    Dialogflow adds the obfuscated user id with the participant.
  //
  // 2. If you set this field in
  //    [AnalyzeContent][google.cloud.dialogflow.v2beta1.AnalyzeContentRequest.obfuscated_external_user_id] or
  //    [StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.obfuscated_external_user_id],
  //    Dialogflow will update [Participant.obfuscated_external_user_id][google.cloud.dialogflow.v2beta1.Participant.obfuscated_external_user_id].
  //
  // Dialogflow uses this user id for following purposes:
  // 1) Billing and measurement. If user with the same
  //    obfuscated_external_user_id is created in a later conversation, dialogflow
  //    will know it's the same user.
  // 2) Agent assist suggestion personalization.
  //    For example, Dialogflow can use it to provide personalized smart reply
  //    suggestions for this user.
  //
  // Note:
  //
  // * Please never pass raw user ids to Dialogflow. Always obfuscate your user
  //   id first.
  // * Dialogflow only accepts a UTF-8 encoded string, e.g., a hex digest of a
  //   hash function like SHA-512.
  // * The length of the user id must be <= 256 characters.
  string obfuscated_external_user_id = 7 [(google.api.field_behavior) = OPTIONAL];
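
  // NOTE (editor): Minimal sketch (not part of the API) of producing an
  // obfuscated id as described above -- a UTF-8 hex digest of a hash function
  // such as SHA-512, which stays well under the 256-character limit:
  //
  //   import hashlib
  //
  //   def obfuscate_user_id(raw_user_id: str) -> str:
  //       # 128 hex characters; never send the raw id itself to Dialogflow.
  //       return hashlib.sha512(raw_user_id.encode("utf-8")).hexdigest()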

  // Optional. Key-value filters on the metadata of documents returned by article
  // suggestion. If specified, article suggestion only returns suggested
  // documents that match all filters in their [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. Multiple
  // values for a metadata key should be concatenated by comma. For example,
  // filters to match all documents that have 'US' or 'CA' in their market
  // metadata values and 'agent' in their user metadata values will be
  // ```
  // documents_metadata_filters {
  //   key: "market"
  //   value: "US,CA"
  // }
  // documents_metadata_filters {
  //   key: "user"
  //   value: "agent"
  // }
  // ```
  map<string, string> documents_metadata_filters = 8 [(google.api.field_behavior) = OPTIONAL];
}

// Represents a message posted into a conversation.
message Message {
  option (google.api.resource) = {
    type: "dialogflow.googleapis.com/Message"
    pattern: "projects/{project}/conversations/{conversation}/messages/{message}"
    pattern: "projects/{project}/locations/{location}/conversations/{conversation}/messages/{message}"
  };

  // Optional. The unique identifier of the message.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string name = 1 [(google.api.field_behavior) = OPTIONAL];

  // Required. The message content.
  string content = 2 [(google.api.field_behavior) = REQUIRED];

  // Optional. The message language.
  // This should be a [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt)
  // language tag. Example: "en-US".
  string language_code = 3 [(google.api.field_behavior) = OPTIONAL];

  // Output only. The participant that sends this message.
  string participant = 4 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The role of the participant.
  Participant.Role participant_role = 5 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time when the message was created in Contact Center AI.
  google.protobuf.Timestamp create_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Optional. The time when the message was sent.
  google.protobuf.Timestamp send_time = 9 [(google.api.field_behavior) = OPTIONAL];

  // Output only. The annotation for the message.
  MessageAnnotation message_annotation = 7 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The sentiment analysis result for the message.
  SentimentAnalysisResult sentiment_analysis = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
}

// The request message for [Participants.CreateParticipant][google.cloud.dialogflow.v2beta1.Participants.CreateParticipant].
message CreateParticipantRequest {
  // Required. Resource identifier of the conversation adding the participant.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      child_type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Required. The participant to create.
  Participant participant = 2 [(google.api.field_behavior) = REQUIRED];
}

// The request message for [Participants.GetParticipant][google.cloud.dialogflow.v2beta1.Participants.GetParticipant].
message GetParticipantRequest {
  // Required. The name of the participant. Format:
  // `projects/<Project ID>/locations/<Location ID>/conversations/<Conversation
  // ID>/participants/<Participant ID>`.
  string name = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];
}

// The request message for [Participants.ListParticipants][google.cloud.dialogflow.v2beta1.Participants.ListParticipants].
message ListParticipantsRequest {
  // Required. The conversation to list all participants from.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      child_type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Optional. The maximum number of items to return in a single page. By
  // default 100 and at most 1000.
  int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL];

  // Optional. The next_page_token value returned from a previous list request.
  string page_token = 3 [(google.api.field_behavior) = OPTIONAL];
}

// The response message for [Participants.ListParticipants][google.cloud.dialogflow.v2beta1.Participants.ListParticipants].
message ListParticipantsResponse {
  // The list of participants. There is a maximum number of items
  // returned based on the page_size field in the request.
  repeated Participant participants = 1;

  // Token to retrieve the next page of results or empty if there are no
  // more results in the list.
  string next_page_token = 2;
}

// The request message for [Participants.UpdateParticipant][google.cloud.dialogflow.v2beta1.Participants.UpdateParticipant].
message UpdateParticipantRequest {
  // Required. The participant to update.
  Participant participant = 1 [(google.api.field_behavior) = REQUIRED];

  // Required. The mask to specify which fields to update.
  google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED];
}

// Represents the natural language speech audio to be processed.
message AudioInput {
  // Required. Instructs the speech recognizer how to process the speech audio.
  InputAudioConfig config = 1;

  // Required. The natural language speech audio to be processed.
  // A single request can contain up to 1 minute of speech audio data.
  // The transcribed text cannot contain more than 256 bytes for virtual agent
  // interactions.
  bytes audio = 2;
}

// Represents the natural language speech audio to be played to the end user.
message OutputAudio {
  // Required. Instructs the speech synthesizer how to generate the speech
  // audio.
  OutputAudioConfig config = 1;

  // Required. The natural language speech audio.
  bytes audio = 2;
}

// Represents a response from an automated agent.
message AutomatedAgentReply {
  // Represents different automated agent reply types.
  enum AutomatedAgentReplyType {
    // Not specified. This should never happen.
    AUTOMATED_AGENT_REPLY_TYPE_UNSPECIFIED = 0;

    // Partial reply. e.g. Aggregated responses in a `Fulfillment` that enables
    // `return_partial_response` can be returned as partial reply.
    // WARNING: partial reply is not eligible for barge-in.
    PARTIAL = 1;

    // Final reply.
    FINAL = 2;
  }

  // Required.
  oneof response {
    // Response of the Dialogflow [Sessions.DetectIntent][google.cloud.dialogflow.v2beta1.Sessions.DetectIntent] call.
    DetectIntentResponse detect_intent_response = 1;
  }

  // Response messages from the automated agent.
  repeated ResponseMessage response_messages = 3;

  // Info on the query match for the automated agent response.
  oneof match {
    // Name of the intent if an intent is matched for the query.
    // For a V2 query, the value format is `projects/<Project ID>/locations/
    // <Location ID>/agent/intents/<Intent ID>`.
    // For a V3 query, the value format is `projects/<Project ID>/locations/
    // <Location ID>/agents/<Agent ID>/intents/<Intent ID>`.
    string intent = 4 [(google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Intent"
    }];

    // Event name if an event is triggered for the query.
    string event = 5;
  }

  // The confidence of the match. Values range from 0.0 (completely uncertain)
  // to 1.0 (completely certain).
  // This value is for informational purpose only and is only used to help match
  // the best intent within the classification threshold. This value may change
  // for the same end-user expression at any time due to a model retraining or
  // change in implementation.
  float match_confidence = 9;

  // The collection of current parameters at the time of this response.
  google.protobuf.Struct parameters = 10;

  // The collection of current Dialogflow CX agent session parameters at the
  // time of this response.
  // Deprecated: Use `parameters` instead.
  google.protobuf.Struct cx_session_parameters = 6 [deprecated = true];

  // AutomatedAgentReply type.
  AutomatedAgentReplyType automated_agent_reply_type = 7;

  // Indicates whether the partial automated agent reply is interruptible when a
  // later reply message arrives. e.g. if the agent specified some music as
  // partial response, it can be cancelled.
  bool allow_cancellation = 8;

  // The unique identifier of the current Dialogflow CX conversation page.
  // Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
  // ID>/flows/<Flow ID>/pages/<Page ID>`.
  string cx_current_page = 11;
}

// The type of Human Agent Assistant API suggestion to perform, and the maximum
// number of results to return for that type. Multiple `Feature` objects can
// be specified in the `features` list.
message SuggestionFeature {
  // Defines the type of Human Agent Assistant feature.
  enum Type {
    // Unspecified feature type.
    TYPE_UNSPECIFIED = 0;

    // Run article suggestion model for chat.
    ARTICLE_SUGGESTION = 1;

    // Run FAQ model.
    FAQ = 2;

    // Run smart reply model for chat.
    SMART_REPLY = 3;

    // Run conversation summarization model for chat.
    CONVERSATION_SUMMARIZATION = 8;
  }

  // Type of Human Agent Assistant API feature to request.
  Type type = 1;
}

// Represents the parameters of human assist query.
message AssistQueryParameters {
  // Key-value filters on the metadata of documents returned by article
  // suggestion. If specified, article suggestion only returns suggested
  // documents that match all filters in their [Document.metadata][google.cloud.dialogflow.v2beta1.Document.metadata]. Multiple
  // values for a metadata key should be concatenated by comma. For example,
  // filters to match all documents that have 'US' or 'CA' in their market
  // metadata values and 'agent' in their user metadata values will be
  // ```
  // documents_metadata_filters {
  //   key: "market"
  //   value: "US,CA"
  // }
  // documents_metadata_filters {
  //   key: "user"
  //   value: "agent"
  // }
  // ```
  map<string, string> documents_metadata_filters = 1;
}

// The request message for [Participants.AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent].
message AnalyzeContentRequest {
  // Required. The name of the participant this text comes from.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string participant = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Required. The input content.
  oneof input {
    // The natural language text to be processed.
    TextInput text_input = 6;

    // The natural language speech audio to be processed.
    AudioInput audio_input = 7;

    // An input event to send to Dialogflow.
    EventInput event_input = 8;
  }

  // Speech synthesis configuration.
  // The speech synthesis settings for a virtual agent that may be configured
  // for the associated conversation profile are not used when calling
  // AnalyzeContent. If this configuration is not supplied, speech synthesis
  // is disabled.
  OutputAudioConfig reply_audio_config = 5;

  // Parameters for a Dialogflow virtual-agent query.
  QueryParameters query_params = 9;

  // Parameters for a human assist query.
  AssistQueryParameters assist_query_params = 14;

  // Additional parameters to be put into Dialogflow CX session parameters. To
  // remove a parameter from the session, clients should explicitly set the
  // parameter value to null.
  //
  // Note: this field should only be used if you are connecting to a Dialogflow
  // CX agent.
  google.protobuf.Struct cx_parameters = 18;

  // The unique identifier of the CX page to override the `current_page` in the
  // session.
  // Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
  // ID>/flows/<Flow ID>/pages/<Page ID>`.
  //
  // If `cx_current_page` is specified, the previous state of the session will
  // be ignored by Dialogflow CX, including the [previous
  // page][QueryResult.current_page] and the [previous session
  // parameters][QueryResult.parameters]. In most cases, `cx_current_page` and
  // `cx_parameters` should be configured together to direct a session to a
  // specific state.
  //
  // Note: this field should only be used if you are connecting to a Dialogflow
  // CX agent.
  string cx_current_page = 20;

  // Optional. The send time of the message from end user or human agent's
  // perspective. It is used for identifying the same message under one
  // participant.
  //
  // Given two messages under the same participant:
  // - If send time are different regardless of whether the content of the
  //   messages are exactly the same, the conversation will regard them as
  //   two distinct messages sent by the participant.
  // - If send time is the same regardless of whether the content of the
  //   messages are exactly the same, the conversation will regard them as
  //   same message, and ignore the message received later.
  //
  // If the value is not provided, a new request will always be regarded as a
  // new message without any de-duplication.
  google.protobuf.Timestamp message_send_time = 10;

  // A unique identifier for this request. Restricted to 36 ASCII characters.
  // A random UUID is recommended.
  // This request is only idempotent if a `request_id` is provided.
  string request_id = 11;
}
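
// NOTE (editor): Illustrative only; not part of this API definition. A sketch
// of populating `request_id` and `message_send_time` so that retries are
// idempotent and duplicate messages can be de-duplicated, assuming the
// `google-cloud-dialogflow` Python client and a hypothetical participant name.
//
//   import uuid
//   from google.cloud import dialogflow_v2beta1 as dialogflow
//   from google.protobuf import timestamp_pb2
//
//   send_time = timestamp_pb2.Timestamp()
//   send_time.GetCurrentTime()
//   request = dialogflow.AnalyzeContentRequest(
//       participant="projects/p/conversations/c/participants/end-user",  # hypothetical
//       text_input=dialogflow.TextInput(text="hello", language_code="en-US"),
//       request_id=str(uuid.uuid4()),   # 36 ASCII characters
//       message_send_time=send_time,    # same send time => treated as the same message
//   )
//   response = dialogflow.ParticipantsClient().analyze_content(request=request)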

// The message in the response that indicates the parameters of DTMF.
message DtmfParameters {
  // Indicates whether DTMF input can be handled in the next request.
  bool accepts_dtmf_input = 1;
}

// The response message for [Participants.AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent].
message AnalyzeContentResponse {
  // Output only. The output text content.
  // This field is set if the automated agent responded with text to show to
  // the user.
  string reply_text = 1;

  // Optional. The audio data bytes encoded as specified in the request.
  // This field is set if:
  //
  // - `reply_audio_config` was specified in the request, or
  // - The automated agent responded with audio to play to the user. In such
  //   case, `reply_audio.config` contains settings used to synthesize the
  //   speech.
  //
  // In some scenarios, multiple output audio fields may be present in the
  // response structure. In these cases, only the top-most-level audio output
  // has content.
  OutputAudio reply_audio = 2;

  // Optional. Only set if a Dialogflow automated agent has responded.
  // Note that: [AutomatedAgentReply.detect_intent_response.output_audio][]
  // and [AutomatedAgentReply.detect_intent_response.output_audio_config][]
  // are always empty, use [reply_audio][google.cloud.dialogflow.v2beta1.AnalyzeContentResponse.reply_audio] instead.
  AutomatedAgentReply automated_agent_reply = 3;

  // Output only. Message analyzed by CCAI.
  Message message = 5;

  // The suggestions for most recent human agent. The order is the same as
  // [HumanAgentAssistantConfig.SuggestionConfig.feature_configs][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.SuggestionConfig.feature_configs] of
  // [HumanAgentAssistantConfig.human_agent_suggestion_config][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.human_agent_suggestion_config].
  //
  // Note that any failure of Agent Assist features will not lead to the overall
  // failure of an AnalyzeContent API call. Instead, the features will
  // fail silently with the error field set in the corresponding
  // SuggestionResult.
  repeated SuggestionResult human_agent_suggestion_results = 6;

  // The suggestions for end user. The order is the same as
  // [HumanAgentAssistantConfig.SuggestionConfig.feature_configs][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.SuggestionConfig.feature_configs] of
  // [HumanAgentAssistantConfig.end_user_suggestion_config][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.end_user_suggestion_config].
  //
  // Same as human_agent_suggestion_results, any failure of Agent Assist
  // features will not lead to the overall failure of an AnalyzeContent API
  // call. Instead, the features will fail silently with the error field set in
  // the corresponding SuggestionResult.
  repeated SuggestionResult end_user_suggestion_results = 7;

  // Indicates the parameters of DTMF.
  DtmfParameters dtmf_parameters = 9;
}
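
// NOTE (editor): Illustrative sketch (not part of the API) of reading the
// per-feature suggestion results described above, where a failed Agent Assist
// feature surfaces as `error` on its SuggestionResult instead of failing the
// whole AnalyzeContent call. Assumes the `google-cloud-dialogflow` Python
// client.
//
//   from google.cloud import dialogflow_v2beta1 as dialogflow
//
//   def print_suggestions(response: dialogflow.AnalyzeContentResponse) -> None:
//       for result in response.human_agent_suggestion_results:
//           kind = dialogflow.SuggestionResult.pb(result).WhichOneof("suggestion_response")
//           if kind == "error":
//               print("feature failed silently:", result.error.message)
//           elif kind == "suggest_articles_response":
//               for answer in result.suggest_articles_response.article_answers:
//                   print("article:", answer.title, answer.uri)
//           elif kind == "suggest_faq_answers_response":
//               for answer in result.suggest_faq_answers_response.faq_answers:
//                   print("faq:", answer.question, "->", answer.answer)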

// Defines the language used in the input text.
message InputTextConfig {
  // Required. The language of this conversational query. See [Language
  // Support](https://cloud.google.com/dialogflow/docs/reference/language)
  // for a list of the currently supported language codes.
  string language_code = 1;
}

// The top-level message sent by the client to the
// [Participants.StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent] method.
//
// Multiple request messages should be sent in order:
//
// 1. The first message must contain
//    [participant][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.participant],
//    [config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.config] and optionally
//    [query_params][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.query_params]. If you want
//    to receive an audio response, it should also contain
//    [reply_audio_config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.reply_audio_config].
//    The message must not contain
//    [input][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.input].
//
// 2. If [config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.config] in the first message
//    was set to [audio_config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.audio_config],
//    all subsequent messages must contain
//    [input_audio][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.input_audio] to continue
//    with Speech recognition.
//    If you decide to rather analyze text input after you already started
//    Speech recognition, please send a message with
//    [StreamingAnalyzeContentRequest.input_text][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.input_text].
//
//    However, note that:
//
//    * Dialogflow will bill you for the audio so far.
//    * Dialogflow discards all Speech recognition results in favor of the
//      text input.
//
// 3. If [StreamingAnalyzeContentRequest.config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.config] in the first message was set
//    to [StreamingAnalyzeContentRequest.text_config][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.text_config], then the second message
//    must contain only [input_text][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentRequest.input_text].
//    Moreover, you must not send more than two messages.
//
// After you sent all input, you must half-close or abort the request stream.
message StreamingAnalyzeContentRequest {
  // Required. The name of the participant this text comes from.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string participant = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Required. The input config.
  oneof config {
    // Instructs the speech recognizer how to process the speech audio.
    InputAudioConfig audio_config = 2;

    // The natural language text to be processed.
    InputTextConfig text_config = 3;
  }

  // Speech synthesis configuration.
  // The speech synthesis settings for a virtual agent that may be configured
  // for the associated conversation profile are not used when calling
  // StreamingAnalyzeContent. If this configuration is not supplied, speech
  // synthesis is disabled.
  OutputAudioConfig reply_audio_config = 4;

  // Required. The input.
  oneof input {
    // The input audio content to be recognized. Must be sent if `audio_config`
    // is set in the first message. The complete audio over all streaming
    // messages must not exceed 1 minute.
    bytes input_audio = 5;

    // The UTF-8 encoded natural language text to be processed. Must be sent if
    // `text_config` is set in the first message. Text length must not exceed
    // 256 bytes for virtual agent interactions. The `input_text` field can be
    // only sent once.
    string input_text = 6;

    // The DTMF digits used to invoke intent and fill in parameter value.
    //
    // This input is ignored if the previous response indicated that DTMF input
    // is not accepted.
    TelephonyDtmfEvents input_dtmf = 9;
  }

  // Parameters for a Dialogflow virtual-agent query.
  QueryParameters query_params = 7;

  // Parameters for a human assist query.
  AssistQueryParameters assist_query_params = 8;

  // Additional parameters to be put into Dialogflow CX session parameters. To
  // remove a parameter from the session, clients should explicitly set the
  // parameter value to null.
  //
  // Note: this field should only be used if you are connecting to a Dialogflow
  // CX agent.
  google.protobuf.Struct cx_parameters = 13;

  // The unique identifier of the CX page to override the `current_page` in the
  // session.
  // Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
  // ID>/flows/<Flow ID>/pages/<Page ID>`.
  //
  // If `cx_current_page` is specified, the previous state of the session will
  // be ignored by Dialogflow CX, including the [previous
  // page][QueryResult.current_page] and the [previous session
  // parameters][QueryResult.parameters]. In most cases, `cx_current_page` and
  // `cx_parameters` should be configured together to direct a session to a
  // specific state.
  //
  // Note: this field should only be used if you are connecting to a Dialogflow
  // CX agent.
  string cx_current_page = 15;

  // Enable partial virtual agent responses. If this flag is not enabled,
  // response stream still contains only one final response even if some
  // `Fulfillment`s in Dialogflow virtual agent have been configured to return
  // partial responses.
  bool enable_partial_automated_agent_reply = 12;
}

// The top-level message returned from the `StreamingAnalyzeContent` method.
//
// Multiple response messages can be returned in order:
//
// 1. If the input was set to streaming audio, the first one or more messages
//    contain `recognition_result`. Each `recognition_result` represents a more
//    complete transcript of what the user said. The last `recognition_result`
//    has `is_final` set to `true`.
//
// 2. In virtual agent stage: if `enable_partial_automated_agent_reply` is
//    true, the following N (currently 1 <= N <= 4) messages
//    contain `automated_agent_reply` and optionally `reply_audio`
//    returned by the virtual agent. The first (N-1)
//    `automated_agent_reply`s will have `automated_agent_reply_type` set to
//    `PARTIAL`. The last `automated_agent_reply` has
//    `automated_agent_reply_type` set to `FINAL`.
//    If `enable_partial_automated_agent_reply` is not enabled, response stream
//    only contains the final reply.
//
//    In human assist stage: the following N (N >= 1) messages contain
//    `human_agent_suggestion_results`, `end_user_suggestion_results` or
//    `message`.
message StreamingAnalyzeContentResponse {
  // The result of speech recognition.
  StreamingRecognitionResult recognition_result = 1;

  // Optional. The output text content.
  // This field is set if an automated agent responded with a text for the user.
  string reply_text = 2;

  // Optional. The audio data bytes encoded as specified in the request.
  // This field is set if:
  //
  // - The `reply_audio_config` field is specified in the request.
  // - The automated agent, which this output comes from, responded with audio.
  //   In such case, the `reply_audio.config` field contains settings used to
  //   synthesize the speech.
  //
  // In some scenarios, multiple output audio fields may be present in the
  // response structure. In these cases, only the top-most-level audio output
  // has content.
  OutputAudio reply_audio = 3;

  // Optional. Only set if a Dialogflow automated agent has responded.
  // Note that: [AutomatedAgentReply.detect_intent_response.output_audio][]
  // and [AutomatedAgentReply.detect_intent_response.output_audio_config][]
  // are always empty, use [reply_audio][google.cloud.dialogflow.v2beta1.StreamingAnalyzeContentResponse.reply_audio] instead.
  AutomatedAgentReply automated_agent_reply = 4;

  // Output only. Message analyzed by CCAI.
  Message message = 6;

  // The suggestions for most recent human agent. The order is the same as
  // [HumanAgentAssistantConfig.SuggestionConfig.feature_configs][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.SuggestionConfig.feature_configs] of
  // [HumanAgentAssistantConfig.human_agent_suggestion_config][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.human_agent_suggestion_config].
  repeated SuggestionResult human_agent_suggestion_results = 7;

  // The suggestions for end user. The order is the same as
  // [HumanAgentAssistantConfig.SuggestionConfig.feature_configs][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.SuggestionConfig.feature_configs] of
  // [HumanAgentAssistantConfig.end_user_suggestion_config][google.cloud.dialogflow.v2beta1.HumanAgentAssistantConfig.end_user_suggestion_config].
  repeated SuggestionResult end_user_suggestion_results = 8;

  // Indicates the parameters of DTMF.
  DtmfParameters dtmf_parameters = 10;
}

// Represents a part of a message possibly annotated with an entity. The part
// can be an entity or purely a part of the message between two entities or
// message start/end.
message AnnotatedMessagePart {
  // Required. A part of a message possibly annotated with an entity.
  string text = 1;

  // Optional. The [Dialogflow system entity
  // type](https://cloud.google.com/dialogflow/docs/reference/system-entities)
  // of this message part. If this is empty, Dialogflow could not annotate the
  // phrase part with a system entity.
  string entity_type = 2;

  // Optional. The [Dialogflow system entity formatted value
  // ](https://cloud.google.com/dialogflow/docs/reference/system-entities) of
  // this message part. For example for a system entity of type
  // `@sys.unit-currency`, this may contain:
  // <pre>
  // {
  //   "amount": 5,
  //   "currency": "USD"
  // }
  // </pre>
  google.protobuf.Value formatted_value = 3;
}

// Represents the result of annotation for the message.
message MessageAnnotation {
  // Optional. The collection of annotated message parts ordered by their
  // position in the message. You can recover the annotated message by
  // concatenating [AnnotatedMessagePart.text].
  repeated AnnotatedMessagePart parts = 1;

  // Required. Indicates whether the text message contains entities.
  bool contain_entities = 2;
}

// Represents article answer.
message ArticleAnswer {
  // The article title.
  string title = 1;

  // The article URI.
  string uri = 2;

  // Output only. Article snippets.
  repeated string snippets = 3;

  // A map that contains metadata about the answer and the
  // document from which it originates.
  map<string, string> metadata = 5;

  // The name of answer record, in the format of
  // "projects/<Project ID>/locations/<Location ID>/answerRecords/<Answer Record
  // ID>"
  string answer_record = 6;
}

// Represents answer from "frequently asked questions".
message FaqAnswer {
  // The piece of text from the `source` knowledge base document.
  string answer = 1;

  // The system's confidence score that this Knowledge answer is a good match
  // for this conversational query, range from 0.0 (completely uncertain)
  // to 1.0 (completely certain).
  float confidence = 2;

  // The corresponding FAQ question.
  string question = 3;

  // Indicates which Knowledge Document this answer was extracted
  // from.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/agent/knowledgeBases/<Knowledge Base ID>/documents/<Document ID>`.
  string source = 4;

  // A map that contains metadata about the answer and the
  // document from which it originates.
  map<string, string> metadata = 5;

  // The name of answer record, in the format of
  // "projects/<Project ID>/locations/<Location ID>/answerRecords/<Answer Record
  // ID>"
  string answer_record = 6;
}

// Represents a smart reply answer.
message SmartReplyAnswer {
  // The content of the reply.
  string reply = 1;

  // Smart reply confidence.
  // The system's confidence score that this reply is a good match for
  // this conversation, as a value from 0.0 (completely uncertain) to 1.0
  // (completely certain).
  float confidence = 2;

  // The name of answer record, in the format of
  // "projects/<Project ID>/locations/<Location ID>/answerRecords/<Answer Record
  // ID>"
  string answer_record = 3 [(google.api.resource_reference) = {
    type: "dialogflow.googleapis.com/AnswerRecord"
  }];
}

// One response of different type of suggestion response which is used in
// the response of [Participants.AnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.AnalyzeContent] and
// [Participants.StreamingAnalyzeContent][google.cloud.dialogflow.v2beta1.Participants.StreamingAnalyzeContent], as well as [HumanAgentAssistantEvent][google.cloud.dialogflow.v2beta1.HumanAgentAssistantEvent].
message SuggestionResult {
  // Different type of suggestion response.
  oneof suggestion_response {
    // Error status if the request failed.
    google.rpc.Status error = 1;

    // SuggestArticlesResponse if request is for ARTICLE_SUGGESTION.
    SuggestArticlesResponse suggest_articles_response = 2;

    // SuggestFaqAnswersResponse if request is for FAQ_ANSWER.
    SuggestFaqAnswersResponse suggest_faq_answers_response = 3;

    // SuggestSmartRepliesResponse if request is for SMART_REPLY.
    SuggestSmartRepliesResponse suggest_smart_replies_response = 4;
  }
}

// The request message for [Participants.SuggestArticles][google.cloud.dialogflow.v2beta1.Participants.SuggestArticles].
message SuggestArticlesRequest {
  // Required. The name of the participant to fetch suggestion for.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Optional. The name of the latest conversation message to compile suggestion
  // for. If empty, it will be the latest message of the conversation.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2 [
    (google.api.field_behavior) = OPTIONAL,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Message"
    }
  ];

  // Optional. Max number of messages prior to and including
  // [latest_message][google.cloud.dialogflow.v2beta1.SuggestArticlesRequest.latest_message] to use as context
  // when compiling the suggestion. By default 20 and at most 50.
  int32 context_size = 3 [(google.api.field_behavior) = OPTIONAL];

  // Optional. Parameters for a human assist query.
  AssistQueryParameters assist_query_params = 4 [(google.api.field_behavior) = OPTIONAL];
}
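
// NOTE (editor): Illustrative only; not part of this API definition. Fetching
// article suggestions for the most recent message of a conversation, assuming
// the `google-cloud-dialogflow` Python client and a hypothetical participant
// resource name.
//
//   from google.cloud import dialogflow_v2beta1 as dialogflow
//
//   client = dialogflow.ParticipantsClient()
//   response = client.suggest_articles(
//       parent="projects/my-project/conversations/my-conversation/participants/agent",
//   )
//   for answer in response.article_answers:
//       print(answer.title, answer.uri, answer.answer_record)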

// The response message for [Participants.SuggestArticles][google.cloud.dialogflow.v2beta1.Participants.SuggestArticles].
message SuggestArticlesResponse {
  // Output only. Articles ordered by score in descending order.
  repeated ArticleAnswer article_answers = 1;

  // The name of the latest conversation message used to compile
  // suggestion for.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2;

  // Number of messages prior to and including
  // [latest_message][google.cloud.dialogflow.v2beta1.SuggestArticlesResponse.latest_message] to compile the
  // suggestion. It may be smaller than the
  // [SuggestArticlesResponse.context_size][google.cloud.dialogflow.v2beta1.SuggestArticlesResponse.context_size] field in the request if there
  // aren't that many messages in the conversation.
  int32 context_size = 3;
}

// The request message for [Participants.SuggestFaqAnswers][google.cloud.dialogflow.v2beta1.Participants.SuggestFaqAnswers].
message SuggestFaqAnswersRequest {
  // Required. The name of the participant to fetch suggestion for.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // Optional. The name of the latest conversation message to compile suggestion
  // for. If empty, it will be the latest message of the conversation.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2 [
    (google.api.field_behavior) = OPTIONAL,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Message"
    }
  ];

  // Optional. Max number of messages prior to and including
  // [latest_message] to use as context when compiling the
  // suggestion. By default 20 and at most 50.
  int32 context_size = 3 [(google.api.field_behavior) = OPTIONAL];

  // Optional. Parameters for a human assist query.
  AssistQueryParameters assist_query_params = 4 [(google.api.field_behavior) = OPTIONAL];
}

// The response message for [Participants.SuggestFaqAnswers][google.cloud.dialogflow.v2beta1.Participants.SuggestFaqAnswers].
  955. message SuggestFaqAnswersResponse {
  956. // Output only. Answers extracted from FAQ documents.
  957. repeated FaqAnswer faq_answers = 1;
  958. // The name of the latest conversation message used to compile
  959. // suggestion for.
  960. //
  961. // Format: `projects/<Project ID>/locations/<Location
  962. // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  963. string latest_message = 2;
  964. // Number of messages prior to and including
  965. // [latest_message][google.cloud.dialogflow.v2beta1.SuggestFaqAnswersResponse.latest_message] to compile the
  966. // suggestion. It may be smaller than the
  967. // [SuggestFaqAnswersRequest.context_size][google.cloud.dialogflow.v2beta1.SuggestFaqAnswersRequest.context_size] field in the request if there
  968. // aren't that many messages in the conversation.
  969. int32 context_size = 3;
  970. }
// The request message for [Participants.SuggestSmartReplies][google.cloud.dialogflow.v2beta1.Participants.SuggestSmartReplies].
message SuggestSmartRepliesRequest {
  // Required. The name of the participant to fetch suggestion for.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string parent = 1 [
    (google.api.field_behavior) = REQUIRED,
    (google.api.resource_reference) = {
      type: "dialogflow.googleapis.com/Participant"
    }
  ];

  // The current natural language text segment to compile suggestion
  // for. This provides a way for the user to get follow-up smart reply
  // suggestions after a smart reply selection, without sending a text message.
  TextInput current_text_input = 4;

  // The name of the latest conversation message to compile suggestion
  // for. If empty, it will be the latest message of the conversation.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2 [(google.api.resource_reference) = {
    type: "dialogflow.googleapis.com/Message"
  }];

  // Optional. Max number of messages prior to and including
  // [latest_message] to use as context when compiling the
  // suggestion. By default 20 and at most 50.
  int32 context_size = 3;
}
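// Illustrative sketch: a SuggestSmartRepliesRequest in proto text format with
// hypothetical IDs. The current_text_input entry assumes the `text` and
// `language_code` fields of TextInput; it lets the client ask for follow-up
// replies without posting a new message to the conversation.
//
//   parent: "projects/my-project/locations/global/conversations/my-conversation/participants/my-participant"
//   current_text_input {
//     text: "I still can't sign in"
//     language_code: "en-US"
//   }
//   context_size: 10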
// The response message for [Participants.SuggestSmartReplies][google.cloud.dialogflow.v2beta1.Participants.SuggestSmartReplies].
message SuggestSmartRepliesResponse {
  // Output only. Multiple reply options provided by the smart reply service.
  // The order is based on the rank of the model prediction.
  // The maximum number of returned replies is set in SmartReplyConfig.
  repeated SmartReplyAnswer smart_reply_answers = 1;

  // The name of the latest conversation message used to compile the
  // suggestion.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2 [(google.api.resource_reference) = {
    type: "dialogflow.googleapis.com/Message"
  }];

  // Number of messages prior to and including
  // [latest_message][google.cloud.dialogflow.v2beta1.SuggestSmartRepliesResponse.latest_message] to compile the
  // suggestion. It may be smaller than the
  // [SuggestSmartRepliesRequest.context_size][google.cloud.dialogflow.v2beta1.SuggestSmartRepliesRequest.context_size] field in the request if there
  // aren't that many messages in the conversation.
  int32 context_size = 3;
}
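// Illustrative sketch: one possible SuggestSmartRepliesResponse in proto text
// format. The smart_reply_answers entry assumes the `reply` and `confidence`
// fields of SmartReplyAnswer; all values below are hypothetical.
//
//   smart_reply_answers {
//     reply: "Sure, I can help with that."
//     confidence: 0.87
//   }
//   latest_message: "projects/my-project/locations/global/conversations/my-conversation/messages/my-message"
//   context_size: 10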
// Represents a suggestion for a human agent.
message Suggestion {
  option deprecated = true;

  // Represents a suggested article.
  message Article {
    // Output only. The article title.
    string title = 1;

    // Output only. The article URI.
    string uri = 2;

    // Output only. Article snippets.
    repeated string snippets = 3;

    // Output only. A map that contains metadata about the answer and the
    // document from which it originates.
    map<string, string> metadata = 5;

    // Output only. The name of the answer record, in the format of
    // "projects/<Project ID>/locations/<Location ID>/answerRecords/<Answer
    // Record ID>"
    string answer_record = 6;
  }

  // Represents a suggested answer from "frequently asked questions".
  message FaqAnswer {
    // Output only. The piece of text from the `source` knowledge base document.
    string answer = 1;

    // The system's confidence score that this Knowledge answer is a good match
    // for this conversational query, ranging from 0.0 (completely uncertain)
    // to 1.0 (completely certain).
    float confidence = 2;

    // Output only. The corresponding FAQ question.
    string question = 3;

    // Output only. Indicates which Knowledge Document this answer was extracted
    // from.
    // Format: `projects/<Project ID>/locations/<Location
    // ID>/agent/knowledgeBases/<Knowledge Base ID>/documents/<Document ID>`.
    string source = 4;

    // Output only. A map that contains metadata about the answer and the
    // document from which it originates.
    map<string, string> metadata = 5;

    // Output only. The name of the answer record, in the format of
    // "projects/<Project ID>/locations/<Location ID>/answerRecords/<Answer
    // Record ID>"
    string answer_record = 6;
  }

  // Output only. The name of this suggestion.
  // Format:
  // `projects/<Project ID>/locations/<Location ID>/conversations/<Conversation
  // ID>/participants/*/suggestions/<Suggestion ID>`.
  string name = 1;

  // Output only. Articles ordered by score in descending order.
  repeated Article articles = 2;

  // Output only. Answers extracted from FAQ documents.
  repeated FaqAnswer faq_answers = 4;

  // Output only. The time the suggestion was created.
  google.protobuf.Timestamp create_time = 5;

  // Output only. Latest message used as context to compile this suggestion.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 7;
}
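// Illustrative sketch (Suggestion is deprecated; shown for completeness): a
// Suggestion in proto text format with a single nested Article. All resource
// IDs and values below are hypothetical.
//
//   name: "projects/my-project/locations/global/conversations/my-conversation/participants/my-participant/suggestions/my-suggestion"
//   articles {
//     title: "How to reset your password"
//     uri: "https://example.com/help/reset-password"
//     snippets: "Open Settings > Security and choose Reset password."
//   }
//   latest_message: "projects/my-project/locations/global/conversations/my-conversation/messages/my-message"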
// The request message for [Participants.ListSuggestions][google.cloud.dialogflow.v2beta1.Participants.ListSuggestions].
message ListSuggestionsRequest {
  option deprecated = true;

  // Required. The name of the participant to fetch suggestions for.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string parent = 1;

  // Optional. The maximum number of items to return in a single page. The
  // default value is 100; the maximum value is 1000.
  int32 page_size = 2;

  // Optional. The next_page_token value returned from a previous list request.
  string page_token = 3;

  // Optional. Filter on suggestion fields. Currently predicates on
  // `create_time` and `create_time_epoch_microseconds` are supported.
  // `create_time` only supports millisecond accuracy. E.g.,
  // `create_time_epoch_microseconds > 1551790877964485` or
  // `create_time > "2017-01-15T01:30:15.01Z"`
  //
  // For more information about filtering, see
  // [API Filtering](https://aip.dev/160).
  string filter = 4;
}
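// Illustrative sketch (deprecated message): a ListSuggestionsRequest in proto
// text format that pages through suggestions created after a given time,
// using the `create_time` predicate described above. Resource IDs are
// hypothetical; note the escaped quotes around the timestamp literal.
//
//   parent: "projects/my-project/locations/global/conversations/my-conversation/participants/my-participant"
//   page_size: 50
//   filter: "create_time > \"2017-01-15T01:30:15.01Z\""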
// The response message for [Participants.ListSuggestions][google.cloud.dialogflow.v2beta1.Participants.ListSuggestions].
message ListSuggestionsResponse {
  option deprecated = true;

  // Required. The list of suggestions. There will be a maximum number of items
  // returned based on the page_size field in the request. `suggestions` is
  // sorted by `create_time` in descending order.
  repeated Suggestion suggestions = 1;

  // Optional. Token to retrieve the next page of results or empty if there are
  // no more results in the list.
  string next_page_token = 2;
}
// The request message for [Participants.CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion].
message CompileSuggestionRequest {
  option deprecated = true;

  // Required. The name of the participant to fetch suggestion for.
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/participants/<Participant ID>`.
  string parent = 1;

  // Optional. The name of the latest conversation message to compile suggestion
  // for. If empty, it will be the latest message of the conversation.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2;

  // Optional. Max number of messages prior to and including
  // [latest_message] to use as context when compiling the
  // suggestion. If zero or negative, 20 is used.
  int32 context_size = 3;
}
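// Illustrative sketch (deprecated message): a CompileSuggestionRequest in
// proto text format with hypothetical IDs. Leaving `latest_message` unset
// compiles the suggestion against the latest message of the conversation.
//
//   parent: "projects/my-project/locations/global/conversations/my-conversation/participants/my-participant"
//   context_size: 10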
// The response message for [Participants.CompileSuggestion][google.cloud.dialogflow.v2beta1.Participants.CompileSuggestion].
message CompileSuggestionResponse {
  option deprecated = true;

  // The compiled suggestion.
  Suggestion suggestion = 1;

  // The name of the latest conversation message used to compile the
  // suggestion.
  //
  // Format: `projects/<Project ID>/locations/<Location
  // ID>/conversations/<Conversation ID>/messages/<Message ID>`.
  string latest_message = 2;

  // Number of messages prior to and including
  // [latest_message][google.cloud.dialogflow.v2beta1.CompileSuggestionResponse.latest_message]
  // to compile the suggestion. It may be smaller than the
  // [CompileSuggestionRequest.context_size][google.cloud.dialogflow.v2beta1.CompileSuggestionRequest.context_size] field in the request if
  // there aren't that many messages in the conversation.
  int32 context_size = 3;
}
// Response messages from an automated agent.
message ResponseMessage {
  // The text response message.
  message Text {
    // A collection of text responses.
    repeated string text = 1;
  }

  // Indicates that the conversation should be handed off to a human agent.
  //
  // Dialogflow only uses this to determine which conversations were handed off
  // to a human agent for measurement purposes. What else to do with this signal
  // is up to you and your handoff procedures.
  //
  // You may set this, for example:
  // * In the entry fulfillment of a CX Page if entering the page indicates
  //   something went extremely wrong in the conversation.
  // * In a webhook response when you determine that the customer issue can only
  //   be handled by a human.
  message LiveAgentHandoff {
    // Custom metadata for your handoff procedure. Dialogflow doesn't impose
    // any structure on this.
    google.protobuf.Struct metadata = 1;
  }

  // Indicates that interaction with the Dialogflow agent has ended.
  message EndInteraction {
  }

  // Represents an audio message that is composed of both segments
  // synthesized from the Dialogflow agent prompts and ones hosted externally
  // at the specified URIs.
  message MixedAudio {
    // Represents one segment of audio.
    message Segment {
      // Content of the segment.
      oneof content {
        // Raw audio synthesized from the Dialogflow agent's response using
        // the output config specified in the request.
        bytes audio = 1;

        // Client-specific URI that points to an audio clip accessible to the
        // client.
        string uri = 2;
      }

      // Whether the playback of this segment can be interrupted by the end
      // user's speech and the client should then start the next Dialogflow
      // request.
      bool allow_playback_interruption = 3;
    }

    // Segments this audio response is composed of.
    repeated Segment segments = 1;
  }

  // Represents the signal that tells the client to transfer the phone call
  // connected to the agent to a third-party endpoint.
  message TelephonyTransferCall {
    // Endpoint to transfer the call to.
    oneof endpoint {
      // Transfer the call to a phone number
      // in [E.164 format](https://en.wikipedia.org/wiki/E.164).
      string phone_number = 1;

      // Transfer the call to a SIP endpoint.
      string sip_uri = 2;
    }
  }

  // Required. The rich response message.
  oneof message {
    // Returns a text response.
    Text text = 1;

    // Returns a response containing a custom, platform-specific payload.
    google.protobuf.Struct payload = 2;

    // Hands off the conversation to a live agent.
    LiveAgentHandoff live_agent_handoff = 3;

    // A signal that indicates the interaction with the Dialogflow agent has
    // ended.
    EndInteraction end_interaction = 4;

    // An audio response message composed of both the synthesized Dialogflow
    // agent responses and audio clips hosted in places known to the client.
    MixedAudio mixed_audio = 5;

    // A signal that the client should transfer the phone call connected to
    // this agent to a third-party endpoint.
    TelephonyTransferCall telephony_transfer_call = 6;
  }
}
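// Illustrative sketch: two ResponseMessage instances in proto text format;
// all values below are hypothetical.
//
// A plain text response:
//
//   text {
//     text: "Your order has shipped."
//   }
//
// A handoff to a live agent, carrying client-defined metadata in the
// google.protobuf.Struct (expressed here with explicit map entries):
//
//   live_agent_handoff {
//     metadata {
//       fields {
//         key: "reason"
//         value { string_value: "customer_requested_agent" }
//       }
//     }
//   }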