12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908 |
- // Copyright 2022 Google LLC
- //
- // Licensed under the Apache License, Version 2.0 (the "License");
- // you may not use this file except in compliance with the License.
- // You may obtain a copy of the License at
- //
- // http://www.apache.org/licenses/LICENSE-2.0
- //
- // Unless required by applicable law or agreed to in writing, software
- // distributed under the License is distributed on an "AS IS" BASIS,
- // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- // See the License for the specific language governing permissions and
- // limitations under the License.
- syntax = "proto3";
- package google.cloud.timeseriesinsights.v1;
- import "google/api/annotations.proto";
- import "google/api/client.proto";
- import "google/api/field_behavior.proto";
- import "google/api/resource.proto";
- import "google/protobuf/duration.proto";
- import "google/protobuf/empty.proto";
- import "google/protobuf/timestamp.proto";
- import "google/rpc/status.proto";
- option cc_enable_arenas = true;
- option go_package = "google.golang.org/genproto/googleapis/cloud/timeseriesinsights/v1;timeseriesinsights";
- option java_multiple_files = true;
- option java_outer_classname = "TimeseriesInsightsProto";
- option java_package = "com.google.cloud.timeseriesinsights.v1";
- // Manages [DataSet][google.cloud.timeseriesinsights.v1.DataSet] resources
- // (create, list, delete, append events) and runs anomaly-detection /
- // forecasting queries against loaded datasets.
- service TimeseriesInsightsController {
- option (google.api.default_host) = "timeseriesinsights.googleapis.com";
- option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform";
- // Lists [DataSets][google.cloud.timeseriesinsights.v1.DataSet] under the project.
- //
- // The order of the results is unspecified but deterministic. Newly created
- // [DataSets][google.cloud.timeseriesinsights.v1.DataSet] will not necessarily be added to the end
- // of this list.
- rpc ListDataSets(ListDataSetsRequest) returns (ListDataSetsResponse) {
- option (google.api.http) = {
- get: "/v1/{parent=projects/*/locations/*}/datasets"
- additional_bindings {
- get: "/v1/{parent=projects/*}/datasets"
- }
- };
- option (google.api.method_signature) = "parent";
- }
- // Create a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from data stored on Cloud
- // Storage.
- //
- // The data must stay immutable while we process the
- // [DataSet][google.cloud.timeseriesinsights.v1.DataSet] creation; otherwise, undefined outcomes
- // might result. For more information, see [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
- rpc CreateDataSet(CreateDataSetRequest) returns (DataSet) {
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/locations/*}/datasets"
- body: "dataset"
- additional_bindings {
- post: "/v1/{parent=projects/*}/datasets"
- body: "dataset"
- }
- };
- option (google.api.method_signature) = "parent,dataset";
- }
- // Delete a [DataSet][google.cloud.timeseriesinsights.v1.DataSet] from the system.
- //
- // **NOTE**: If the [DataSet][google.cloud.timeseriesinsights.v1.DataSet] is still being
- // processed, it will be aborted and deleted.
- rpc DeleteDataSet(DeleteDataSetRequest) returns (google.protobuf.Empty) {
- option (google.api.http) = {
- delete: "/v1/{name=projects/*/locations/*/datasets/*}"
- additional_bindings {
- delete: "/v1/{name=projects/*/datasets/*}"
- }
- };
- option (google.api.method_signature) = "name";
- }
- // Append events to a `LOADED` [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
- rpc AppendEvents(AppendEventsRequest) returns (AppendEventsResponse) {
- option (google.api.http) = {
- post: "/v1/{dataset=projects/*/locations/*/datasets/*}:appendEvents"
- body: "*"
- additional_bindings {
- post: "/v1/{dataset=projects/*/datasets/*}:appendEvents"
- body: "*"
- }
- };
- option (google.api.method_signature) = "dataset,events";
- }
- // Execute a Timeseries Insights query over a loaded
- // [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
- rpc QueryDataSet(QueryDataSetRequest) returns (QueryDataSetResponse) {
- option (google.api.http) = {
- post: "/v1/{name=projects/*/locations/*/datasets/*}:query"
- body: "*"
- additional_bindings {
- post: "/v1/{name=projects/*/datasets/*}:query"
- body: "*"
- }
- };
- }
- // Evaluate an explicit slice from a loaded [DataSet][google.cloud.timeseriesinsights.v1.DataSet].
- rpc EvaluateSlice(EvaluateSliceRequest) returns (EvaluatedSlice) {
- option (google.api.http) = {
- post: "/v1/{dataset=projects/*/locations/*/datasets/*}:evaluateSlice"
- body: "*"
- additional_bindings {
- post: "/v1/{dataset=projects/*/datasets/*}:evaluateSlice"
- body: "*"
- }
- };
- }
- // Evaluate an explicit timeseries.
- rpc EvaluateTimeseries(EvaluateTimeseriesRequest) returns (EvaluatedSlice) {
- option (google.api.http) = {
- post: "/v1/{parent=projects/*/locations/*}/datasets:evaluateTimeseries"
- body: "*"
- additional_bindings {
- post: "/v1/{parent=projects/*}/datasets:evaluateTimeseries"
- body: "*"
- }
- };
- }
- }
- // Mapping of BigQuery columns to timestamp, group_id and dimensions.
- // Consumed via [DataSource.bq_mapping][google.cloud.timeseriesinsights.v1.DataSource.bq_mapping].
- message BigqueryMapping {
- // The column which should be used as the event timestamps. If not specified
- // 'Timestamp' is used by default. The column may have TIMESTAMP or INT64
- // type (the latter is interpreted as microseconds since the Unix epoch).
- string timestamp_column = 1;
- // The column which should be used as the group ID (grouping events into
- // sessions). If not specified 'GroupId' is used by default, if the input
- // table does not have such a column, random unique group IDs are
- // generated automatically (different group ID per input row).
- string group_id_column = 2;
- // The list of columns that should be translated to dimensions. If empty,
- // all columns are translated to dimensions. The timestamp and group_id
- // columns should not be listed here again. Columns are expected to have
- // primitive types (STRING, INT64, FLOAT64 or NUMERIC).
- repeated string dimension_column = 3;
- }
- // A data source consists of multiple [Event][google.cloud.timeseriesinsights.v1.Event] objects stored on
- // Cloud Storage. Each Event should be in JSON format, with one Event
- // per line, also known as JSON Lines format.
- message DataSource {
- // Data source URI.
- //
- // 1) Google Cloud Storage files (JSON) are defined in the following form.
- // `gs://bucket_name/object_name`. For more information on Cloud Storage URIs,
- // please see https://cloud.google.com/storage/docs/reference-uris.
- string uri = 1;
- // For BigQuery inputs defines the columns that should be used for dimensions
- // (including time and group ID).
- // NOTE(review): the `uri` comment above only documents the Cloud Storage
- // form; the BigQuery URI format is not documented here — confirm upstream.
- BigqueryMapping bq_mapping = 2;
- }
- // A collection of data sources sent for processing.
- message DataSet {
- // NOTE(review): the resource type string uses "Dataset" (lowercase 's'),
- // unlike the message name "DataSet". This spelling is consistent with all
- // resource_reference options in this file, so it must not be "fixed" here.
- option (google.api.resource) = {
- type: "timeseriesinsights.googleapis.com/Dataset"
- pattern: "projects/{project}/datasets/{dataset}"
- pattern: "projects/{project}/locations/{location}/datasets/{dataset}"
- };
- // DataSet state.
- enum State {
- // Unspecified / undefined state.
- STATE_UNSPECIFIED = 0;
- // Dataset is unknown to the system; we have never seen this dataset before
- // or we have seen this dataset but have fully GC-ed it.
- UNKNOWN = 1;
- // Dataset processing is pending.
- PENDING = 2;
- // Dataset is loading.
- LOADING = 3;
- // Dataset is loaded and can be queried.
- LOADED = 4;
- // Dataset is unloading.
- UNLOADING = 5;
- // Dataset is unloaded and is removed from the system.
- UNLOADED = 6;
- // Dataset processing failed.
- FAILED = 7;
- }
- // The dataset name, which will be used for querying, status and unload
- // requests. This must be unique within a project.
- string name = 1;
- // [Data dimension names][google.cloud.timeseriesinsights.v1.EventDimension.name] allowed for this `DataSet`.
- //
- // If left empty, all dimension names are included. This field works as a
- // filter to avoid regenerating the data.
- repeated string data_names = 2;
- // Input data.
- repeated DataSource data_sources = 3;
- // Dataset state in the system.
- State state = 4;
- // Dataset processing status.
- google.rpc.Status status = 5;
- // Periodically we discard dataset [Event][google.cloud.timeseriesinsights.v1.Event] objects that have
- // timestamps older than 'ttl'. Omitting this field or a zero value means no
- // events are discarded.
- google.protobuf.Duration ttl = 6;
- }
- // Represents an event dimension: a named, typed value attached to an
- // [Event][google.cloud.timeseriesinsights.v1.Event].
- message EventDimension {
- // Dimension name.
- //
- // **NOTE**: `EventDimension` names must be composed of alphanumeric
- // characters only, and are case insensitive. Unicode characters are *not*
- // supported. The underscore '_' is also allowed.
- string name = 1;
- // Dimension value.
- //
- // **NOTE**: All entries of the dimension `name` must have the same `value`
- // type.
- oneof value {
- // String representation.
- //
- // **NOTE**: String values are case insensitive. Unicode characters are
- // supported.
- string string_val = 2;
- // Long representation.
- int64 long_val = 3;
- // Bool representation.
- bool bool_val = 4;
- // Double representation.
- double double_val = 5;
- }
- }
- // Represents an entry in a data source.
- //
- // Each Event has:
- //
- // * A timestamp at which the event occurs.
- // * One or multiple dimensions.
- // * Optionally, a group ID that allows clients to group logically related
- // events (for example, all events representing payments transactions done by
- // a user in a day have the same group ID). If a group ID is not provided, an
- // internal one will be generated based on the content and `eventTime`.
- //
- // **NOTE**:
- //
- // * Internally, we discretize time in equal-sized chunks and we assume an
- // event has a 0
- // [TimeseriesPoint.value][google.cloud.timeseriesinsights.v1.TimeseriesPoint.value]
- // in a chunk that does not contain any occurrences of an event in the input.
- // * The number of Events with the same group ID should be limited.
- // * Group ID *cannot* be queried.
- // * Group ID does *not* correspond to a user ID or the like. If a user ID is of
- // interest to be queried, use a user ID `dimension` instead.
- message Event {
- // Event dimensions.
- repeated EventDimension dimensions = 1;
- // Event group ID.
- //
- // **NOTE**: JSON encoding should use a string to hold a 64-bit integer value,
- // because a native JSON number holds only 53 binary bits for an integer.
- int64 group_id = 2;
- // Event timestamp (when the event occurred, not when it was ingested).
- google.protobuf.Timestamp event_time = 3;
- }
- // Appends events to an existing DataSet.
- message AppendEventsRequest {
- // Events to be appended.
- //
- // Note:
- //
- // 0. The [DataSet][google.cloud.timeseriesinsights.v1.DataSet] must be shown in a `LOADED` state
- // in the results of `list` method; otherwise, all events from
- // the append request will be dropped, and a `NOT_FOUND` status will be
- // returned.
- // 0. All events in a single request must have the same
- // [groupId][google.cloud.timeseriesinsights.v1.Event.group_id] if set; otherwise, an
- // `INVALID_ARGUMENT` status will be returned.
- // 0. If [groupId][google.cloud.timeseriesinsights.v1.Event.group_id] is not set (or 0), there
- // should be only 1 event; otherwise, an `INVALID_ARGUMENT` status will be
- // returned.
- // 0. The events must be newer than the current time minus
- // [DataSet TTL][google.cloud.timeseriesinsights.v1.DataSet.ttl] or they will be dropped.
- repeated Event events = 1;
- // Required. The DataSet to which we want to append in the format of
- // "projects/{project}/datasets/{dataset}"
- string dataset = 2 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "timeseriesinsights.googleapis.com/Dataset"
- }
- ];
- }
- // Response for an AppendEvents RPC.
- message AppendEventsResponse {
- // Dropped events; empty if all events are successfully added.
- // See the `events` field of AppendEventsRequest for the drop conditions.
- repeated Event dropped_events = 1;
- }
- // Create a DataSet request.
- message CreateDataSetRequest {
- // Required. Client project name which will own this DataSet in the format of
- // 'projects/{project}'.
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "cloudresourcemanager.googleapis.com/Project"
- }
- ];
- // Required. Dataset to be loaded.
- DataSet dataset = 2 [(google.api.field_behavior) = REQUIRED];
- }
- // Unload DataSet request from the serving system.
- message DeleteDataSetRequest {
- // Required. Dataset name in the format of "projects/{project}/datasets/{dataset}"
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "timeseriesinsights.googleapis.com/Dataset"
- }
- ];
- }
- // List the DataSets created by the current project.
- message ListDataSetsRequest {
- // Required. Project owning the DataSet in the format of "projects/{project}".
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "cloudresourcemanager.googleapis.com/Project"
- }
- ];
- // Number of results to return in the list.
- int32 page_size = 2;
- // Token to provide to skip to a particular spot in the list.
- // Obtained from `next_page_token` of a previous ListDataSetsResponse.
- string page_token = 3;
- }
- // Created DataSets list response.
- message ListDataSetsResponse {
- // The list of created DataSets.
- repeated DataSet datasets = 1;
- // Token to receive the next page of results.
- // NOTE(review): presumably empty when there are no further pages, per the
- // standard AIP pagination convention — confirm against service behavior.
- string next_page_token = 2;
- }
- // A categorical dimension fixed to a certain value.
- //
- // Unlike [EventDimension][google.cloud.timeseriesinsights.v1.EventDimension],
- // only string and bool values can be pinned; numeric dimensions (long_val,
- // double_val) have no counterpart here.
- message PinnedDimension {
- // The name of the dimension for which we are fixing its value.
- string name = 1;
- // Dimension value.
- //
- // **NOTE**: The `value` type must match that in the data with the same
- // `dimension` as name.
- oneof value {
- // A string value. This can be used for [dimensions][google.cloud.timeseriesinsights.v1.EventDimension], which
- // have their value field set to [string_val][google.cloud.timeseriesinsights.v1.EventDimension.string_val].
- string string_val = 2;
- // A bool value. This can be used for [dimensions][google.cloud.timeseriesinsights.v1.EventDimension], which
- // have their value field set to [bool_val][google.cloud.timeseriesinsights.v1.EventDimension.bool_val].
- bool bool_val = 3;
- }
- }
- // Parameters that control the sensitivity and other options for the time series
- // forecast.
- message ForecastParams {
- // A time period of a fixed interval.
- // Values are not in numeric order (HOURLY = 5 was added after YEARLY = 4);
- // do not renumber.
- enum Period {
- // Unknown or simply not given.
- PERIOD_UNSPECIFIED = 0;
- // 1 hour
- HOURLY = 5;
- // 24 hours
- DAILY = 1;
- // 7 days
- WEEKLY = 2;
- // 30 days
- MONTHLY = 3;
- // 365 days
- YEARLY = 4;
- }
- // Optional. Penalize variations between the actual and forecasted values smaller than
- // this. For more information about how this parameter affects the score, see
- // the [anomalyScore](EvaluatedSlice.anomaly_score) formula.
- //
- // Intuitively, anomaly scores summarize how statistically significant the
- // change between the actual and forecasted value is compared with what we
- // expect the change to be (see
- // [expectedDeviation](EvaluatedSlice.expected_deviation)). However, in
- // practice, depending on the application, changes smaller than certain
- // absolute values, while statistically significant, may not be important.
- //
- // This parameter allows us to penalize such low absolute value changes.
- //
- // Must be in the (0.0, inf) range.
- //
- // If unspecified, it defaults to 0.000001.
- optional double noise_threshold = 12 [(google.api.field_behavior) = OPTIONAL];
- // Optional. Specifying any known seasonality/periodicity in the time series
- // for the slices we will analyze can improve the quality of the results.
- //
- // If unsure, simply leave it unspecified by not setting a value for this
- // field.
- //
- // If your time series has multiple seasonal patterns, then set it to the most
- // granular one (e.g. if it has daily and weekly patterns, set this to DAILY).
- Period seasonality_hint = 10 [(google.api.field_behavior) = OPTIONAL];
- // Optional. The length of the returned [forecasted
- // timeseries][EvaluatedSlice.forecast].
- //
- // This duration is currently capped at 100 x
- // [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity].
- //
- // Example: If the detection point is set to "2020-12-27T00:00:00Z", the
- // [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] to "3600s" and the
- // horizon_duration to "10800s", then we will generate 3 time
- // series points (from "2020-12-27T01:00:00Z" to "2020-12-27T04:00:00Z"), for
- // which we will return their forecasted values.
- //
- // Note: The horizon time is only used for forecasting not for anomaly
- // detection. To detect anomalies for multiple points of time,
- // simply send multiple queries with those as
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time].
- google.protobuf.Duration horizon_duration = 13 [(google.api.field_behavior) = OPTIONAL];
- }
- // A point in a time series.
- message TimeseriesPoint {
- // The timestamp of this point.
- google.protobuf.Timestamp time = 1;
- // The value for this point.
- //
- // It is computed by aggregating all events in the associated slice that are
- // in the `[time, time + granularity]` range (see
- // [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity]) using the specified
- // [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric].
- // Declared `optional` so that "no value" is distinguishable from 0.
- optional double value = 2;
- }
- // A time series: an ordered sequence of
- // [TimeseriesPoint][google.cloud.timeseriesinsights.v1.TimeseriesPoint] values.
- message Timeseries {
- // The points in this time series, ordered by their timestamp.
- repeated TimeseriesPoint point = 1;
- }
- // Forecast result for a given slice.
- message EvaluatedSlice {
- // Values for all categorical dimensions that uniquely identify this slice.
- repeated PinnedDimension dimensions = 1;
- // The actual value at the detection time (see
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time]).
- //
- // **NOTE**: This value can be an estimate, so it should not be used as a
- // source of truth.
- optional double detection_point_actual = 11;
- // The expected value at the detection time, which is obtained by forecasting
- // on the historical time series.
- optional double detection_point_forecast = 12;
- // How much our forecast model expects the detection point actual will
- // deviate from its forecasted value based on how well it fit the input time
- // series.
- //
- // In general, we expect the `detectionPointActual` to
- // be in the `[detectionPointForecast - expectedDeviation,
- // detectionPointForecast + expectedDeviation]` range. The more the actual
- // value is outside this range, the more statistically significant the
- // anomaly is.
- //
- // The expected deviation is always positive.
- optional double expected_deviation = 16;
- // Summarizes how significant the change between the actual and forecasted
- // detection points are compared with the historical patterns observed on the
- // [history][google.cloud.timeseriesinsights.v1.EvaluatedSlice.history] time series.
- //
- // Defined as *|a - f| / (e + nt)*, where:
- //
- // - *a* is the [detectionPointActual][google.cloud.timeseriesinsights.v1.EvaluatedSlice.detection_point_actual].
- // - *f* is the [detectionPointForecast][google.cloud.timeseriesinsights.v1.EvaluatedSlice.detection_point_forecast].
- // - *e* is the [expectedDeviation][google.cloud.timeseriesinsights.v1.EvaluatedSlice.expected_deviation].
- // - *nt* is the [noiseThreshold][google.cloud.timeseriesinsights.v1.ForecastParams.noise_threshold].
- //
- // Anomaly scores between different requests and datasets are comparable. As
- // a guideline, the risk of a slice being an anomaly based on the anomaly
- // score is:
- //
- // - **Very High** if `anomalyScore` > 5.
- // - **High** if the `anomalyScore` is in the [2, 5] range.
- // - **Medium** if the `anomalyScore` is in the [1, 2) range.
- // - **Low** if the `anomalyScore` is < 1.
- //
- // If there were issues evaluating this slice, then the anomaly score will be
- // set to -1.0 and the [status][google.cloud.timeseriesinsights.v1.EvaluatedSlice.status] field will contain details on what
- // went wrong.
- optional double anomaly_score = 17;
- // The actual values in the `[`
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `-`
- // [forecastHistory][google.cloud.timeseriesinsights.v1.TimeseriesParams.forecast_history]`,`
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `]` time
- // range.
- //
- // **NOTE**: This field is only populated if
- // [returnTimeseries][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.return_timeseries] is true.
- Timeseries history = 5;
- // The forecasted values in the `[`
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] `+`
- // [granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity]`,`
- // [forecastParams.horizonTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.forecast_params] `]` time
- // range.
- //
- // **NOTE**: This field is only populated if
- // [returnTimeseries][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.return_timeseries] is true.
- Timeseries forecast = 10;
- // Evaluation status. Contains an error message if the `anomalyScore` is < 0.
- //
- // Possible error messages:
- //
- // - **"Time series too sparse"**: The returned time series for this slice did
- // not contain enough data points (we require a minimum of 10).
- // - **"Not enough recent time series points"**: The time series contains the
- // minimum of 10 points, but there are not enough close in time to the
- // detection point.
- // - **"Missing detection point data"**: There were no events to be
- // aggregated within the `[detectionTime, detectionTime + granularity]` time
- // interval, so we don't have an actual value with which we can compare our
- // prediction.
- // - **"Data retrieval error"**: We failed to retrieve the time series data
- // for this slice and could not evaluate it successfully. Should be a
- // transient error.
- // - **"Internal server error"**: Internal unexpected error.
- google.rpc.Status status = 18;
- }
- // Parameters that control how we slice the dataset and, optionally, filter
- // slices that have some specific values on some dimensions (pinned dimensions).
- message SlicingParams {
- // Required. Dimensions over which we will group the events in slices. The names
- // specified here come from the
- // [EventDimension.name][google.cloud.timeseriesinsights.v1.EventDimension.name] field. At least
- // one dimension name must be specified. All dimension names that do not exist
- // in the queried `DataSet` will be ignored.
- //
- // Currently only dimensions that hold string values can be specified here.
- repeated string dimension_names = 1 [(google.api.field_behavior) = REQUIRED];
- // Optional. We will only analyze slices for which
- // [EvaluatedSlice.dimensions][google.cloud.timeseriesinsights.v1.EvaluatedSlice.dimensions] contain all of the
- // following pinned dimensions. A query with a pinned dimension `{ name: "d3"
- // stringVal: "v3" }` will only analyze events which contain the dimension `{
- // name: "d3" stringVal: "v3" }`.
- // The [pinnedDimensions][google.cloud.timeseriesinsights.v1.SlicingParams.pinned_dimensions] and
- // [dimensionNames][google.cloud.timeseriesinsights.v1.SlicingParams.dimension_names] fields can **not**
- // share the same dimension names.
- //
- // Example of a valid specification:
- //
- // ```json
- // {
- // dimensionNames: ["d1", "d2"],
- // pinnedDimensions: [
- // { name: "d3" stringVal: "v3" },
- // { name: "d4" stringVal: "v4" }
- // ]
- // }
- // ```
- //
- // In the previous example we will slice the dataset by dimensions "d1",
- // "d2", "d3" and "d4", but we will only analyze slices for which "d3=v3" and
- // "d4=v4".
- //
- // The following example is **invalid** as "d2" is present in both
- // dimensionNames and pinnedDimensions:
- //
- // ```json
- // {
- // dimensionNames: ["d1", "d2"],
- // pinnedDimensions: [
- // { name: "d2" stringVal: "v2" },
- // { name: "d4" stringVal: "v4" }
- // ]
- // }
- // ```
- repeated PinnedDimension pinned_dimensions = 2 [(google.api.field_behavior) = OPTIONAL];
- }
- // Parameters that control how we construct the time series for each slice.
- message TimeseriesParams {
- // Methods by which we can aggregate multiple events by a given
- // [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric].
- enum AggregationMethod {
- // Unspecified. Currently treated as SUM for backward compatibility; see
- // the `metric` field documentation below.
- AGGREGATION_METHOD_UNSPECIFIED = 0;
- // Aggregate multiple events by summing up the values found in the
- // [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] dimension.
- SUM = 1;
- // Aggregate multiple events by averaging out the values found in the
- // [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] dimension.
- AVERAGE = 2;
- }
- // Required. How long should we go in the past when fetching the timeline used for
- // forecasting each slice.
- //
- // This is used in combination with the
- // [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] parameter.
- // The time series we construct will have the following time range:
- // `[detectionTime - forecastHistory, detectionTime + granularity]`.
- //
- // The forecast history might be rounded up, so that a multiple of
- // `granularity` is used to process the query.
- //
- // Note: If there are not enough events in the
- // `[detectionTime - forecastHistory, detectionTime + granularity]` time
- // interval, the slice evaluation can fail. For more information, see
- // [EvaluatedSlice.status][google.cloud.timeseriesinsights.v1.EvaluatedSlice.status].
- google.protobuf.Duration forecast_history = 1 [(google.api.field_behavior) = REQUIRED];
- // Required. The time granularity of the time series (on the x-axis). Each time series
- // point starting at time T will aggregate all events for a particular slice
- // in *[T, T + granularity)* time windows.
- //
- // Note: The aggregation is decided based on the
- // [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] parameter.
- //
- // This granularity defines the query-time aggregation windows and is not
- // necessarily related to any event time granularity in the raw data (though
- // we do recommend that the query-time granularity is not finer than the
- // ingestion-time one).
- //
- // Currently, the minimal supported granularity is 10 seconds.
- google.protobuf.Duration granularity = 2 [(google.api.field_behavior) = REQUIRED];
- // Optional. Denotes the [name][google.cloud.timeseriesinsights.v1.EventDimension.name] of a numerical
- // dimension that will have its values aggregated to compute the y-axis of the
- // time series.
- //
- // The aggregation method must also be specified by setting the
- // [metricAggregationMethod][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric_aggregation_method]
- // field.
- //
- // Note: Currently, if the aggregation method is unspecified, we will
- // default to SUM for backward compatibility reasons, but new implementations
- // should set the
- // [metricAggregationMethod][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric_aggregation_method]
- // explicitly.
- //
- // If the metric is unspecified, we will use the number of events that each
- // time series point contains as the point value.
- //
- // Example: Let's assume we have the following three events in our dataset:
- // ```json
- // {
- // eventTime: "2020-12-27T00:00:00Z",
- // dimensions: [
- // { name: "d1" stringVal: "v1" },
- // { name: "d2" stringVal: "v2" }
- // { name: "m1" longVal: 100 }
- // { name: "m2" longVal: 11 }
- // ]
- // },
- // {
- // eventTime: "2020-12-27T00:10:00Z",
- // dimensions: [
- // { name: "d1" stringVal: "v1" },
- // { name: "d2" stringVal: "v2" }
- // { name: "m1" longVal: 200 }
- // { name: "m2" longVal: 22 }
- // ]
- // },
- // {
- // eventTime: "2020-12-27T00:20:00Z",
- // dimensions: [
- // { name: "d1" stringVal: "v1" },
- // { name: "d2" stringVal: "v2" }
- // { name: "m1" longVal: 300 }
- // { name: "m2" longVal: 33 }
- // ]
- // }
- // ```
- //
- // These events are all within the same hour, spaced 10 minutes between each
- // of them. Assuming our [QueryDataSetRequest][google.cloud.timeseriesinsights.v1.QueryDataSetRequest] had set
- // [slicingParams.dimensionNames][google.cloud.timeseriesinsights.v1.SlicingParams.dimension_names] to ["d1",
- // "d2"] and [timeseries_params.granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] to
- // "3600s", then all the previous events will be aggregated into the same
- // [timeseries point][google.cloud.timeseriesinsights.v1.TimeseriesPoint].
- //
- // The time series point that they're all part of will have the
- // [time][google.cloud.timeseriesinsights.v1.TimeseriesPoint.time] set to "2020-12-27T00:00:00Z" and the
- // [value][google.cloud.timeseriesinsights.v1.TimeseriesPoint.value] populated based on this metric field:
- //
- // - If the metric is set to "m1" and metric_aggregation_method to SUM, then
- // the value of the point will be 600.
- // - If the metric is set to "m2" and metric_aggregation_method to SUM, then
- // the value of the point will be 66.
- // - If the metric is set to "m1" and metric_aggregation_method to AVERAGE,
- // then the value of the point will be 200.
- // - If the metric is set to "m2" and metric_aggregation_method to AVERAGE,
- // then the value of the point will be 22.
- // - If the metric field is "" or unspecified, then the value of the point
- // will be 3, as we will simply count the events.
- optional string metric = 4 [(google.api.field_behavior) = OPTIONAL];
- // Optional. Together with the [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] field, specifies how
- // we will aggregate multiple events to obtain the value of a time series
- // point. See the [metric][google.cloud.timeseriesinsights.v1.TimeseriesParams.metric] documentation for more
- // details.
- //
- // If the metric is not specified or "", then this field will be ignored.
- AggregationMethod metric_aggregation_method = 5 [(google.api.field_behavior) = OPTIONAL];
- }
- // Request for performing a query against a loaded DataSet.
- message QueryDataSetRequest {
- // Required. Loaded DataSet to be queried in the format of
- // "projects/{project}/datasets/{dataset}"
- string name = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "timeseriesinsights.googleapis.com/Dataset"
- }
- ];
- // Required. This is the point in time that we want to probe for anomalies.
- //
- // The corresponding [TimeseriesPoint][google.cloud.timeseriesinsights.v1.TimeseriesPoint] is referred to as the
- // detection point.
- //
- // **NOTE**: As with any other time series point, the value is given by
- // aggregating all events in the slice that are in the
- // [detectionTime, detectionTime + granularity) time interval, where
- // the granularity is specified in the
- // [timeseriesParams.granularity][google.cloud.timeseriesinsights.v1.TimeseriesParams.granularity] field.
- google.protobuf.Timestamp detection_time = 11 [(google.api.field_behavior) = REQUIRED];
- // Optional. How many slices are returned in
- // [QueryDataSetResponse.slices][google.cloud.timeseriesinsights.v1.QueryDataSetResponse.slices].
- //
- // The returned slices are tentatively the ones with the highest
- // [anomaly scores][google.cloud.timeseriesinsights.v1.EvaluatedSlice.anomaly_score] in the dataset that match
- // the query, but it is not guaranteed.
- //
- // Reducing this number will improve query performance, both in terms of
- // latency and resource usage.
- //
- // Defaults to 50 when unset (the proto3 `optional` keyword lets the server
- // distinguish "unset" from an explicit 0).
- optional int32 num_returned_slices = 13;
- // Parameters controlling how we will split the dataset into the slices that
- // we will analyze.
- SlicingParams slicing_params = 9;
- // Parameters controlling how we will build the time series used to predict
- // the [detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time] value for each slice.
- TimeseriesParams timeseries_params = 10;
- // Parameters that control the time series forecasting models, such as the
- // sensitivity of the anomaly detection.
- ForecastParams forecast_params = 5;
- // If specified, we will return the actual and forecasted time series for
- // all returned slices. Defaults to false (the proto3 default for bool).
- //
- // The time series are returned in the
- // [EvaluatedSlice.history][google.cloud.timeseriesinsights.v1.EvaluatedSlice.history] and
- // [EvaluatedSlice.forecast][google.cloud.timeseriesinsights.v1.EvaluatedSlice.forecast] fields.
- bool return_timeseries = 8;
- }
- // Response for a query executed by the system.
- message QueryDataSetResponse {
- // Loaded DataSet that was queried.
- //
- // NOTE(review): presumably echoes
- // [QueryDataSetRequest.name][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.name]
- // in the "projects/{project}/datasets/{dataset}" format — confirm against
- // server behavior.
- string name = 1;
- // Slices sorted in descending order by their
- // [anomalyScore][google.cloud.timeseriesinsights.v1.EvaluatedSlice.anomaly_score].
- //
- // At most [numReturnedSlices][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.num_returned_slices]
- // slices are present in this field.
- repeated EvaluatedSlice slices = 3;
- }
- // Request for evaluateSlice: runs anomaly detection at a given detection
- // time for a single slice of a loaded DataSet, identified by explicitly
- // pinned dimension values (rather than discovering slices via
- // SlicingParams as QueryDataSetRequest does).
- message EvaluateSliceRequest {
- // Required. Loaded DataSet to be queried in the format of
- // "projects/{project}/datasets/{dataset}"
- string dataset = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "timeseriesinsights.googleapis.com/Dataset"
- }
- ];
- // Required. Dimensions with pinned values that specify the slice for which we will
- // fetch the time series.
- repeated PinnedDimension pinned_dimensions = 2 [(google.api.field_behavior) = REQUIRED];
- // Required. This is the point in time that we want to probe for anomalies.
- //
- // See documentation for
- // [QueryDataSetRequest.detectionTime][google.cloud.timeseriesinsights.v1.QueryDataSetRequest.detection_time].
- google.protobuf.Timestamp detection_time = 3 [(google.api.field_behavior) = REQUIRED];
- // Parameters controlling how we will build the time series used to predict
- // the [detectionTime][google.cloud.timeseriesinsights.v1.EvaluateSliceRequest.detection_time] value for this slice.
- // (No field_behavior annotation — presumably optional; verify server-side
- // defaults.)
- TimeseriesParams timeseries_params = 4;
- // Parameters that control the time series forecasting models, such as the
- // sensitivity of the anomaly detection.
- ForecastParams forecast_params = 5;
- }
- // Request for evaluateTimeseries: runs anomaly detection directly on an
- // inline time series supplied by the caller, with no DataSet involved.
- message EvaluateTimeseriesRequest {
- // Required. Client project name in the format of 'projects/{project}'.
- string parent = 1 [
- (google.api.field_behavior) = REQUIRED,
- (google.api.resource_reference) = {
- type: "cloudresourcemanager.googleapis.com/Project"
- }
- ];
- // Evaluate this time series without requiring that it was previously loaded
- // into a data set.
- //
- // The evaluated time series point is the last one, analogous to calling
- // evaluateSlice or query with
- // [detectionTime][google.cloud.timeseriesinsights.v1.EvaluateSliceRequest.detection_time] set to
- // `timeseries.point(timeseries.point_size() - 1).time`.
- //
- // The length of the time series must be at least 10.
- //
- // All points must have the same time offset relative to the granularity. For
- // example, if the [granularity][google.cloud.timeseriesinsights.v1.EvaluateTimeseriesRequest.granularity] is "5s", then the following
- // point.time sequences are valid:
- // - "100s", "105s", "120s", "125s" (no offset)
- // - "102s", "107s", "122s", "127s" (offset is "2s")
- // However, the following sequence is invalid as it has inconsistent offsets:
- // - "100s", "105s", "122s", "127s" (offsets are either "0s" or "2s")
- Timeseries timeseries = 2;
- // The granularity of the time series (time distance between two consecutive
- // points).
- google.protobuf.Duration granularity = 3;
- // Parameters that control the time series forecasting models, such as the
- // sensitivity of the anomaly detection.
- ForecastParams forecast_params = 4;
- }
|