diff --git a/.apigentools-info b/.apigentools-info
index 265e8fbe076..bd0cd8b2e90 100644
--- a/.apigentools-info
+++ b/.apigentools-info
@@ -4,13 +4,13 @@
     "spec_versions": {
         "v1": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2024-11-20 20:14:23.296853",
-            "spec_repo_commit": "ebf27b5e"
+            "regenerated": "2024-11-20 21:48:32.490853",
+            "spec_repo_commit": "34905ccb"
         },
         "v2": {
             "apigentools_version": "1.6.6",
-            "regenerated": "2024-11-20 20:14:23.315976",
-            "spec_repo_commit": "ebf27b5e"
+            "regenerated": "2024-11-20 21:48:32.509653",
+            "spec_repo_commit": "34905ccb"
         }
     }
 }
\ No newline at end of file
diff --git a/.generator/schemas/v1/openapi.yaml b/.generator/schemas/v1/openapi.yaml
index 4dc36090def..9f928e0266a 100644
--- a/.generator/schemas/v1/openapi.yaml
+++ b/.generator/schemas/v1/openapi.yaml
@@ -4654,6 +4654,11 @@ components:
           description: Widget column field.
           example: content
           type: string
+        is_clustering_pattern_field_path:
+          description: Identifies the clustering pattern field column, usable only
+            with logs_pattern_stream.
+          example: true
+          type: boolean
         width:
           $ref: '#/components/schemas/ListStreamColumnWidth'
       required:
@@ -4733,6 +4738,12 @@ components:
     ListStreamQuery:
       description: Updated list stream widget.
      properties:
+        clustering_pattern_field_path:
+          default: message
+          description: Specifies the field for logs pattern clustering. Usable only
+            with logs_pattern_stream.
+          example: message
+          type: string
        compute:
          description: Compute configuration for the List Stream Widget. Compute can
            be used only with the logs_transaction_stream (from 1 to 5 items) list
diff --git a/api/datadogV1/model_list_stream_column.go b/api/datadogV1/model_list_stream_column.go
index 810889babe0..f94c75b8cb6 100644
--- a/api/datadogV1/model_list_stream_column.go
+++ b/api/datadogV1/model_list_stream_column.go
@@ -14,6 +14,8 @@ import (
 type ListStreamColumn struct {
 	// Widget column field.
 	Field string `json:"field"`
+	// Identifies the clustering pattern field column, usable only with logs_pattern_stream.
+	IsClusteringPatternFieldPath *bool `json:"is_clustering_pattern_field_path,omitempty"`
 	// Widget column width.
 	Width ListStreamColumnWidth `json:"width"`
 	// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
@@ -63,6 +65,34 @@ func (o *ListStreamColumn) SetField(v string) {
 	o.Field = v
 }
 
+// GetIsClusteringPatternFieldPath returns the IsClusteringPatternFieldPath field value if set, zero value otherwise.
+func (o *ListStreamColumn) GetIsClusteringPatternFieldPath() bool {
+	if o == nil || o.IsClusteringPatternFieldPath == nil {
+		var ret bool
+		return ret
+	}
+	return *o.IsClusteringPatternFieldPath
+}
+
+// GetIsClusteringPatternFieldPathOk returns a tuple with the IsClusteringPatternFieldPath field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ListStreamColumn) GetIsClusteringPatternFieldPathOk() (*bool, bool) {
+	if o == nil || o.IsClusteringPatternFieldPath == nil {
+		return nil, false
+	}
+	return o.IsClusteringPatternFieldPath, true
+}
+
+// HasIsClusteringPatternFieldPath returns a boolean if a field has been set.
+func (o *ListStreamColumn) HasIsClusteringPatternFieldPath() bool {
+	return o != nil && o.IsClusteringPatternFieldPath != nil
+}
+
+// SetIsClusteringPatternFieldPath gets a reference to the given bool and assigns it to the IsClusteringPatternFieldPath field.
+func (o *ListStreamColumn) SetIsClusteringPatternFieldPath(v bool) {
+	o.IsClusteringPatternFieldPath = &v
+}
+
 // GetWidth returns the Width field value.
 func (o *ListStreamColumn) GetWidth() ListStreamColumnWidth {
 	if o == nil {
@@ -93,6 +123,9 @@ func (o ListStreamColumn) MarshalJSON() ([]byte, error) {
 		return datadog.Marshal(o.UnparsedObject)
 	}
 	toSerialize["field"] = o.Field
+	if o.IsClusteringPatternFieldPath != nil {
+		toSerialize["is_clustering_pattern_field_path"] = o.IsClusteringPatternFieldPath
+	}
 	toSerialize["width"] = o.Width
 
 	for key, value := range o.AdditionalProperties {
@@ -104,8 +137,9 @@ func (o ListStreamColumn) MarshalJSON() ([]byte, error) {
 // UnmarshalJSON deserializes the given payload.
 func (o *ListStreamColumn) UnmarshalJSON(bytes []byte) (err error) {
 	all := struct {
-		Field *string                `json:"field"`
-		Width *ListStreamColumnWidth `json:"width"`
+		Field                        *string                `json:"field"`
+		IsClusteringPatternFieldPath *bool                  `json:"is_clustering_pattern_field_path,omitempty"`
+		Width                        *ListStreamColumnWidth `json:"width"`
 	}{}
 	if err = datadog.Unmarshal(bytes, &all); err != nil {
 		return datadog.Unmarshal(bytes, &o.UnparsedObject)
@@ -118,13 +152,14 @@ func (o *ListStreamColumn) UnmarshalJSON(bytes []byte) (err error) {
 	}
 	additionalProperties := make(map[string]interface{})
 	if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
-		datadog.DeleteKeys(additionalProperties, &[]string{"field", "width"})
+		datadog.DeleteKeys(additionalProperties, &[]string{"field", "is_clustering_pattern_field_path", "width"})
 	} else {
 		return err
 	}
 
 	hasInvalidField := false
 	o.Field = *all.Field
+	o.IsClusteringPatternFieldPath = all.IsClusteringPatternFieldPath
 	if !all.Width.IsValid() {
 		hasInvalidField = true
 	} else {
diff --git a/api/datadogV1/model_list_stream_query.go b/api/datadogV1/model_list_stream_query.go
index ff42ec09c16..db3f338ba48 100644
--- a/api/datadogV1/model_list_stream_query.go
+++ b/api/datadogV1/model_list_stream_query.go
@@ -12,6 +12,8 @@ import (
 // ListStreamQuery Updated list stream widget.
 type ListStreamQuery struct {
+	// Specifies the field for logs pattern clustering. Usable only with logs_pattern_stream.
+	ClusteringPatternFieldPath *string `json:"clustering_pattern_field_path,omitempty"`
 	// Compute configuration for the List Stream Widget. Compute can be used only with the logs_transaction_stream (from 1 to 5 items) list stream source.
 	Compute []ListStreamComputeItems `json:"compute,omitempty"`
 	// Source from which to query items to display in the stream.
@@ -39,6 +41,8 @@
 // will change when the set of required properties is changed.
 func NewListStreamQuery(dataSource ListStreamSource, queryString string) *ListStreamQuery {
 	this := ListStreamQuery{}
+	var clusteringPatternFieldPath string = "message"
+	this.ClusteringPatternFieldPath = &clusteringPatternFieldPath
 	this.DataSource = dataSource
 	this.QueryString = queryString
 	return &this
 }
@@ -49,11 +53,41 @@
 // but it doesn't guarantee that properties required by API are set.
 func NewListStreamQueryWithDefaults() *ListStreamQuery {
 	this := ListStreamQuery{}
+	var clusteringPatternFieldPath string = "message"
+	this.ClusteringPatternFieldPath = &clusteringPatternFieldPath
 	var dataSource ListStreamSource = LISTSTREAMSOURCE_APM_ISSUE_STREAM
 	this.DataSource = dataSource
 	return &this
 }
 
+// GetClusteringPatternFieldPath returns the ClusteringPatternFieldPath field value if set, zero value otherwise.
+func (o *ListStreamQuery) GetClusteringPatternFieldPath() string {
+	if o == nil || o.ClusteringPatternFieldPath == nil {
+		var ret string
+		return ret
+	}
+	return *o.ClusteringPatternFieldPath
+}
+
+// GetClusteringPatternFieldPathOk returns a tuple with the ClusteringPatternFieldPath field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *ListStreamQuery) GetClusteringPatternFieldPathOk() (*string, bool) {
+	if o == nil || o.ClusteringPatternFieldPath == nil {
+		return nil, false
+	}
+	return o.ClusteringPatternFieldPath, true
+}
+
+// HasClusteringPatternFieldPath returns a boolean if a field has been set.
+func (o *ListStreamQuery) HasClusteringPatternFieldPath() bool {
+	return o != nil && o.ClusteringPatternFieldPath != nil
+}
+
+// SetClusteringPatternFieldPath gets a reference to the given string and assigns it to the ClusteringPatternFieldPath field.
+func (o *ListStreamQuery) SetClusteringPatternFieldPath(v string) {
+	o.ClusteringPatternFieldPath = &v
+}
+
 // GetCompute returns the Compute field value if set, zero value otherwise.
 func (o *ListStreamQuery) GetCompute() []ListStreamComputeItems {
 	if o == nil || o.Compute == nil {
@@ -274,6 +308,9 @@ func (o ListStreamQuery) MarshalJSON() ([]byte, error) {
 	if o.UnparsedObject != nil {
 		return datadog.Marshal(o.UnparsedObject)
 	}
+	if o.ClusteringPatternFieldPath != nil {
+		toSerialize["clustering_pattern_field_path"] = o.ClusteringPatternFieldPath
+	}
 	if o.Compute != nil {
 		toSerialize["compute"] = o.Compute
 	}
@@ -304,14 +341,15 @@ func (o ListStreamQuery) MarshalJSON() ([]byte, error) {
 // UnmarshalJSON deserializes the given payload.
 func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
 	all := struct {
-		Compute     []ListStreamComputeItems `json:"compute,omitempty"`
-		DataSource  *ListStreamSource        `json:"data_source"`
-		EventSize   *WidgetEventSize         `json:"event_size,omitempty"`
-		GroupBy     []ListStreamGroupByItems `json:"group_by,omitempty"`
-		Indexes     []string                 `json:"indexes,omitempty"`
-		QueryString *string                  `json:"query_string"`
-		Sort        *WidgetFieldSort         `json:"sort,omitempty"`
-		Storage     *string                  `json:"storage,omitempty"`
+		ClusteringPatternFieldPath *string                  `json:"clustering_pattern_field_path,omitempty"`
+		Compute                    []ListStreamComputeItems `json:"compute,omitempty"`
+		DataSource                 *ListStreamSource        `json:"data_source"`
+		EventSize                  *WidgetEventSize         `json:"event_size,omitempty"`
+		GroupBy                    []ListStreamGroupByItems `json:"group_by,omitempty"`
+		Indexes                    []string                 `json:"indexes,omitempty"`
+		QueryString                *string                  `json:"query_string"`
+		Sort                       *WidgetFieldSort         `json:"sort,omitempty"`
+		Storage                    *string                  `json:"storage,omitempty"`
 	}{}
 	if err = datadog.Unmarshal(bytes, &all); err != nil {
 		return datadog.Unmarshal(bytes, &o.UnparsedObject)
@@ -324,12 +362,13 @@ func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
 	}
 	additionalProperties := make(map[string]interface{})
 	if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
-		datadog.DeleteKeys(additionalProperties, &[]string{"compute", "data_source", "event_size", "group_by", "indexes", "query_string", "sort", "storage"})
+		datadog.DeleteKeys(additionalProperties, &[]string{"clustering_pattern_field_path", "compute", "data_source", "event_size", "group_by", "indexes", "query_string", "sort", "storage"})
 	} else {
 		return err
 	}
 
 	hasInvalidField := false
+	o.ClusteringPatternFieldPath = all.ClusteringPatternFieldPath
 	o.Compute = all.Compute
 	if !all.DataSource.IsValid() {
 		hasInvalidField = true
diff --git a/examples/v1/dashboards/CreateDashboard_1039800684.go b/examples/v1/dashboards/CreateDashboard_1039800684.go
index 9d4816e6e30..7ef9dab0042 100644
--- a/examples/v1/dashboards/CreateDashboard_1039800684.go
+++ b/examples/v1/dashboards/CreateDashboard_1039800684.go
@@ -28,10 +28,16 @@ func main() {
 							Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
 							Field: "timestamp",
 						},
+						{
+							Width:                        datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
+							Field:                        "message",
+							IsClusteringPatternFieldPath: datadog.PtrBool(true),
+						},
 					},
 					Query: datadogV1.ListStreamQuery{
-						DataSource:  datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
-						QueryString: "",
+						DataSource:                 datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
+						QueryString:                "",
+						ClusteringPatternFieldPath: datadog.PtrString("message"),
 						GroupBy: []datadogV1.ListStreamGroupByItems{
 							{
 								Facet: "service",
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
index d64ee9657a5..f275079feea 100644
--- a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.freeze
@@ -1 +1 @@
-2024-11-15T19:32:46.627Z
\ No newline at end of file
+2024-11-20T19:43:46.485Z
\ No newline at end of file
diff --git a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
index 434aaec3ebd..bdc2767ea17 100644
--- a/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
+++ b/tests/scenarios/cassettes/TestScenarios/v1/Feature_Dashboards/Scenario_Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget.yaml
@@ -1,7 +1,7 @@
 interactions:
 - request:
     body: |
-      {"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1731699166 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
+      {"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1732131826 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"},{"field":"message","is_clustering_pattern_field_path":true,"width":"auto"}],"query":{"clustering_pattern_field_path":"message","data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
     form: {}
     headers:
       Accept:
       - application/json
@@ -12,8 +12,9 @@ interactions:
     method: POST
     url: https://api.datadoghq.com/api/v1/dashboard
   response:
-    body: '{"id":"hem-inu-je6","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1731699166
-      with list_stream widget","description":null,"author_handle":"frog@datadoghq.com","author_name":null,"layout_type":"ordered","url":"/dashboard/hem-inu-je6/test-createanewdashboardwithlogspatternstreamliststreamwidget-1731699166-with-li","is_read_only":false,"template_variables":null,"widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"},"id":4012469646916199}],"notify_list":null,"created_at":"2024-11-15T19:32:46.772627+00:00","modified_at":"2024-11-15T19:32:46.772627+00:00","restricted_roles":[]}
+    body: '{"id":"r75-hd7-sd9","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1732131826
+      with list_stream widget","description":null,"author_handle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","author_name":"CI
+      Account","layout_type":"ordered","url":"/dashboard/r75-hd7-sd9/test-createanewdashboardwithlogspatternstreamliststreamwidget-1732131826-with-li","is_read_only":false,"template_variables":null,"widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"},{"field":"message","is_clustering_pattern_field_path":true,"width":"auto"}],"query":{"clustering_pattern_field_path":"message","data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"},"id":6154246442450384}],"notify_list":null,"created_at":"2024-11-20T19:43:46.871965+00:00","modified_at":"2024-11-20T19:43:46.871965+00:00","restricted_roles":[]}
 
       '
     code: 200
@@ -30,9 +31,9 @@ interactions:
       - application/json
     id: 1
     method: DELETE
-    url: https://api.datadoghq.com/api/v1/dashboard/hem-inu-je6
+    url: https://api.datadoghq.com/api/v1/dashboard/r75-hd7-sd9
   response:
-    body: '{"deleted_dashboard_id":"hem-inu-je6"}
+    body: '{"deleted_dashboard_id":"r75-hd7-sd9"}
 
       '
     code: 200
diff --git a/tests/scenarios/features/v1/dashboards.feature b/tests/scenarios/features/v1/dashboards.feature
index 8d1a1bb8aca..208a4dd3966 100644
--- a/tests/scenarios/features/v1/dashboards.feature
+++ b/tests/scenarios/features/v1/dashboards.feature
@@ -534,11 +534,12 @@ Feature: Dashboards
   @team:DataDog/dashboards-backend
   Scenario: Create a new dashboard with logs_pattern_stream list_stream widget
     Given new "CreateDashboard" request
-    And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"}],"query":{"data_source":"logs_pattern_stream","query_string":"","group_by":[{"facet":"service"}]},"response_format":"event_list"}]}}]}
+    And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"},{"width":"auto","field":"message", "is_clustering_pattern_field_path": true}],"query":{"data_source":"logs_pattern_stream","query_string":"","clustering_pattern_field_path":"message","group_by":[{"facet":"service"}]}, "response_format":"event_list"}]}}]}
     When the request is sent
     Then the response status is 200 OK
     And the response "widgets[0].definition.requests[0].query.data_source" is equal to "logs_pattern_stream"
     And the response "widgets[0].definition.requests[0].query.group_by[0].facet" is equal to "service"
+    And the response "widgets[0].definition.requests[0].query.clustering_pattern_field_path" is equal to "message"
 
   @team:DataDog/dashboards-backend
   Scenario: Create a new dashboard with logs_stream list_stream widget and storage parameter
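Usage note (outside the generated diff): the regenerated example file above only shows its changed lines. Below is a minimal, self-contained sketch of how the two new fields fit together on a logs_pattern_stream list stream request; the standalone request construction and the final print are illustrative additions, while the column and query values mirror the ones exercised in the test scenario.

package main

import (
	"fmt"

	"github.com/DataDog/datadog-api-client-go/v2/api/datadog"
	"github.com/DataDog/datadog-api-client-go/v2/api/datadogV1"
)

func main() {
	request := datadogV1.ListStreamWidgetRequest{
		Columns: []datadogV1.ListStreamColumn{
			{
				Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
				Field: "timestamp",
			},
			{
				// Marks this column as the one holding the clustering pattern field.
				Width:                        datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
				Field:                        "message",
				IsClusteringPatternFieldPath: datadog.PtrBool(true),
			},
		},
		Query: datadogV1.ListStreamQuery{
			DataSource:  datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
			QueryString: "",
			// Field used for logs pattern clustering; the generated constructors
			// default it to "message" when it is left unset.
			ClusteringPatternFieldPath: datadog.PtrString("message"),
			GroupBy: []datadogV1.ListStreamGroupByItems{
				{Facet: "service"},
			},
		},
		ResponseFormat: datadogV1.LISTSTREAMRESPONSEFORMAT_EVENT_LIST,
	}
	// Print the assembled request; in the real example this request is embedded
	// in a list_stream widget and sent via DashboardsApi.CreateDashboard.
	fmt.Printf("%+v\n", request)
}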