Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updated OpenAPI logs_pattern_query to support Patterns for any attribute #2797

Merged
merged 1 commit into from
Nov 20, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions .apigentools-info
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
"spec_versions": {
"v1": {
"apigentools_version": "1.6.6",
"regenerated": "2024-11-20 20:14:23.296853",
"spec_repo_commit": "ebf27b5e"
"regenerated": "2024-11-20 21:48:32.490853",
"spec_repo_commit": "34905ccb"
},
"v2": {
"apigentools_version": "1.6.6",
"regenerated": "2024-11-20 20:14:23.315976",
"spec_repo_commit": "ebf27b5e"
"regenerated": "2024-11-20 21:48:32.509653",
"spec_repo_commit": "34905ccb"
}
}
}
11 changes: 11 additions & 0 deletions .generator/schemas/v1/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4654,6 +4654,11 @@ components:
description: Widget column field.
example: content
type: string
is_clustering_pattern_field_path:
description: Identifies the clustering pattern field column, usable only
with logs_pattern_stream.
example: true
type: boolean
width:
$ref: '#/components/schemas/ListStreamColumnWidth'
required:
Expand Down Expand Up @@ -4733,6 +4738,12 @@ components:
ListStreamQuery:
description: Updated list stream widget.
properties:
clustering_pattern_field_path:
default: message
description: Specifies the field for logs pattern clustering. Usable only
with logs_pattern_stream.
example: message
type: string
compute:
description: Compute configuration for the List Stream Widget. Compute can
be used only with the logs_transaction_stream (from 1 to 5 items) list
Expand Down
41 changes: 38 additions & 3 deletions api/datadogV1/model_list_stream_column.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@ import (
type ListStreamColumn struct {
// Widget column field.
Field string `json:"field"`
// Identifies the clustering pattern field column, usable only with logs_pattern_stream.
IsClusteringPatternFieldPath *bool `json:"is_clustering_pattern_field_path,omitempty"`
// Widget column width.
Width ListStreamColumnWidth `json:"width"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
Expand Down Expand Up @@ -63,6 +65,34 @@ func (o *ListStreamColumn) SetField(v string) {
o.Field = v
}

// GetIsClusteringPatternFieldPath returns the IsClusteringPatternFieldPath field value if set, zero value otherwise.
func (o *ListStreamColumn) GetIsClusteringPatternFieldPath() bool {
	if o != nil && o.IsClusteringPatternFieldPath != nil {
		return *o.IsClusteringPatternFieldPath
	}
	// Field unset (or nil receiver): report the zero value.
	return false
}

// GetIsClusteringPatternFieldPathOk returns a tuple with the IsClusteringPatternFieldPath field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ListStreamColumn) GetIsClusteringPatternFieldPathOk() (*bool, bool) {
	if o == nil {
		return nil, false
	}
	if o.IsClusteringPatternFieldPath == nil {
		return nil, false
	}
	return o.IsClusteringPatternFieldPath, true
}

// HasIsClusteringPatternFieldPath returns a boolean if a field has been set.
func (o *ListStreamColumn) HasIsClusteringPatternFieldPath() bool {
	if o == nil {
		return false
	}
	return o.IsClusteringPatternFieldPath != nil
}

// SetIsClusteringPatternFieldPath gets a reference to the given bool and assigns it to the IsClusteringPatternFieldPath field.
func (o *ListStreamColumn) SetIsClusteringPatternFieldPath(v bool) {
	// Take the address of the local copy so the stored pointer is
	// independent of the caller's variable.
	value := v
	o.IsClusteringPatternFieldPath = &value
}

// GetWidth returns the Width field value.
func (o *ListStreamColumn) GetWidth() ListStreamColumnWidth {
if o == nil {
Expand Down Expand Up @@ -93,6 +123,9 @@ func (o ListStreamColumn) MarshalJSON() ([]byte, error) {
return datadog.Marshal(o.UnparsedObject)
}
toSerialize["field"] = o.Field
if o.IsClusteringPatternFieldPath != nil {
toSerialize["is_clustering_pattern_field_path"] = o.IsClusteringPatternFieldPath
}
toSerialize["width"] = o.Width

for key, value := range o.AdditionalProperties {
Expand All @@ -104,8 +137,9 @@ func (o ListStreamColumn) MarshalJSON() ([]byte, error) {
// UnmarshalJSON deserializes the given payload.
func (o *ListStreamColumn) UnmarshalJSON(bytes []byte) (err error) {
all := struct {
Field *string `json:"field"`
Width *ListStreamColumnWidth `json:"width"`
Field *string `json:"field"`
IsClusteringPatternFieldPath *bool `json:"is_clustering_pattern_field_path,omitempty"`
Width *ListStreamColumnWidth `json:"width"`
}{}
if err = datadog.Unmarshal(bytes, &all); err != nil {
return datadog.Unmarshal(bytes, &o.UnparsedObject)
Expand All @@ -118,13 +152,14 @@ func (o *ListStreamColumn) UnmarshalJSON(bytes []byte) (err error) {
}
additionalProperties := make(map[string]interface{})
if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
datadog.DeleteKeys(additionalProperties, &[]string{"field", "width"})
datadog.DeleteKeys(additionalProperties, &[]string{"field", "is_clustering_pattern_field_path", "width"})
} else {
return err
}

hasInvalidField := false
o.Field = *all.Field
o.IsClusteringPatternFieldPath = all.IsClusteringPatternFieldPath
if !all.Width.IsValid() {
hasInvalidField = true
} else {
Expand Down
57 changes: 48 additions & 9 deletions api/datadogV1/model_list_stream_query.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ import (

// ListStreamQuery Updated list stream widget.
type ListStreamQuery struct {
// Specifies the field for logs pattern clustering. Usable only with logs_pattern_stream.
ClusteringPatternFieldPath *string `json:"clustering_pattern_field_path,omitempty"`
// Compute configuration for the List Stream Widget. Compute can be used only with the logs_transaction_stream (from 1 to 5 items) list stream source.
Compute []ListStreamComputeItems `json:"compute,omitempty"`
// Source from which to query items to display in the stream.
Expand Down Expand Up @@ -39,6 +41,8 @@ type ListStreamQuery struct {
// will change when the set of required properties is changed.
func NewListStreamQuery(dataSource ListStreamSource, queryString string) *ListStreamQuery {
this := ListStreamQuery{}
var clusteringPatternFieldPath string = "message"

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🔵 Code Quality Violation

Suggested change
var clusteringPatternFieldPath string = "message"
var clusteringPatternFieldPath = "message"
redundant type declaration (...read more)

In Go, it is considered good practice to avoid declaring the type when it is obvious or when the type can be inferred from the assignment. This is known as type inference, and it offers several benefits:

  1. Readability: By omitting the explicit type declaration, the code becomes more concise and easier to read. Redundant type declarations can clutter the code and introduce unnecessary noise. When the type is obvious from the assigned value, omitting the type declaration can improve code readability and make it more expressive.
  2. Flexibility and maintainability: Using type inference allows for easier changes to the underlying type without manually updating every instance where it is declared. If the type needs to be changed in the future, you only need to modify the assignment, and Go's type inference mechanism will handle the rest. This reduces the maintenance effort required and improves code maintainability.
  3. Clean code appearance: Omitting the type declaration when it is obvious results in cleaner code syntax. Code that is free from excessive explicit type declarations tends to look more elegant and consistent. It minimizes redundancy and focuses on the essential logic, contributing to a cleaner and more streamlined codebase.
  4. Compatibility: Go's type inference mechanism ensures compatibility with future changes to the type of the assigned value. If the assigned value is changed to a type-compatible value, the code will continue to compile and run without any modifications. This allows for flexibility in your code while maintaining correctness.

That being said, it is important to strike a balance and avoid excessive use of type inference. Clear and explicit type declarations are still valuable when they enhance code clarity, such as when documenting or expressing the intent of the code. It is essential to find the right balance between brevity and clarity in your codebase.

By utilizing Go's type inference mechanism and avoiding explicit type declarations when the type is obvious, you can achieve more readable, maintainable, and concise code that adheres to Go's idiomatic style.

View in Datadog  Leave us feedback  Documentation

this.ClusteringPatternFieldPath = &clusteringPatternFieldPath
this.DataSource = dataSource
this.QueryString = queryString
return &this
Expand All @@ -49,11 +53,41 @@ func NewListStreamQuery(dataSource ListStreamSource, queryString string) *ListSt
// but it doesn't guarantee that properties required by API are set.
func NewListStreamQueryWithDefaults() *ListStreamQuery {
this := ListStreamQuery{}
var clusteringPatternFieldPath string = "message"

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🔵 Code Quality Violation

Suggested change
var clusteringPatternFieldPath string = "message"
var clusteringPatternFieldPath = "message"
redundant type declaration (...read more)

In Go, it is considered good practice to avoid declaring the type when it is obvious or when the type can be inferred from the assignment. This is known as type inference, and it offers several benefits:

  1. Readability: By omitting the explicit type declaration, the code becomes more concise and easier to read. Redundant type declarations can clutter the code and introduce unnecessary noise. When the type is obvious from the assigned value, omitting the type declaration can improve code readability and make it more expressive.
  2. Flexibility and maintainability: Using type inference allows for easier changes to the underlying type without manually updating every instance where it is declared. If the type needs to be changed in the future, you only need to modify the assignment, and Go's type inference mechanism will handle the rest. This reduces the maintenance effort required and improves code maintainability.
  3. Clean code appearance: Omitting the type declaration when it is obvious results in cleaner code syntax. Code that is free from excessive explicit type declarations tends to look more elegant and consistent. It minimizes redundancy and focuses on the essential logic, contributing to a cleaner and more streamlined codebase.
  4. Compatibility: Go's type inference mechanism ensures compatibility with future changes to the type of the assigned value. If the assigned value is changed to a type-compatible value, the code will continue to compile and run without any modifications. This allows for flexibility in your code while maintaining correctness.

That being said, it is important to strike a balance and avoid excessive use of type inference. Clear and explicit type declarations are still valuable when they enhance code clarity, such as when documenting or expressing the intent of the code. It is essential to find the right balance between brevity and clarity in your codebase.

By utilizing Go's type inference mechanism and avoiding explicit type declarations when the type is obvious, you can achieve more readable, maintainable, and concise code that adheres to Go's idiomatic style.

View in Datadog  Leave us feedback  Documentation

this.ClusteringPatternFieldPath = &clusteringPatternFieldPath
var dataSource ListStreamSource = LISTSTREAMSOURCE_APM_ISSUE_STREAM
this.DataSource = dataSource
return &this
}

// GetClusteringPatternFieldPath returns the ClusteringPatternFieldPath field value if set, zero value otherwise.
func (o *ListStreamQuery) GetClusteringPatternFieldPath() string {
	if o != nil && o.ClusteringPatternFieldPath != nil {
		return *o.ClusteringPatternFieldPath
	}
	// Field unset (or nil receiver): report the zero value.
	return ""
}

// GetClusteringPatternFieldPathOk returns a tuple with the ClusteringPatternFieldPath field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ListStreamQuery) GetClusteringPatternFieldPathOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	if o.ClusteringPatternFieldPath == nil {
		return nil, false
	}
	return o.ClusteringPatternFieldPath, true
}

// HasClusteringPatternFieldPath returns a boolean if a field has been set.
func (o *ListStreamQuery) HasClusteringPatternFieldPath() bool {
	if o == nil {
		return false
	}
	return o.ClusteringPatternFieldPath != nil
}

// SetClusteringPatternFieldPath gets a reference to the given string and assigns it to the ClusteringPatternFieldPath field.
func (o *ListStreamQuery) SetClusteringPatternFieldPath(v string) {
	// Take the address of the local copy so the stored pointer is
	// independent of the caller's variable.
	value := v
	o.ClusteringPatternFieldPath = &value
}

// GetCompute returns the Compute field value if set, zero value otherwise.
func (o *ListStreamQuery) GetCompute() []ListStreamComputeItems {
if o == nil || o.Compute == nil {
Expand Down Expand Up @@ -274,6 +308,9 @@ func (o ListStreamQuery) MarshalJSON() ([]byte, error) {
if o.UnparsedObject != nil {
return datadog.Marshal(o.UnparsedObject)
}
if o.ClusteringPatternFieldPath != nil {
toSerialize["clustering_pattern_field_path"] = o.ClusteringPatternFieldPath
}
if o.Compute != nil {
toSerialize["compute"] = o.Compute
}
Expand Down Expand Up @@ -304,14 +341,15 @@ func (o ListStreamQuery) MarshalJSON() ([]byte, error) {
// UnmarshalJSON deserializes the given payload.
func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
all := struct {
Compute []ListStreamComputeItems `json:"compute,omitempty"`
DataSource *ListStreamSource `json:"data_source"`
EventSize *WidgetEventSize `json:"event_size,omitempty"`
GroupBy []ListStreamGroupByItems `json:"group_by,omitempty"`
Indexes []string `json:"indexes,omitempty"`
QueryString *string `json:"query_string"`
Sort *WidgetFieldSort `json:"sort,omitempty"`
Storage *string `json:"storage,omitempty"`
ClusteringPatternFieldPath *string `json:"clustering_pattern_field_path,omitempty"`
Compute []ListStreamComputeItems `json:"compute,omitempty"`
DataSource *ListStreamSource `json:"data_source"`
EventSize *WidgetEventSize `json:"event_size,omitempty"`
GroupBy []ListStreamGroupByItems `json:"group_by,omitempty"`
Indexes []string `json:"indexes,omitempty"`
QueryString *string `json:"query_string"`
Sort *WidgetFieldSort `json:"sort,omitempty"`
Storage *string `json:"storage,omitempty"`
}{}
if err = datadog.Unmarshal(bytes, &all); err != nil {
return datadog.Unmarshal(bytes, &o.UnparsedObject)
Expand All @@ -324,12 +362,13 @@ func (o *ListStreamQuery) UnmarshalJSON(bytes []byte) (err error) {
}
additionalProperties := make(map[string]interface{})
if err = datadog.Unmarshal(bytes, &additionalProperties); err == nil {
datadog.DeleteKeys(additionalProperties, &[]string{"compute", "data_source", "event_size", "group_by", "indexes", "query_string", "sort", "storage"})
datadog.DeleteKeys(additionalProperties, &[]string{"clustering_pattern_field_path", "compute", "data_source", "event_size", "group_by", "indexes", "query_string", "sort", "storage"})
} else {
return err
}

hasInvalidField := false
o.ClusteringPatternFieldPath = all.ClusteringPatternFieldPath
o.Compute = all.Compute
if !all.DataSource.IsValid() {
hasInvalidField = true
Expand Down
10 changes: 8 additions & 2 deletions examples/v1/dashboards/CreateDashboard_1039800684.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,16 @@ func main() {
Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
Field: "timestamp",
},
{
Width: datadogV1.LISTSTREAMCOLUMNWIDTH_AUTO,
Field: "message",
IsClusteringPatternFieldPath: datadog.PtrBool(true),
},
},
Query: datadogV1.ListStreamQuery{
DataSource: datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
QueryString: "",
DataSource: datadogV1.LISTSTREAMSOURCE_LOGS_PATTERN_STREAM,
QueryString: "",
ClusteringPatternFieldPath: datadog.PtrString("message"),
GroupBy: []datadogV1.ListStreamGroupByItems{
{
Facet: "service",
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
2024-11-15T19:32:46.627Z
2024-11-20T19:43:46.485Z
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
interactions:
- request:
body: |
{"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1731699166 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
{"layout_type":"ordered","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1732131826 with list_stream widget","widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"},{"field":"message","is_clustering_pattern_field_path":true,"width":"auto"}],"query":{"clustering_pattern_field_path":"message","data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"}}]}
form: {}
headers:
Accept:
Expand All @@ -12,8 +12,9 @@ interactions:
method: POST
url: https://api.datadoghq.com/api/v1/dashboard
response:
body: '{"id":"hem-inu-je6","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1731699166
with list_stream widget","description":null,"author_handle":"[email protected]","author_name":null,"layout_type":"ordered","url":"/dashboard/hem-inu-je6/test-createanewdashboardwithlogspatternstreamliststreamwidget-1731699166-with-li","is_read_only":false,"template_variables":null,"widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"}],"query":{"data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"},"id":4012469646916199}],"notify_list":null,"created_at":"2024-11-15T19:32:46.772627+00:00","modified_at":"2024-11-15T19:32:46.772627+00:00","restricted_roles":[]}
body: '{"id":"r75-hd7-sd9","title":"Test-Create_a_new_dashboard_with_logs_pattern_stream_list_stream_widget-1732131826
with list_stream widget","description":null,"author_handle":"9919ec9b-ebc7-49ee-8dc8-03626e717cca","author_name":"CI
Account","layout_type":"ordered","url":"/dashboard/r75-hd7-sd9/test-createanewdashboardwithlogspatternstreamliststreamwidget-1732131826-with-li","is_read_only":false,"template_variables":null,"widgets":[{"definition":{"requests":[{"columns":[{"field":"timestamp","width":"auto"},{"field":"message","is_clustering_pattern_field_path":true,"width":"auto"}],"query":{"clustering_pattern_field_path":"message","data_source":"logs_pattern_stream","group_by":[{"facet":"service"}],"query_string":""},"response_format":"event_list"}],"type":"list_stream"},"id":6154246442450384}],"notify_list":null,"created_at":"2024-11-20T19:43:46.871965+00:00","modified_at":"2024-11-20T19:43:46.871965+00:00","restricted_roles":[]}

'
code: 200
Expand All @@ -30,9 +31,9 @@ interactions:
- application/json
id: 1
method: DELETE
url: https://api.datadoghq.com/api/v1/dashboard/hem-inu-je6
url: https://api.datadoghq.com/api/v1/dashboard/r75-hd7-sd9
response:
body: '{"deleted_dashboard_id":"hem-inu-je6"}
body: '{"deleted_dashboard_id":"r75-hd7-sd9"}

'
code: 200
Expand Down
3 changes: 2 additions & 1 deletion tests/scenarios/features/v1/dashboards.feature
Original file line number Diff line number Diff line change
Expand Up @@ -534,11 +534,12 @@ Feature: Dashboards
@team:DataDog/dashboards-backend
Scenario: Create a new dashboard with logs_pattern_stream list_stream widget
Given new "CreateDashboard" request
And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"}],"query":{"data_source":"logs_pattern_stream","query_string":"","group_by":[{"facet":"service"}]},"response_format":"event_list"}]}}]}
And body with value {"layout_type": "ordered", "title": "{{ unique }} with list_stream widget","widgets": [{"definition": {"type": "list_stream","requests": [{"columns":[{"width":"auto","field":"timestamp"},{"width":"auto","field":"message", "is_clustering_pattern_field_path": true}],"query":{"data_source":"logs_pattern_stream","query_string":"","clustering_pattern_field_path":"message","group_by":[{"facet":"service"}]}, "response_format":"event_list"}]}}]}
When the request is sent
Then the response status is 200 OK
And the response "widgets[0].definition.requests[0].query.data_source" is equal to "logs_pattern_stream"
And the response "widgets[0].definition.requests[0].query.group_by[0].facet" is equal to "service"
And the response "widgets[0].definition.requests[0].query.clustering_pattern_field_path" is equal to "message"

@team:DataDog/dashboards-backend
Scenario: Create a new dashboard with logs_stream list_stream widget and storage parameter
Expand Down
Loading