Skip to content

Commit

Permalink
Regenerate client from commit edd5d6d6 of spec repo (#1475)
Browse files Browse the repository at this point in the history
Co-authored-by: ci.datadog-api-spec <[email protected]>
Co-authored-by: api-clients-generation-pipeline[bot] <54105614+api-clients-generation-pipeline[bot]@users.noreply.github.com>
  • Loading branch information
api-clients-generation-pipeline[bot] and ci.datadog-api-spec authored May 4, 2022
1 parent d8b3c0c commit abcc412
Show file tree
Hide file tree
Showing 7 changed files with 140 additions and 26 deletions.
8 changes: 4 additions & 4 deletions .apigentools-info
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,13 @@
"spec_versions": {
"v1": {
"apigentools_version": "1.6.2",
"regenerated": "2022-05-03 09:44:32.134069",
"spec_repo_commit": "b0dd9845"
"regenerated": "2022-05-04 10:00:09.727623",
"spec_repo_commit": "edd5d6d6"
},
"v2": {
"apigentools_version": "1.6.2",
"regenerated": "2022-05-03 09:44:32.150463",
"spec_repo_commit": "b0dd9845"
"regenerated": "2022-05-04 10:00:09.740044",
"spec_repo_commit": "edd5d6d6"
}
}
}
12 changes: 12 additions & 0 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2875,6 +2875,12 @@ components:
in the archive.
example: source:nginx
type: string
rehydration_max_scan_size_in_gb:
description: Maximum scan size for rehydration from this archive.
example: 100
format: int64
nullable: true
type: integer
rehydration_tags:
description: An array of tags to add to rehydrated logs from an archive.
example:
Expand Down Expand Up @@ -2919,6 +2925,12 @@ components:
in the archive.
example: source:nginx
type: string
rehydration_max_scan_size_in_gb:
description: Maximum scan size for rehydration from this archive.
example: 100
format: int64
nullable: true
type: integer
rehydration_tags:
description: An array of tags to add to rehydrated logs from an archive.
example:
Expand Down
62 changes: 56 additions & 6 deletions api/v2/datadog/model_logs_archive_attributes.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ type LogsArchiveAttributes struct {
Name string `json:"name"`
// The archive query/filter. Logs matching this query are included in the archive.
Query string `json:"query"`
// Maximum scan size for rehydration from this archive.
RehydrationMaxScanSizeInGb NullableInt64 `json:"rehydration_max_scan_size_in_gb,omitempty"`
// An array of tags to add to rehydrated logs from an archive.
RehydrationTags []string `json:"rehydration_tags,omitempty"`
// The state of the archive.
Expand Down Expand Up @@ -158,6 +160,49 @@ func (o *LogsArchiveAttributes) SetQuery(v string) {
o.Query = v
}

// GetRehydrationMaxScanSizeInGb returns the RehydrationMaxScanSizeInGb field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *LogsArchiveAttributes) GetRehydrationMaxScanSizeInGb() int64 {
	// A nil receiver, an unset field, and an explicit JSON null all fall
	// back to the int64 zero value.
	if o == nil {
		return 0
	}
	if v := o.RehydrationMaxScanSizeInGb.Get(); v != nil {
		return *v
	}
	return 0
}

// GetRehydrationMaxScanSizeInGbOk returns a tuple with the RehydrationMaxScanSizeInGb field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *LogsArchiveAttributes) GetRehydrationMaxScanSizeInGbOk() (*int64, bool) {
	if o == nil {
		return nil, false
	}
	field := &o.RehydrationMaxScanSizeInGb
	return field.Get(), field.IsSet()
}

// HasRehydrationMaxScanSizeInGb reports whether the field has been set
// (including being set to an explicit null).
func (o *LogsArchiveAttributes) HasRehydrationMaxScanSizeInGb() bool {
	return o != nil && o.RehydrationMaxScanSizeInGb.IsSet()
}

// SetRehydrationMaxScanSizeInGb gets a reference to the given NullableInt64 and assigns it to the RehydrationMaxScanSizeInGb field.
func (o *LogsArchiveAttributes) SetRehydrationMaxScanSizeInGb(v int64) {
	// Copy the argument so the stored pointer does not alias caller memory.
	val := v
	o.RehydrationMaxScanSizeInGb.Set(&val)
}

// SetRehydrationMaxScanSizeInGbNil sets the value for RehydrationMaxScanSizeInGb to be an explicit nil.
// The field counts as set, so it is serialized (as JSON null) rather than omitted.
func (o *LogsArchiveAttributes) SetRehydrationMaxScanSizeInGbNil() {
o.RehydrationMaxScanSizeInGb.Set(nil)
}

// UnsetRehydrationMaxScanSizeInGb ensures that no value is present for RehydrationMaxScanSizeInGb, not even an explicit nil.
// After this call the field is treated as absent and omitted from serialization.
func (o *LogsArchiveAttributes) UnsetRehydrationMaxScanSizeInGb() {
o.RehydrationMaxScanSizeInGb.Unset()
}

// GetRehydrationTags returns the RehydrationTags field value if set, zero value otherwise.
func (o *LogsArchiveAttributes) GetRehydrationTags() []string {
if o == nil || o.RehydrationTags == nil {
Expand Down Expand Up @@ -233,6 +278,9 @@ func (o LogsArchiveAttributes) MarshalJSON() ([]byte, error) {
}
toSerialize["name"] = o.Name
toSerialize["query"] = o.Query
if o.RehydrationMaxScanSizeInGb.IsSet() {
toSerialize["rehydration_max_scan_size_in_gb"] = o.RehydrationMaxScanSizeInGb.Get()
}
if o.RehydrationTags != nil {
toSerialize["rehydration_tags"] = o.RehydrationTags
}
Expand All @@ -254,12 +302,13 @@ func (o *LogsArchiveAttributes) UnmarshalJSON(bytes []byte) (err error) {
Query *string `json:"query"`
}{}
all := struct {
Destination NullableLogsArchiveDestination `json:"destination"`
IncludeTags *bool `json:"include_tags,omitempty"`
Name string `json:"name"`
Query string `json:"query"`
RehydrationTags []string `json:"rehydration_tags,omitempty"`
State *LogsArchiveState `json:"state,omitempty"`
Destination NullableLogsArchiveDestination `json:"destination"`
IncludeTags *bool `json:"include_tags,omitempty"`
Name string `json:"name"`
Query string `json:"query"`
RehydrationMaxScanSizeInGb NullableInt64 `json:"rehydration_max_scan_size_in_gb,omitempty"`
RehydrationTags []string `json:"rehydration_tags,omitempty"`
State *LogsArchiveState `json:"state,omitempty"`
}{}
err = json.Unmarshal(bytes, &required)
if err != nil {
Expand Down Expand Up @@ -295,6 +344,7 @@ func (o *LogsArchiveAttributes) UnmarshalJSON(bytes []byte) (err error) {
o.IncludeTags = all.IncludeTags
o.Name = all.Name
o.Query = all.Query
o.RehydrationMaxScanSizeInGb = all.RehydrationMaxScanSizeInGb
o.RehydrationTags = all.RehydrationTags
o.State = all.State
return nil
Expand Down
60 changes: 55 additions & 5 deletions api/v2/datadog/model_logs_archive_create_request_attributes.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,8 @@ type LogsArchiveCreateRequestAttributes struct {
Name string `json:"name"`
// The archive query/filter. Logs matching this query are included in the archive.
Query string `json:"query"`
// Maximum scan size for rehydration from this archive.
RehydrationMaxScanSizeInGb NullableInt64 `json:"rehydration_max_scan_size_in_gb,omitempty"`
// An array of tags to add to rehydrated logs from an archive.
RehydrationTags []string `json:"rehydration_tags,omitempty"`
// UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct
Expand Down Expand Up @@ -154,6 +156,49 @@ func (o *LogsArchiveCreateRequestAttributes) SetQuery(v string) {
o.Query = v
}

// GetRehydrationMaxScanSizeInGb returns the RehydrationMaxScanSizeInGb field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *LogsArchiveCreateRequestAttributes) GetRehydrationMaxScanSizeInGb() int64 {
	// A nil receiver, an unset field, and an explicit JSON null all fall
	// back to the int64 zero value.
	if o == nil {
		return 0
	}
	if v := o.RehydrationMaxScanSizeInGb.Get(); v != nil {
		return *v
	}
	return 0
}

// GetRehydrationMaxScanSizeInGbOk returns a tuple with the RehydrationMaxScanSizeInGb field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *LogsArchiveCreateRequestAttributes) GetRehydrationMaxScanSizeInGbOk() (*int64, bool) {
	if o == nil {
		return nil, false
	}
	field := &o.RehydrationMaxScanSizeInGb
	return field.Get(), field.IsSet()
}

// HasRehydrationMaxScanSizeInGb reports whether the field has been set
// (including being set to an explicit null).
func (o *LogsArchiveCreateRequestAttributes) HasRehydrationMaxScanSizeInGb() bool {
	return o != nil && o.RehydrationMaxScanSizeInGb.IsSet()
}

// SetRehydrationMaxScanSizeInGb gets a reference to the given NullableInt64 and assigns it to the RehydrationMaxScanSizeInGb field.
func (o *LogsArchiveCreateRequestAttributes) SetRehydrationMaxScanSizeInGb(v int64) {
	// Copy the argument so the stored pointer does not alias caller memory.
	val := v
	o.RehydrationMaxScanSizeInGb.Set(&val)
}

// SetRehydrationMaxScanSizeInGbNil sets the value for RehydrationMaxScanSizeInGb to be an explicit nil.
// The field counts as set, so it is serialized (as JSON null) rather than omitted.
func (o *LogsArchiveCreateRequestAttributes) SetRehydrationMaxScanSizeInGbNil() {
o.RehydrationMaxScanSizeInGb.Set(nil)
}

// UnsetRehydrationMaxScanSizeInGb ensures that no value is present for RehydrationMaxScanSizeInGb, not even an explicit nil.
// After this call the field is treated as absent and omitted from serialization.
func (o *LogsArchiveCreateRequestAttributes) UnsetRehydrationMaxScanSizeInGb() {
o.RehydrationMaxScanSizeInGb.Unset()
}

// GetRehydrationTags returns the RehydrationTags field value if set, zero value otherwise.
func (o *LogsArchiveCreateRequestAttributes) GetRehydrationTags() []string {
if o == nil || o.RehydrationTags == nil {
Expand Down Expand Up @@ -197,6 +242,9 @@ func (o LogsArchiveCreateRequestAttributes) MarshalJSON() ([]byte, error) {
}
toSerialize["name"] = o.Name
toSerialize["query"] = o.Query
if o.RehydrationMaxScanSizeInGb.IsSet() {
toSerialize["rehydration_max_scan_size_in_gb"] = o.RehydrationMaxScanSizeInGb.Get()
}
if o.RehydrationTags != nil {
toSerialize["rehydration_tags"] = o.RehydrationTags
}
Expand All @@ -215,11 +263,12 @@ func (o *LogsArchiveCreateRequestAttributes) UnmarshalJSON(bytes []byte) (err er
Query *string `json:"query"`
}{}
all := struct {
Destination LogsArchiveCreateRequestDestination `json:"destination"`
IncludeTags *bool `json:"include_tags,omitempty"`
Name string `json:"name"`
Query string `json:"query"`
RehydrationTags []string `json:"rehydration_tags,omitempty"`
Destination LogsArchiveCreateRequestDestination `json:"destination"`
IncludeTags *bool `json:"include_tags,omitempty"`
Name string `json:"name"`
Query string `json:"query"`
RehydrationMaxScanSizeInGb NullableInt64 `json:"rehydration_max_scan_size_in_gb,omitempty"`
RehydrationTags []string `json:"rehydration_tags,omitempty"`
}{}
err = json.Unmarshal(bytes, &required)
if err != nil {
Expand Down Expand Up @@ -247,6 +296,7 @@ func (o *LogsArchiveCreateRequestAttributes) UnmarshalJSON(bytes []byte) (err er
o.IncludeTags = all.IncludeTags
o.Name = all.Name
o.Query = all.Query
o.RehydrationMaxScanSizeInGb = all.RehydrationMaxScanSizeInGb
o.RehydrationTags = all.RehydrationTags
return nil
}
7 changes: 4 additions & 3 deletions examples/v2/logs-archives/CreateLogsArchive.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,10 @@ func main() {
StorageAccount: "account-name",
Type: datadog.LOGSARCHIVEDESTINATIONAZURETYPE_AZURE,
}},
IncludeTags: datadog.PtrBool(false),
Name: "Nginx Archive",
Query: "source:nginx",
IncludeTags: datadog.PtrBool(false),
Name: "Nginx Archive",
Query: "source:nginx",
RehydrationMaxScanSizeInGb: *datadog.NewNullableInt64(datadog.PtrInt64(100)),
RehydrationTags: []string{
"team:intake",
"team:app",
Expand Down
7 changes: 4 additions & 3 deletions examples/v2/logs-archives/UpdateLogsArchive.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,10 @@ func main() {
StorageAccount: "account-name",
Type: datadog.LOGSARCHIVEDESTINATIONAZURETYPE_AZURE,
}},
IncludeTags: datadog.PtrBool(false),
Name: "Nginx Archive",
Query: "source:nginx",
IncludeTags: datadog.PtrBool(false),
Name: "Nginx Archive",
Query: "source:nginx",
RehydrationMaxScanSizeInGb: *datadog.NewNullableInt64(datadog.PtrInt64(100)),
RehydrationTags: []string{
"team:intake",
"team:app",
Expand Down
10 changes: 5 additions & 5 deletions tests/scenarios/features/v2/logs_archives.feature
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,14 @@ Feature: Logs Archives
@generated @skip @team:DataDog/logs-backend
Scenario: Create an archive returns "Bad Request" response
Given new "CreateLogsArchive" request
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/logs-backend
Scenario: Create an archive returns "OK" response
Given new "CreateLogsArchive" request
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 200 OK

Expand Down Expand Up @@ -150,23 +150,23 @@ Feature: Logs Archives
Scenario: Update an archive returns "Bad Request" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/logs-backend
Scenario: Update an archive returns "Not found" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 404 Not found

@generated @skip @team:DataDog/logs-backend
Scenario: Update an archive returns "OK" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 200 OK

Expand Down

0 comments on commit abcc412

Please sign in to comment.