Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .generator/schemas/v2/openapi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -41379,6 +41379,8 @@ components:
LogsArchiveAttributes:
description: The attributes associated with the archive.
properties:
compression_method:
$ref: "#/components/schemas/LogsArchiveAttributesCompressionMethod"
destination:
$ref: "#/components/schemas/LogsArchiveDestination"
include_tags:
Expand Down Expand Up @@ -41416,6 +41418,17 @@ components:
- query
- destination
type: object
LogsArchiveAttributesCompressionMethod:
default: GZIP
description: The type of compression for the archive.
enum:
- GZIP
- ZSTD
example: GZIP
type: string
x-enum-varnames:
- GZIP
- ZSTD
LogsArchiveCreateRequest:
description: The logs archive.
properties:
Expand All @@ -41425,6 +41438,8 @@ components:
LogsArchiveCreateRequestAttributes:
description: The attributes associated with the archive.
properties:
compression_method:
$ref: "#/components/schemas/LogsArchiveAttributesCompressionMethod"
destination:
$ref: "#/components/schemas/LogsArchiveCreateRequestDestination"
include_tags:
Expand Down Expand Up @@ -106036,6 +106051,7 @@ paths:
value:
data:
attributes:
compression_method: GZIP
destination:
container: container-name
storage_account: account-name
Expand Down Expand Up @@ -106175,6 +106191,7 @@ paths:
value:
data:
attributes:
compression_method: GZIP
destination:
container: container-name
storage_account: account-name
Expand Down
1 change: 1 addition & 0 deletions examples/v2/logs-archives/CreateLogsArchive.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
body = DatadogAPIClient::V2::LogsArchiveCreateRequest.new({
data: DatadogAPIClient::V2::LogsArchiveCreateRequestDefinition.new({
attributes: DatadogAPIClient::V2::LogsArchiveCreateRequestAttributes.new({
compression_method: DatadogAPIClient::V2::LogsArchiveAttributesCompressionMethod::GZIP,
destination: DatadogAPIClient::V2::LogsArchiveDestinationAzure.new({
container: "container-name",
integration: DatadogAPIClient::V2::LogsArchiveIntegrationAzure.new({
Expand Down
1 change: 1 addition & 0 deletions examples/v2/logs-archives/UpdateLogsArchive.rb
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
body = DatadogAPIClient::V2::LogsArchiveCreateRequest.new({
data: DatadogAPIClient::V2::LogsArchiveCreateRequestDefinition.new({
attributes: DatadogAPIClient::V2::LogsArchiveCreateRequestAttributes.new({
compression_method: DatadogAPIClient::V2::LogsArchiveAttributesCompressionMethod::GZIP,
destination: DatadogAPIClient::V2::LogsArchiveDestinationAzure.new({
container: "container-name",
integration: DatadogAPIClient::V2::LogsArchiveIntegrationAzure.new({
Expand Down
10 changes: 5 additions & 5 deletions features/v2/logs_archives.feature
Original file line number Diff line number Diff line change
Expand Up @@ -12,14 +12,14 @@ Feature: Logs Archives
@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-forwarding
Scenario: Create an archive returns "Bad Request" response
Given new "CreateLogsArchive" request
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"compression_method": "GZIP", "destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-forwarding
Scenario: Create an archive returns "OK" response
Given new "CreateLogsArchive" request
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"compression_method": "GZIP", "destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 200 OK

Expand Down Expand Up @@ -150,23 +150,23 @@ Feature: Logs Archives
Scenario: Update an archive returns "Bad Request" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"compression_method": "GZIP", "destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 400 Bad Request

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-forwarding
Scenario: Update an archive returns "Not found" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"compression_method": "GZIP", "destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 404 Not found

@generated @skip @team:DataDog/logs-backend @team:DataDog/logs-forwarding
Scenario: Update an archive returns "OK" response
Given new "UpdateLogsArchive" request
And request contains "archive_id" parameter from "REPLACE.ME"
And body with value {"data": {"attributes": {"destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
And body with value {"data": {"attributes": {"compression_method": "GZIP", "destination": {"container": "container-name", "integration": {"client_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa", "tenant_id": "aaaaaaaa-1a1a-1a1a-1a1a-aaaaaaaaaaaa"}, "storage_account": "account-name", "type": "azure"}, "include_tags": false, "name": "Nginx Archive", "query": "source:nginx", "rehydration_max_scan_size_in_gb": 100, "rehydration_tags": ["team:intake", "team:app"]}, "type": "archives"}}
When the request is sent
Then the response status is 200 OK

Expand Down
1 change: 1 addition & 0 deletions lib/datadog_api_client/inflector.rb
Original file line number Diff line number Diff line change
Expand Up @@ -3625,6 +3625,7 @@ def overrides
"v2.logs_aggregation_function" => "LogsAggregationFunction",
"v2.logs_archive" => "LogsArchive",
"v2.logs_archive_attributes" => "LogsArchiveAttributes",
"v2.logs_archive_attributes_compression_method" => "LogsArchiveAttributesCompressionMethod",
"v2.logs_archive_create_request" => "LogsArchiveCreateRequest",
"v2.logs_archive_create_request_attributes" => "LogsArchiveCreateRequestAttributes",
"v2.logs_archive_create_request_definition" => "LogsArchiveCreateRequestDefinition",
Expand Down
12 changes: 11 additions & 1 deletion lib/datadog_api_client/v2/models/logs_archive_attributes.rb
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ module DatadogAPIClient::V2
class LogsArchiveAttributes
include BaseGenericModel

# The type of compression for the archive.
attr_accessor :compression_method

# An archive's destination.
attr_accessor :destination

Expand Down Expand Up @@ -49,6 +52,7 @@ class LogsArchiveAttributes
# @!visibility private
def self.attribute_map
{
:'compression_method' => :'compression_method',
:'destination' => :'destination',
:'include_tags' => :'include_tags',
:'name' => :'name',
Expand All @@ -63,6 +67,7 @@ def self.attribute_map
# @!visibility private
def self.openapi_types
{
:'compression_method' => :'LogsArchiveAttributesCompressionMethod',
:'destination' => :'LogsArchiveDestination',
:'include_tags' => :'Boolean',
:'name' => :'String',
Expand Down Expand Up @@ -100,6 +105,10 @@ def initialize(attributes = {})
end
}

if attributes.key?(:'compression_method')
self.compression_method = attributes[:'compression_method']
end

if attributes.key?(:'destination')
self.destination = attributes[:'destination']
end
Expand Down Expand Up @@ -186,6 +195,7 @@ def to_hash
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
compression_method == o.compression_method &&
destination == o.destination &&
include_tags == o.include_tags &&
name == o.name &&
Expand All @@ -200,7 +210,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[destination, include_tags, name, query, rehydration_max_scan_size_in_gb, rehydration_tags, state, additional_properties].hash
[compression_method, destination, include_tags, name, query, rehydration_max_scan_size_in_gb, rehydration_tags, state, additional_properties].hash
end
end
end
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
=begin
#Datadog API V2 Collection

#Collection of all Datadog Public endpoints.

The version of the OpenAPI document: 1.0
Contact: support@datadoghq.com
Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator

Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.

=end

require 'date'
require 'time'

module DatadogAPIClient::V2
# The type of compression for the archive.
#
# Generated enum model mirroring the `LogsArchiveAttributesCompressionMethod`
# schema in .generator/schemas/v2/openapi.yaml (enum values GZIP and ZSTD;
# the schema declares GZIP as the default).
# NOTE(review): BaseEnumModel is defined elsewhere in this client and
# presumably supplies enum lookup/serialization behavior — confirm there.
class LogsArchiveAttributesCompressionMethod
include BaseEnumModel

# Compress archived logs with gzip (the schema default).
GZIP = "GZIP".freeze
# Compress archived logs with Zstandard.
ZSTD = "ZSTD".freeze
end
end
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,9 @@ module DatadogAPIClient::V2
class LogsArchiveCreateRequestAttributes
include BaseGenericModel

# The type of compression for the archive.
attr_accessor :compression_method

# An archive's destination.
attr_reader :destination

Expand All @@ -46,6 +49,7 @@ class LogsArchiveCreateRequestAttributes
# @!visibility private
def self.attribute_map
{
:'compression_method' => :'compression_method',
:'destination' => :'destination',
:'include_tags' => :'include_tags',
:'name' => :'name',
Expand All @@ -59,6 +63,7 @@ def self.attribute_map
# @!visibility private
def self.openapi_types
{
:'compression_method' => :'LogsArchiveAttributesCompressionMethod',
:'destination' => :'LogsArchiveCreateRequestDestination',
:'include_tags' => :'Boolean',
:'name' => :'String',
Expand Down Expand Up @@ -94,6 +99,10 @@ def initialize(attributes = {})
end
}

if attributes.key?(:'compression_method')
self.compression_method = attributes[:'compression_method']
end

if attributes.key?(:'destination')
self.destination = attributes[:'destination']
end
Expand Down Expand Up @@ -187,6 +196,7 @@ def to_hash
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
compression_method == o.compression_method &&
destination == o.destination &&
include_tags == o.include_tags &&
name == o.name &&
Expand All @@ -200,7 +210,7 @@ def ==(o)
# @return [Integer] Hash code
# @!visibility private
def hash
[destination, include_tags, name, query, rehydration_max_scan_size_in_gb, rehydration_tags, additional_properties].hash
[compression_method, destination, include_tags, name, query, rehydration_max_scan_size_in_gb, rehydration_tags, additional_properties].hash
end
end
end
Loading