From cc0f9a33393e520be6819e05ad1ca1772d4e047e Mon Sep 17 00:00:00 2001 From: Anagha Joshi Date: Sat, 9 May 2026 11:37:03 +0530 Subject: [PATCH] feat: Add Logpush Jobs API V1 SDK with unit and integration tests Signed-off-by: Anagha Joshi --- ibm_cloud_networking_services/__init__.py | 1 + .../logpush_jobs_api_v1.py | 2657 +++++++++++++++++ test/integration/test_logpush_jobs_api_v1.py | 451 +++ test/unit/test_logpush_jobs_api_v1.py | 1898 ++++++++++++ 4 files changed, 5007 insertions(+) create mode 100644 ibm_cloud_networking_services/logpush_jobs_api_v1.py create mode 100644 test/integration/test_logpush_jobs_api_v1.py create mode 100644 test/unit/test_logpush_jobs_api_v1.py diff --git a/ibm_cloud_networking_services/__init__.py b/ibm_cloud_networking_services/__init__.py index 9ae03d7..58d34af 100644 --- a/ibm_cloud_networking_services/__init__.py +++ b/ibm_cloud_networking_services/__init__.py @@ -52,6 +52,7 @@ from .zones_v1 import ZonesV1 from .webhooks_v1 import WebhooksV1 from .alerts_v1 import AlertsV1 +from .logpush_jobs_api_v1 import LogpushJobsApiV1 # Private DNS Service Packages from .dns_zones_v1 import DnsZonesV1 diff --git a/ibm_cloud_networking_services/logpush_jobs_api_v1.py b/ibm_cloud_networking_services/logpush_jobs_api_v1.py new file mode 100644 index 0000000..9647f76 --- /dev/null +++ b/ibm_cloud_networking_services/logpush_jobs_api_v1.py @@ -0,0 +1,2657 @@ +# coding: utf-8 + +# (C) Copyright IBM Corp. 2026. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# IBM OpenAPI SDK Code Generator Version: 3.114.0-a902401e-20260427-192904 + +""" +CIS Logpush Jobs + +API Version: 1.0.0 +""" + +from enum import Enum +from typing import Dict, List, Optional +import json + +from ibm_cloud_sdk_core import BaseService, DetailedResponse +from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator +from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment +from ibm_cloud_sdk_core.utils import convert_model + +from .common import get_sdk_headers + +############################################################################## +# Service +############################################################################## + + +class LogpushJobsApiV1(BaseService): + """The Logpush Jobs API V1 service.""" + + DEFAULT_SERVICE_URL = 'https://api.cis.cloud.ibm.com' + DEFAULT_SERVICE_NAME = 'logpush_jobs_api' + + @classmethod + def new_instance( + cls, + crn: str, + dataset: str, + zone_id: str, + service_name: str = DEFAULT_SERVICE_NAME, + ) -> 'LogpushJobsApiV1': + """ + Return a new client for the Logpush Jobs API service using the specified + parameters and external configuration. + + :param str crn: Full URL-encoded CRN of the service instance. + + :param str dataset: The dataset. + + :param str zone_id: Zone identifier. + """ + if crn is None: + raise ValueError('crn must be provided') + if dataset is None: + raise ValueError('dataset must be provided') + if zone_id is None: + raise ValueError('zone_id must be provided') + + authenticator = get_authenticator_from_environment(service_name) + service = cls( + crn, + dataset, + zone_id, + authenticator + ) + service.configure_service(service_name) + return service + + def __init__( + self, + crn: str, + dataset: str, + zone_id: str, + authenticator: Authenticator = None, + ) -> None: + """ + Construct a new client for the Logpush Jobs API service. 
+ + :param str crn: Full URL-encoded CRN of the service instance. + + :param str dataset: The dataset. + + :param str zone_id: Zone identifier. + + :param Authenticator authenticator: The authenticator specifies the authentication mechanism. + Get up to date information from https://github.com/IBM/python-sdk-core/blob/main/README.md + about initializing the authenticator of your choice. + """ + if crn is None: + raise ValueError('crn must be provided') + if dataset is None: + raise ValueError('dataset must be provided') + if zone_id is None: + raise ValueError('zone_id must be provided') + + BaseService.__init__(self, service_url=self.DEFAULT_SERVICE_URL, authenticator=authenticator) + self.crn = crn + self.dataset = dataset + self.zone_id = zone_id + + ######################### + # Logpush Jobs + ######################### + + def get_logpush_jobs_v2( + self, + **kwargs, + ) -> DetailedResponse: + """ + List logpush jobs. + + List configured logpush jobs for your domain. + + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `ListLogpushJobsResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='get_logpush_jobs_v2', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/jobs'.format(**path_param_dict) + request = self.prepare_request( + method='GET', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def create_logpush_job_v2( + self, + *, + create_logpush_job_v2_request: Optional['CreateLogpushJobV2Request'] = None, + **kwargs, + ) -> DetailedResponse: + """ + Create a logpush jobs. + + Create a new logpush job for the domain. + + :param CreateLogpushJobV2Request create_logpush_job_v2_request: (optional) + Create logpush job body. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogpushJobsResp` object + """ + + if create_logpush_job_v2_request is not None and isinstance(create_logpush_job_v2_request, CreateLogpushJobV2Request): + create_logpush_job_v2_request = convert_model(create_logpush_job_v2_request) + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='create_logpush_job_v2', + ) + headers.update(sdk_headers) + + data = json.dumps(create_logpush_job_v2_request) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/jobs'.format(**path_param_dict) + request = self.prepare_request( + method='POST', + url=url, + headers=headers, + data=data, + ) + + response = self.send(request, **kwargs) + return response + + def get_logpush_job_v2( + self, + job_id: str, + **kwargs, + ) -> DetailedResponse: + """ + Get a logpush job. + + Get a logpush job for a given zone. + + :param str job_id: logpush job identifier. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogpushJobsResp` object + """ + + if not job_id: + raise ValueError('job_id must be provided') + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='get_logpush_job_v2', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id', 'job_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id, job_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/jobs/{job_id}'.format(**path_param_dict) + request = self.prepare_request( + method='GET', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def update_logpush_job_v2( + self, + job_id: str, + *, + update_logpush_job_v2_request: Optional['UpdateLogpushJobV2Request'] = None, + **kwargs, + ) -> DetailedResponse: + """ + Update a logpush job. + + Update an existing logpush job for a given zone. + + :param str job_id: logpush job identifier. + :param UpdateLogpushJobV2Request update_logpush_job_v2_request: (optional) + Update logpush job. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogpushJobsResp` object + """ + + if not job_id: + raise ValueError('job_id must be provided') + if update_logpush_job_v2_request is not None and isinstance(update_logpush_job_v2_request, UpdateLogpushJobV2Request): + update_logpush_job_v2_request = convert_model(update_logpush_job_v2_request) + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='update_logpush_job_v2', + ) + headers.update(sdk_headers) + + data = json.dumps(update_logpush_job_v2_request) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id', 'job_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id, job_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/jobs/{job_id}'.format(**path_param_dict) + request = self.prepare_request( + method='PUT', + url=url, + headers=headers, + data=data, + ) + + response = self.send(request, **kwargs) + return response + + def delete_logpush_job_v2( + self, + job_id: str, + **kwargs, + ) -> DetailedResponse: + """ + Delete a logpush job. + + Delete a logpush job for a zone. + + :param str job_id: logpush job identifier. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `DeleteLogpushJobResp` object + """ + + if not job_id: + raise ValueError('job_id must be provided') + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='delete_logpush_job_v2', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id', 'job_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id, job_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/jobs/{job_id}'.format(**path_param_dict) + request = self.prepare_request( + method='DELETE', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def get_logpush_ownership_v2( + self, + *, + cos: Optional[dict] = None, + **kwargs, + ) -> DetailedResponse: + """ + Get a new ownership challenge sent to your destination. + + Get a new ownership challenge. + + :param dict cos: (optional) Information to identify the COS bucket where + the data will be pushed. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `OwnershipChallengeResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='get_logpush_ownership_v2', + ) + headers.update(sdk_headers) + + data = { + 'cos': cos, + } + data = {k: v for (k, v) in data.items() if v is not None} + data = json.dumps(data) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/ownership'.format(**path_param_dict) + request = self.prepare_request( + method='POST', + url=url, + headers=headers, + data=data, + ) + + response = self.send(request, **kwargs) + return response + + def validate_logpush_ownership_challenge_v2( + self, + *, + cos: Optional[dict] = None, + ownership_challenge: Optional[str] = None, + **kwargs, + ) -> DetailedResponse: + """ + Validate ownership challenge of the destination. + + Validate ownership challenge of the destination. + + :param dict cos: (optional) Information to identify the COS bucket where + the data will be pushed. + :param str ownership_challenge: (optional) Ownership challenge token to + prove destination ownership. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `OwnershipChallengeValidateResult` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='validate_logpush_ownership_challenge_v2', + ) + headers.update(sdk_headers) + + data = { + 'cos': cos, + 'ownership_challenge': ownership_challenge, + } + data = {k: v for (k, v) in data.items() if v is not None} + data = json.dumps(data) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/ownership/validate'.format(**path_param_dict) + request = self.prepare_request( + method='POST', + url=url, + headers=headers, + data=data, + ) + + response = self.send(request, **kwargs) + return response + + def list_fields_for_dataset_v2( + self, + **kwargs, + ) -> DetailedResponse: + """ + The list of all fields available for a dataset. + + The list of all fields available for a dataset. + + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `ListFieldsResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='list_fields_for_dataset_v2', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id', 'dataset'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id, self.dataset) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/datasets/{dataset}/fields'.format(**path_param_dict) + request = self.prepare_request( + method='GET', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def list_logpush_jobs_for_dataset_v2( + self, + **kwargs, + ) -> DetailedResponse: + """ + List logpush jobs for dataset. + + List configured logpush jobs for a dataset. + + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogpushJobsResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='list_logpush_jobs_for_dataset_v2', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id', 'dataset'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id, self.dataset) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v2/{crn}/zones/{zone_id}/logpush/datasets/{dataset}/jobs'.format(**path_param_dict) + request = self.prepare_request( + method='GET', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def get_logs_retention( + self, + **kwargs, + ) -> DetailedResponse: + """ + Get log retention. + + Get log retention setting for Logpull/Logpush on your domain. + + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogRetentionResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='get_logs_retention', + ) + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v1/{crn}/zones/{zone_id}/logs/retention'.format(**path_param_dict) + request = self.prepare_request( + method='GET', + url=url, + headers=headers, + ) + + response = self.send(request, **kwargs) + return response + + def create_log_retention( + self, + *, + flag: Optional[bool] = None, + **kwargs, + ) -> DetailedResponse: + """ + Update log retention. + + Update log retention flag for Logpull/Logpush. + + :param bool flag: (optional) + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `LogRetentionResp` object + """ + + headers = {} + sdk_headers = get_sdk_headers( + service_name=self.DEFAULT_SERVICE_NAME, + service_version='V1', + operation_id='create_log_retention', + ) + headers.update(sdk_headers) + + data = { + 'flag': flag, + } + data = {k: v for (k, v) in data.items() if v is not None} + data = json.dumps(data) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['crn', 'zone_id'] + path_param_values = self.encode_path_vars(self.crn, self.zone_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v1/{crn}/zones/{zone_id}/logs/retention'.format(**path_param_dict) + request = self.prepare_request( + method='POST', + url=url, + headers=headers, + data=data, + ) + + response = self.send(request, **kwargs) + return response + + +############################################################################## +# Models +############################################################################## + + +class CreateLogpushJobV2Request: + """ + CreateLogpushJobV2Request. + + """ + + def __init__( + self, + ) -> None: + """ + Initialize a CreateLogpushJobV2Request object. + + """ + msg = "Cannot instantiate base class. Instead, instantiate one of the defined subclasses: {0}".format( + ", ".join(['CreateLogpushJobV2RequestLogpushJobCosReq', 'CreateLogpushJobV2RequestLogpushJobLogdnaReq', 'CreateLogpushJobV2RequestLogpushJobIbmclReq', 'CreateLogpushJobV2RequestLogpushJobGenericReq']) + ) + raise Exception(msg) + + +class LogRetentionRespResult: + """ + LogRetentionRespResult. + + :param bool flag: (optional) + """ + + def __init__( + self, + *, + flag: Optional[bool] = None, + ) -> None: + """ + Initialize a LogRetentionRespResult object. 
+ + :param bool flag: (optional) + """ + self.flag = flag + + @classmethod + def from_dict(cls, _dict: Dict) -> 'LogRetentionRespResult': + """Initialize a LogRetentionRespResult object from a json dictionary.""" + args = {} + if (flag := _dict.get('flag')) is not None: + args['flag'] = flag + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a LogRetentionRespResult object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'flag') and self.flag is not None: + _dict['flag'] = self.flag + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this LogRetentionRespResult object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'LogRetentionRespResult') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'LogRetentionRespResult') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + +class LogpushJobIbmclReqIbmcl: + """ + Required information to push logs to your Cloud Logs instance. + + :param str instance_id: GUID of the IBM Cloud Logs instance where you want to + send logs. + :param str region: Region where the IBM Cloud Logs instance is located. + :param str api_key: IBM Cloud API key used to generate a token for pushing to + your Cloud Logs instance. + """ + + def __init__( + self, + instance_id: str, + region: str, + api_key: str, + ) -> None: + """ + Initialize a LogpushJobIbmclReqIbmcl object. + + :param str instance_id: GUID of the IBM Cloud Logs instance where you want + to send logs. 
+ :param str region: Region where the IBM Cloud Logs instance is located. + :param str api_key: IBM Cloud API key used to generate a token for pushing + to your Cloud Logs instance. + """ + self.instance_id = instance_id + self.region = region + self.api_key = api_key + + @classmethod + def from_dict(cls, _dict: Dict) -> 'LogpushJobIbmclReqIbmcl': + """Initialize a LogpushJobIbmclReqIbmcl object from a json dictionary.""" + args = {} + if (instance_id := _dict.get('instance_id')) is not None: + args['instance_id'] = instance_id + else: + raise ValueError('Required property \'instance_id\' not present in LogpushJobIbmclReqIbmcl JSON') + if (region := _dict.get('region')) is not None: + args['region'] = region + else: + raise ValueError('Required property \'region\' not present in LogpushJobIbmclReqIbmcl JSON') + if (api_key := _dict.get('api_key')) is not None: + args['api_key'] = api_key + else: + raise ValueError('Required property \'api_key\' not present in LogpushJobIbmclReqIbmcl JSON') + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a LogpushJobIbmclReqIbmcl object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'instance_id') and self.instance_id is not None: + _dict['instance_id'] = self.instance_id + if hasattr(self, 'region') and self.region is not None: + _dict['region'] = self.region + if hasattr(self, 'api_key') and self.api_key is not None: + _dict['api_key'] = self.api_key + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this LogpushJobIbmclReqIbmcl object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'LogpushJobIbmclReqIbmcl') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not 
isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'LogpushJobIbmclReqIbmcl') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + +class LogpushJobsUpdateIbmclReqIbmcl: + """ + Required information to push logs to your Cloud Logs instance. + + :param str instance_id: (optional) GUID of the IBM Cloud Logs instance where you + want to send logs. + :param str region: (optional) Region where the IBM Cloud Logs instance is + located. + :param str api_key: (optional) IBM Cloud API key used to generate a token for + pushing to your Cloud Logs instance. + """ + + def __init__( + self, + *, + instance_id: Optional[str] = None, + region: Optional[str] = None, + api_key: Optional[str] = None, + ) -> None: + """ + Initialize a LogpushJobsUpdateIbmclReqIbmcl object. + + :param str instance_id: (optional) GUID of the IBM Cloud Logs instance + where you want to send logs. + :param str region: (optional) Region where the IBM Cloud Logs instance is + located. + :param str api_key: (optional) IBM Cloud API key used to generate a token + for pushing to your Cloud Logs instance. 
+ """ + self.instance_id = instance_id + self.region = region + self.api_key = api_key + + @classmethod + def from_dict(cls, _dict: Dict) -> 'LogpushJobsUpdateIbmclReqIbmcl': + """Initialize a LogpushJobsUpdateIbmclReqIbmcl object from a json dictionary.""" + args = {} + if (instance_id := _dict.get('instance_id')) is not None: + args['instance_id'] = instance_id + if (region := _dict.get('region')) is not None: + args['region'] = region + if (api_key := _dict.get('api_key')) is not None: + args['api_key'] = api_key + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a LogpushJobsUpdateIbmclReqIbmcl object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'instance_id') and self.instance_id is not None: + _dict['instance_id'] = self.instance_id + if hasattr(self, 'region') and self.region is not None: + _dict['region'] = self.region + if hasattr(self, 'api_key') and self.api_key is not None: + _dict['api_key'] = self.api_key + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this LogpushJobsUpdateIbmclReqIbmcl object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'LogpushJobsUpdateIbmclReqIbmcl') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'LogpushJobsUpdateIbmclReqIbmcl') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + +class UpdateLogpushJobV2Request: + """ + UpdateLogpushJobV2Request. + + """ + + def __init__( + self, + ) -> None: + """ + Initialize a UpdateLogpushJobV2Request object. 
+ + """ + msg = "Cannot instantiate base class. Instead, instantiate one of the defined subclasses: {0}".format( + ", ".join(['UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq', 'UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq', 'UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq', 'UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq']) + ) + raise Exception(msg) + + +class DeleteLogpushJobResp: + """ + delete logpush job response. + + :param bool success: success response. + :param List[List[str]] errors: errors. + :param List[List[str]] messages: messages. + :param dict result: result. + """ + + def __init__( + self, + success: bool, + errors: List[List[str]], + messages: List[List[str]], + result: dict, + ) -> None: + """ + Initialize a DeleteLogpushJobResp object. + + :param bool success: success response. + :param List[List[str]] errors: errors. + :param List[List[str]] messages: messages. + :param dict result: result. + """ + self.success = success + self.errors = errors + self.messages = messages + self.result = result + + @classmethod + def from_dict(cls, _dict: Dict) -> 'DeleteLogpushJobResp': + """Initialize a DeleteLogpushJobResp object from a json dictionary.""" + args = {} + if (success := _dict.get('success')) is not None: + args['success'] = success + else: + raise ValueError('Required property \'success\' not present in DeleteLogpushJobResp JSON') + if (errors := _dict.get('errors')) is not None: + args['errors'] = errors + else: + raise ValueError('Required property \'errors\' not present in DeleteLogpushJobResp JSON') + if (messages := _dict.get('messages')) is not None: + args['messages'] = messages + else: + raise ValueError('Required property \'messages\' not present in DeleteLogpushJobResp JSON') + if (result := _dict.get('result')) is not None: + args['result'] = result + else: + raise ValueError('Required property \'result\' not present in DeleteLogpushJobResp JSON') + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + 
"""Initialize a DeleteLogpushJobResp object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'success') and self.success is not None: + _dict['success'] = self.success + if hasattr(self, 'errors') and self.errors is not None: + _dict['errors'] = self.errors + if hasattr(self, 'messages') and self.messages is not None: + _dict['messages'] = self.messages + if hasattr(self, 'result') and self.result is not None: + _dict['result'] = self.result + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this DeleteLogpushJobResp object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'DeleteLogpushJobResp') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'DeleteLogpushJobResp') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + +class ListFieldsResp: + """ + list fields response. + + :param bool success: success response. + :param List[List[str]] errors: errors. + :param List[List[str]] messages: messages. + :param dict result: (optional) result. + """ + + def __init__( + self, + success: bool, + errors: List[List[str]], + messages: List[List[str]], + *, + result: Optional[dict] = None, + ) -> None: + """ + Initialize a ListFieldsResp object. + + :param bool success: success response. + :param List[List[str]] errors: errors. + :param List[List[str]] messages: messages. + :param dict result: (optional) result. 
+ """ + self.success = success + self.errors = errors + self.messages = messages + self.result = result + + @classmethod + def from_dict(cls, _dict: Dict) -> 'ListFieldsResp': + """Initialize a ListFieldsResp object from a json dictionary.""" + args = {} + if (success := _dict.get('success')) is not None: + args['success'] = success + else: + raise ValueError('Required property \'success\' not present in ListFieldsResp JSON') + if (errors := _dict.get('errors')) is not None: + args['errors'] = errors + else: + raise ValueError('Required property \'errors\' not present in ListFieldsResp JSON') + if (messages := _dict.get('messages')) is not None: + args['messages'] = messages + else: + raise ValueError('Required property \'messages\' not present in ListFieldsResp JSON') + if (result := _dict.get('result')) is not None: + args['result'] = result + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a ListFieldsResp object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'success') and self.success is not None: + _dict['success'] = self.success + if hasattr(self, 'errors') and self.errors is not None: + _dict['errors'] = self.errors + if hasattr(self, 'messages') and self.messages is not None: + _dict['messages'] = self.messages + if hasattr(self, 'result') and self.result is not None: + _dict['result'] = self.result + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this ListFieldsResp object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'ListFieldsResp') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, 
class ListLogpushJobsResp:
    """
    List Logpush Jobs Response.

    :param bool success: success response.
    :param List[List[str]] errors: errors.
    :param List[List[str]] messages: messages.
    :param List[LogpushJobPack] result: result.
    """

    def __init__(
        self,
        success: bool,
        errors: List[List[str]],
        messages: List[List[str]],
        result: List['LogpushJobPack'],
    ) -> None:
        """
        Initialize a ListLogpushJobsResp object.

        :param bool success: success response.
        :param List[List[str]] errors: errors.
        :param List[List[str]] messages: messages.
        :param List[LogpushJobPack] result: result.
        """
        self.success = success
        self.errors = errors
        self.messages = messages
        self.result = result

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'ListLogpushJobsResp':
        """Initialize a ListLogpushJobsResp object from a json dictionary."""
        # Every property is required; fail fast on the first one that is absent.
        for key in ('success', 'errors', 'messages', 'result'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in ListLogpushJobsResp JSON')
        args = {
            'success': _dict['success'],
            'errors': _dict['errors'],
            'messages': _dict['messages'],
            # Each entry of result is deserialized into a LogpushJobPack model.
            'result': [LogpushJobPack.from_dict(item) for item in _dict['result']],
        }
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a ListLogpushJobsResp object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('success', 'errors', 'messages'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        jobs = getattr(self, 'result', None)
        if jobs is not None:
            # Entries may already be plain dicts; only model objects need serializing.
            _dict['result'] = [job if isinstance(job, dict) else job.to_dict() for job in jobs]
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this ListLogpushJobsResp object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'ListLogpushJobsResp') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'ListLogpushJobsResp') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class LogRetentionResp:
    """
    log retention result.

    :param LogRetentionRespResult result: (optional)
    :param bool success: (optional) success response.
    :param List[List[str]] errors: (optional) errors.
    :param List[List[str]] messages: (optional) messages.
    """

    def __init__(
        self,
        *,
        result: Optional['LogRetentionRespResult'] = None,
        success: Optional[bool] = None,
        errors: Optional[List[List[str]]] = None,
        messages: Optional[List[List[str]]] = None,
    ) -> None:
        """
        Initialize a LogRetentionResp object.

        :param LogRetentionRespResult result: (optional)
        :param bool success: (optional) success response.
        :param List[List[str]] errors: (optional) errors.
        :param List[List[str]] messages: (optional) messages.
        """
        self.result = result
        self.success = success
        self.errors = errors
        self.messages = messages

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'LogRetentionResp':
        """Initialize a LogRetentionResp object from a json dictionary."""
        args = {}
        raw_result = _dict.get('result')
        if raw_result is not None:
            # The nested result payload is deserialized into its model class.
            args['result'] = LogRetentionRespResult.from_dict(raw_result)
        # All remaining properties are optional pass-throughs.
        for key in ('success', 'errors', 'messages'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a LogRetentionResp object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        res = getattr(self, 'result', None)
        if res is not None:
            # result may be a plain dict or a LogRetentionRespResult model.
            _dict['result'] = res if isinstance(res, dict) else res.to_dict()
        for key in ('success', 'errors', 'messages'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this LogRetentionResp object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'LogRetentionResp') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'LogRetentionResp') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class LogpushJobPack:
    """
    logpush job pack.

    :param int id: Logpush Job ID.
    :param str name: Logpush Job Name.
    :param bool enabled: Whether the logpush job is enabled or not.
    :param str dataset: Dataset to be pulled.
    :param str frequency: The frequency at which CIS sends batches of logs to your
          destination.
    :param str logpull_options: Configuration string.
    :param str destination_conf: Uniquely identifies a resource (such as an s3
          bucket) where data will be pushed.
    :param str last_complete: (optional) Records the last time for which logs have
          been successfully pushed.
    :param str last_error: (optional) Records the last time the job failed.
    :param str error_message: (optional) The last failure.
    """

    def __init__(
        self,
        id: int,
        name: str,
        enabled: bool,
        dataset: str,
        frequency: str,
        logpull_options: str,
        destination_conf: str,
        *,
        last_complete: Optional[str] = None,
        last_error: Optional[str] = None,
        error_message: Optional[str] = None,
    ) -> None:
        """
        Initialize a LogpushJobPack object.

        :param int id: Logpush Job ID.
        :param str name: Logpush Job Name.
        :param bool enabled: Whether the logpush job is enabled or not.
        :param str dataset: Dataset to be pulled.
        :param str frequency: The frequency at which CIS sends batches of logs to
              your destination.
        :param str logpull_options: Configuration string.
        :param str destination_conf: Uniquely identifies a resource (such as an s3
              bucket) where data will be pushed.
        :param str last_complete: (optional) Records the last time for which logs
              have been successfully pushed.
        :param str last_error: (optional) Records the last time the job failed.
        :param str error_message: (optional) The last failure.
        """
        self.id = id
        self.name = name
        self.enabled = enabled
        self.dataset = dataset
        self.frequency = frequency
        self.logpull_options = logpull_options
        self.destination_conf = destination_conf
        self.last_complete = last_complete
        self.last_error = last_error
        self.error_message = error_message

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'LogpushJobPack':
        """Initialize a LogpushJobPack object from a json dictionary."""
        args = {}
        # The first seven properties are mandatory in the JSON payload.
        for key in ('id', 'name', 'enabled', 'dataset', 'frequency',
                    'logpull_options', 'destination_conf'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in LogpushJobPack JSON')
            args[key] = _dict[key]
        # Timestamps / failure details are optional.
        for key in ('last_complete', 'last_error', 'error_message'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a LogpushJobPack object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('id', 'name', 'enabled', 'dataset', 'frequency',
                    'logpull_options', 'destination_conf', 'last_complete',
                    'last_error', 'error_message'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this LogpushJobPack object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'LogpushJobPack') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'LogpushJobPack') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class LogpushJobsResp:
    """
    logpush job response.

    :param bool success: success response.
    :param List[List[str]] errors: errors.
    :param List[List[str]] messages: messages.
    :param LogpushJobPack result: logpush job pack.
    """

    def __init__(
        self,
        success: bool,
        errors: List[List[str]],
        messages: List[List[str]],
        result: 'LogpushJobPack',
    ) -> None:
        """
        Initialize a LogpushJobsResp object.

        :param bool success: success response.
        :param List[List[str]] errors: errors.
        :param List[List[str]] messages: messages.
        :param LogpushJobPack result: logpush job pack.
        """
        self.success = success
        self.errors = errors
        self.messages = messages
        self.result = result

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'LogpushJobsResp':
        """Initialize a LogpushJobsResp object from a json dictionary."""
        # Every property is required; fail fast on the first one that is absent.
        for key in ('success', 'errors', 'messages', 'result'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in LogpushJobsResp JSON')
        args = {
            'success': _dict['success'],
            'errors': _dict['errors'],
            'messages': _dict['messages'],
            # The nested result payload is deserialized into a LogpushJobPack.
            'result': LogpushJobPack.from_dict(_dict['result']),
        }
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a LogpushJobsResp object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('success', 'errors', 'messages'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        res = getattr(self, 'result', None)
        if res is not None:
            # result may be a plain dict or a LogpushJobPack model.
            _dict['result'] = res if isinstance(res, dict) else res.to_dict()
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this LogpushJobsResp object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'LogpushJobsResp') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'LogpushJobsResp') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class OwnershipChallengeResp:
    """
    Get Logpush Ownership Challenge Response.

    :param bool success: success response.
    :param List[List[str]] errors: errors.
    :param List[List[str]] messages: messages.
    :param OwnershipChallengeResult result: ownership challenge result.
    """

    def __init__(
        self,
        success: bool,
        errors: List[List[str]],
        messages: List[List[str]],
        result: 'OwnershipChallengeResult',
    ) -> None:
        """
        Initialize a OwnershipChallengeResp object.

        :param bool success: success response.
        :param List[List[str]] errors: errors.
        :param List[List[str]] messages: messages.
        :param OwnershipChallengeResult result: ownership challenge result.
        """
        self.success = success
        self.errors = errors
        self.messages = messages
        self.result = result

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'OwnershipChallengeResp':
        """Initialize a OwnershipChallengeResp object from a json dictionary."""
        # Every property is required; fail fast on the first one that is absent.
        for key in ('success', 'errors', 'messages', 'result'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in OwnershipChallengeResp JSON')
        args = {
            'success': _dict['success'],
            'errors': _dict['errors'],
            'messages': _dict['messages'],
            # The nested result payload is deserialized into its model class.
            'result': OwnershipChallengeResult.from_dict(_dict['result']),
        }
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a OwnershipChallengeResp object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('success', 'errors', 'messages'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        res = getattr(self, 'result', None)
        if res is not None:
            # result may be a plain dict or an OwnershipChallengeResult model.
            _dict['result'] = res if isinstance(res, dict) else res.to_dict()
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this OwnershipChallengeResp object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'OwnershipChallengeResp') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'OwnershipChallengeResp') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
class OwnershipChallengeResult:
    """
    ownership challenge result.

    :param str filename: file name.
    :param bool valid: valid.
    :param str messages: (optional) message.
    """

    def __init__(
        self,
        filename: str,
        valid: bool,
        *,
        messages: Optional[str] = None,
    ) -> None:
        """
        Initialize a OwnershipChallengeResult object.

        :param str filename: file name.
        :param bool valid: valid.
        :param str messages: (optional) message.
        """
        self.filename = filename
        self.valid = valid
        self.messages = messages

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'OwnershipChallengeResult':
        """Initialize a OwnershipChallengeResult object from a json dictionary."""
        args = {}
        # filename and valid are mandatory; messages is optional.
        for key in ('filename', 'valid'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in OwnershipChallengeResult JSON')
            args[key] = _dict[key]
        if _dict.get('messages') is not None:
            args['messages'] = _dict['messages']
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a OwnershipChallengeResult object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('filename', 'valid', 'messages'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this OwnershipChallengeResult object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'OwnershipChallengeResult') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'OwnershipChallengeResult') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other
+ """ + self.valid = valid + + @classmethod + def from_dict(cls, _dict: Dict) -> 'OwnershipChallengeValidateResult': + """Initialize a OwnershipChallengeValidateResult object from a json dictionary.""" + args = {} + if (valid := _dict.get('valid')) is not None: + args['valid'] = valid + else: + raise ValueError('Required property \'valid\' not present in OwnershipChallengeValidateResult JSON') + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a OwnershipChallengeValidateResult object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'valid') and self.valid is not None: + _dict['valid'] = self.valid + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this OwnershipChallengeValidateResult object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'OwnershipChallengeValidateResult') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'OwnershipChallengeValidateResult') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + +class CreateLogpushJobV2RequestLogpushJobCosReq(CreateLogpushJobV2Request): + """ + Create COS logpush job input. + + :param str name: (optional) Logpush Job Name. + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict cos: Information to identify the COS bucket where the data will be + pushed. + :param str ownership_challenge: Ownership challenge token to prove destination + ownership. 
class CreateLogpushJobV2RequestLogpushJobCosReq(CreateLogpushJobV2Request):
    """
    Create COS logpush job input.

    :param str name: (optional) Logpush Job Name.
    :param bool enabled: (optional) Whether the logpush job is enabled or not.
    :param str logpull_options: (optional) Configuration string.
    :param dict cos: Information to identify the COS bucket where the data will be
          pushed.
    :param str ownership_challenge: Ownership challenge token to prove destination
          ownership.
    :param str dataset: (optional) Dataset to be pulled.
    :param str frequency: (optional) The frequency at which CIS sends batches of
          logs to your destination.
    """

    def __init__(
        self,
        cos: dict,
        ownership_challenge: str,
        *,
        name: Optional[str] = None,
        enabled: Optional[bool] = None,
        logpull_options: Optional[str] = None,
        dataset: Optional[str] = None,
        frequency: Optional[str] = None,
    ) -> None:
        """
        Initialize a CreateLogpushJobV2RequestLogpushJobCosReq object.

        :param dict cos: Information to identify the COS bucket where the data will
               be pushed.
        :param str ownership_challenge: Ownership challenge token to prove
               destination ownership.
        :param str name: (optional) Logpush Job Name.
        :param bool enabled: (optional) Whether the logpush job is enabled or not.
        :param str logpull_options: (optional) Configuration string.
        :param str dataset: (optional) Dataset to be pulled.
        :param str frequency: (optional) The frequency at which CIS sends batches
               of logs to your destination.
        """
        # pylint: disable=super-init-not-called
        self.name = name
        self.enabled = enabled
        self.logpull_options = logpull_options
        self.cos = cos
        self.ownership_challenge = ownership_challenge
        self.dataset = dataset
        self.frequency = frequency

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'CreateLogpushJobV2RequestLogpushJobCosReq':
        """Initialize a CreateLogpushJobV2RequestLogpushJobCosReq object from a json dictionary."""
        args = {}
        for key in ('name', 'enabled', 'logpull_options'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        # cos and ownership_challenge are the only required properties.
        for key in ('cos', 'ownership_challenge'):
            if _dict.get(key) is None:
                raise ValueError('Required property \'' + key + '\' not present in CreateLogpushJobV2RequestLogpushJobCosReq JSON')
            args[key] = _dict[key]
        for key in ('dataset', 'frequency'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a CreateLogpushJobV2RequestLogpushJobCosReq object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('name', 'enabled', 'logpull_options', 'cos',
                    'ownership_challenge', 'dataset', 'frequency'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this CreateLogpushJobV2RequestLogpushJobCosReq object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'CreateLogpushJobV2RequestLogpushJobCosReq') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'CreateLogpushJobV2RequestLogpushJobCosReq') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other

    class DatasetEnum(str, Enum):
        """
        Dataset to be pulled.
        """

        HTTP_REQUESTS = 'http_requests'
        RANGE_EVENTS = 'range_events'
        FIREWALL_EVENTS = 'firewall_events'


    class FrequencyEnum(str, Enum):
        """
        The frequency at which CIS sends batches of logs to your destination.
        """

        HIGH = 'high'
        LOW = 'low'
class CreateLogpushJobV2RequestLogpushJobGenericReq(CreateLogpushJobV2Request):
    """
    Create logpush job for a generic destination.

    :param str name: (optional) Logpush Job Name.
    :param bool enabled: (optional) Whether the logpush job is enabled or not.
    :param str logpull_options: (optional) Configuration string.
    :param str destination_conf: Uniquely identifies a resource where data will be
          pushed. Additional configuration parameters supported by the destination may be
          included.
    :param str dataset: (optional) Dataset to be pulled.
    :param str frequency: (optional) The frequency at which CIS sends batches of
          logs to your destination.
    """

    def __init__(
        self,
        destination_conf: str,
        *,
        name: Optional[str] = None,
        enabled: Optional[bool] = None,
        logpull_options: Optional[str] = None,
        dataset: Optional[str] = None,
        frequency: Optional[str] = None,
    ) -> None:
        """
        Initialize a CreateLogpushJobV2RequestLogpushJobGenericReq object.

        :param str destination_conf: Uniquely identifies a resource where data will
               be pushed. Additional configuration parameters supported by the destination
               may be included.
        :param str name: (optional) Logpush Job Name.
        :param bool enabled: (optional) Whether the logpush job is enabled or not.
        :param str logpull_options: (optional) Configuration string.
        :param str dataset: (optional) Dataset to be pulled.
        :param str frequency: (optional) The frequency at which CIS sends batches
               of logs to your destination.
        """
        # pylint: disable=super-init-not-called
        self.name = name
        self.enabled = enabled
        self.logpull_options = logpull_options
        self.destination_conf = destination_conf
        self.dataset = dataset
        self.frequency = frequency

    @classmethod
    def from_dict(cls, _dict: Dict) -> 'CreateLogpushJobV2RequestLogpushJobGenericReq':
        """Initialize a CreateLogpushJobV2RequestLogpushJobGenericReq object from a json dictionary."""
        args = {}
        for key in ('name', 'enabled', 'logpull_options'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        # destination_conf is the only required property.
        if _dict.get('destination_conf') is None:
            raise ValueError('Required property \'destination_conf\' not present in CreateLogpushJobV2RequestLogpushJobGenericReq JSON')
        args['destination_conf'] = _dict['destination_conf']
        for key in ('dataset', 'frequency'):
            if _dict.get(key) is not None:
                args[key] = _dict[key]
        return cls(**args)

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a CreateLogpushJobV2RequestLogpushJobGenericReq object from a json dictionary."""
        return cls.from_dict(_dict)

    def to_dict(self) -> Dict:
        """Return a json dictionary representing this model."""
        _dict = {}
        for key in ('name', 'enabled', 'logpull_options', 'destination_conf',
                    'dataset', 'frequency'):
            value = getattr(self, key, None)
            if value is not None:
                _dict[key] = value
        return _dict

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        return self.to_dict()

    def __str__(self) -> str:
        """Return a `str` version of this CreateLogpushJobV2RequestLogpushJobGenericReq object."""
        return json.dumps(self.to_dict(), indent=2)

    def __eq__(self, other: 'CreateLogpushJobV2RequestLogpushJobGenericReq') -> bool:
        """Return `true` when self and other are equal, false otherwise."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other: 'CreateLogpushJobV2RequestLogpushJobGenericReq') -> bool:
        """Return `true` when self and other are not equal, false otherwise."""
        return not self == other

    class DatasetEnum(str, Enum):
        """
        Dataset to be pulled.
        """

        HTTP_REQUESTS = 'http_requests'
        RANGE_EVENTS = 'range_events'
        FIREWALL_EVENTS = 'firewall_events'


    class FrequencyEnum(str, Enum):
        """
        The frequency at which CIS sends batches of logs to your destination.
        """

        HIGH = 'high'
        LOW = 'low'
+ """ + # pylint: disable=super-init-not-called + self.name = name + self.enabled = enabled + self.logpull_options = logpull_options + self.ibmcl = ibmcl + self.dataset = dataset + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'CreateLogpushJobV2RequestLogpushJobIbmclReq': + """Initialize a CreateLogpushJobV2RequestLogpushJobIbmclReq object from a json dictionary.""" + args = {} + if (name := _dict.get('name')) is not None: + args['name'] = name + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (ibmcl := _dict.get('ibmcl')) is not None: + args['ibmcl'] = LogpushJobIbmclReqIbmcl.from_dict(ibmcl) + else: + raise ValueError('Required property \'ibmcl\' not present in CreateLogpushJobV2RequestLogpushJobIbmclReq JSON') + if (dataset := _dict.get('dataset')) is not None: + args['dataset'] = dataset + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a CreateLogpushJobV2RequestLogpushJobIbmclReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'name') and self.name is not None: + _dict['name'] = self.name + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'ibmcl') and self.ibmcl is not None: + if isinstance(self.ibmcl, dict): + _dict['ibmcl'] = self.ibmcl + else: + _dict['ibmcl'] = self.ibmcl.to_dict() + if hasattr(self, 'dataset') and self.dataset is not None: + _dict['dataset'] = self.dataset + if hasattr(self, 'frequency') and self.frequency is not None: 
+ _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this CreateLogpushJobV2RequestLogpushJobIbmclReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'CreateLogpushJobV2RequestLogpushJobIbmclReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'CreateLogpushJobV2RequestLogpushJobIbmclReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class DatasetEnum(str, Enum): + """ + Dataset to be pulled. + """ + + HTTP_REQUESTS = 'http_requests' + RANGE_EVENTS = 'range_events' + FIREWALL_EVENTS = 'firewall_events' + + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + + + +class CreateLogpushJobV2RequestLogpushJobLogdnaReq(CreateLogpushJobV2Request): + """ + Create LogDNA logpush job input. + + :param str name: (optional) Logpush Job Name. + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict logdna: Information to identify the LogDNA instance the data will be + pushed. + :param str dataset: (optional) Dataset to be pulled. + :param str frequency: (optional) The frequency at which CIS sends batches of + logs to your destination. + """ + + def __init__( + self, + logdna: dict, + *, + name: Optional[str] = None, + enabled: Optional[bool] = None, + logpull_options: Optional[str] = None, + dataset: Optional[str] = None, + frequency: Optional[str] = None, + ) -> None: + """ + Initialize a CreateLogpushJobV2RequestLogpushJobLogdnaReq object. 
+ + :param dict logdna: Information to identify the LogDNA instance the data + will be pushed. + :param str name: (optional) Logpush Job Name. + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param str dataset: (optional) Dataset to be pulled. + :param str frequency: (optional) The frequency at which CIS sends batches + of logs to your destination. + """ + # pylint: disable=super-init-not-called + self.name = name + self.enabled = enabled + self.logpull_options = logpull_options + self.logdna = logdna + self.dataset = dataset + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'CreateLogpushJobV2RequestLogpushJobLogdnaReq': + """Initialize a CreateLogpushJobV2RequestLogpushJobLogdnaReq object from a json dictionary.""" + args = {} + if (name := _dict.get('name')) is not None: + args['name'] = name + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (logdna := _dict.get('logdna')) is not None: + args['logdna'] = logdna + else: + raise ValueError('Required property \'logdna\' not present in CreateLogpushJobV2RequestLogpushJobLogdnaReq JSON') + if (dataset := _dict.get('dataset')) is not None: + args['dataset'] = dataset + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a CreateLogpushJobV2RequestLogpushJobLogdnaReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'name') and self.name is not None: + _dict['name'] = self.name + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 
'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'logdna') and self.logdna is not None: + _dict['logdna'] = self.logdna + if hasattr(self, 'dataset') and self.dataset is not None: + _dict['dataset'] = self.dataset + if hasattr(self, 'frequency') and self.frequency is not None: + _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this CreateLogpushJobV2RequestLogpushJobLogdnaReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'CreateLogpushJobV2RequestLogpushJobLogdnaReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'CreateLogpushJobV2RequestLogpushJobLogdnaReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class DatasetEnum(str, Enum): + """ + Dataset to be pulled. + """ + + HTTP_REQUESTS = 'http_requests' + RANGE_EVENTS = 'range_events' + FIREWALL_EVENTS = 'firewall_events' + + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + + + +class UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq(UpdateLogpushJobV2Request): + """ + Update COS logpush job input. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict cos: (optional) Information to identify the COS bucket where the + data will be pushed. + :param str ownership_challenge: (optional) Ownership challenge token to prove + destination ownership. 
+ :param str frequency: (optional) The frequency at which CIS sends batches of + logs to your destination. + """ + + def __init__( + self, + *, + enabled: Optional[bool] = None, + logpull_options: Optional[str] = None, + cos: Optional[dict] = None, + ownership_challenge: Optional[str] = None, + frequency: Optional[str] = None, + ) -> None: + """ + Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq object. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict cos: (optional) Information to identify the COS bucket where + the data will be pushed. + :param str ownership_challenge: (optional) Ownership challenge token to + prove destination ownership. + :param str frequency: (optional) The frequency at which CIS sends batches + of logs to your destination. + """ + # pylint: disable=super-init-not-called + self.enabled = enabled + self.logpull_options = logpull_options + self.cos = cos + self.ownership_challenge = ownership_challenge + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq': + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq object from a json dictionary.""" + args = {} + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (cos := _dict.get('cos')) is not None: + args['cos'] = cos + if (ownership_challenge := _dict.get('ownership_challenge')) is not None: + args['ownership_challenge'] = ownership_challenge + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> 
Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'cos') and self.cos is not None: + _dict['cos'] = self.cos + if hasattr(self, 'ownership_challenge') and self.ownership_challenge is not None: + _dict['ownership_challenge'] = self.ownership_challenge + if hasattr(self, 'frequency') and self.frequency is not None: + _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + + + +class UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq(UpdateLogpushJobV2Request): + """ + Create logpush job for a generic destination. + + :param str name: (optional) Logpush Job Name. + :param bool enabled: (optional) Whether the logpush job is enabled or not. + :param str logpull_options: (optional) Configuration string. + :param str destination_conf: (optional) Uniquely identifies a resource where + data will be pushed. 
Additional configuration parameters supported by the + destination may be included. + :param str dataset: (optional) Dataset to be pulled. + :param str frequency: (optional) The frequency at which CIS sends batches of + logs to your destination. + """ + + def __init__( + self, + *, + name: Optional[str] = None, + enabled: Optional[bool] = None, + logpull_options: Optional[str] = None, + destination_conf: Optional[str] = None, + dataset: Optional[str] = None, + frequency: Optional[str] = None, + ) -> None: + """ + Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq object. + + :param str name: (optional) Logpush Job Name. + :param bool enabled: (optional) Whether the logpush job is enabled or not. + :param str logpull_options: (optional) Configuration string. + :param str destination_conf: (optional) Uniquely identifies a resource + where data will be pushed. Additional configuration parameters supported by + the destination may be included. + :param str dataset: (optional) Dataset to be pulled. + :param str frequency: (optional) The frequency at which CIS sends batches + of logs to your destination. 
+ """ + # pylint: disable=super-init-not-called + self.name = name + self.enabled = enabled + self.logpull_options = logpull_options + self.destination_conf = destination_conf + self.dataset = dataset + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq': + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq object from a json dictionary.""" + args = {} + if (name := _dict.get('name')) is not None: + args['name'] = name + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (destination_conf := _dict.get('destination_conf')) is not None: + args['destination_conf'] = destination_conf + if (dataset := _dict.get('dataset')) is not None: + args['dataset'] = dataset + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'name') and self.name is not None: + _dict['name'] = self.name + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'destination_conf') and self.destination_conf is not None: + _dict['destination_conf'] = self.destination_conf + if hasattr(self, 'dataset') and self.dataset is not None: + _dict['dataset'] = self.dataset + if hasattr(self, 'frequency') and self.frequency is not None: + _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json 
dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class DatasetEnum(str, Enum): + """ + Dataset to be pulled. + """ + + HTTP_REQUESTS = 'http_requests' + RANGE_EVENTS = 'range_events' + FIREWALL_EVENTS = 'firewall_events' + + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + + + +class UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq(UpdateLogpushJobV2Request): + """ + Update IBM Cloud Logs logpush job input. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param LogpushJobsUpdateIbmclReqIbmcl ibmcl: (optional) Required information to + push logs to your Cloud Logs instance. + :param str frequency: (optional) The frequency at which CIS sends batches of + logs to your destination. + """ + + def __init__( + self, + *, + enabled: Optional[bool] = None, + logpull_options: Optional[str] = None, + ibmcl: Optional['LogpushJobsUpdateIbmclReqIbmcl'] = None, + frequency: Optional[str] = None, + ) -> None: + """ + Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq object. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. 
+ :param LogpushJobsUpdateIbmclReqIbmcl ibmcl: (optional) Required + information to push logs to your Cloud Logs instance. + :param str frequency: (optional) The frequency at which CIS sends batches + of logs to your destination. + """ + # pylint: disable=super-init-not-called + self.enabled = enabled + self.logpull_options = logpull_options + self.ibmcl = ibmcl + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq': + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq object from a json dictionary.""" + args = {} + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (ibmcl := _dict.get('ibmcl')) is not None: + args['ibmcl'] = LogpushJobsUpdateIbmclReqIbmcl.from_dict(ibmcl) + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'ibmcl') and self.ibmcl is not None: + if isinstance(self.ibmcl, dict): + _dict['ibmcl'] = self.ibmcl + else: + _dict['ibmcl'] = self.ibmcl.to_dict() + if hasattr(self, 'frequency') and self.frequency is not None: + _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this 
UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + + + +class UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq(UpdateLogpushJobV2Request): + """ + Update LogDNA logpush job input. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict logdna: (optional) Information to identify the LogDNA instance the + data will be pushed. + :param str frequency: (optional) The frequency at which CIS sends batches of + logs to your destination. + """ + + def __init__( + self, + *, + enabled: Optional[bool] = None, + logpull_options: Optional[str] = None, + logdna: Optional[dict] = None, + frequency: Optional[str] = None, + ) -> None: + """ + Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq object. + + :param bool enabled: (optional) Whether the logpush job enabled or not. + :param str logpull_options: (optional) Configuration string. + :param dict logdna: (optional) Information to identify the LogDNA instance + the data will be pushed. + :param str frequency: (optional) The frequency at which CIS sends batches + of logs to your destination. 
+ """ + # pylint: disable=super-init-not-called + self.enabled = enabled + self.logpull_options = logpull_options + self.logdna = logdna + self.frequency = frequency + + @classmethod + def from_dict(cls, _dict: Dict) -> 'UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq': + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq object from a json dictionary.""" + args = {} + if (enabled := _dict.get('enabled')) is not None: + args['enabled'] = enabled + if (logpull_options := _dict.get('logpull_options')) is not None: + args['logpull_options'] = logpull_options + if (logdna := _dict.get('logdna')) is not None: + args['logdna'] = logdna + if (frequency := _dict.get('frequency')) is not None: + args['frequency'] = frequency + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'enabled') and self.enabled is not None: + _dict['enabled'] = self.enabled + if hasattr(self, 'logpull_options') and self.logpull_options is not None: + _dict['logpull_options'] = self.logpull_options + if hasattr(self, 'logdna') and self.logdna is not None: + _dict['logdna'] = self.logdna + if hasattr(self, 'frequency') and self.frequency is not None: + _dict['frequency'] = self.frequency + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return 
self.__dict__ == other.__dict__ + + def __ne__(self, other: 'UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other + + class FrequencyEnum(str, Enum): + """ + The frequency at which CIS sends batches of logs to your destination. + """ + + HIGH = 'high' + LOW = 'low' + diff --git a/test/integration/test_logpush_jobs_api_v1.py b/test/integration/test_logpush_jobs_api_v1.py new file mode 100644 index 0000000..79e6bc9 --- /dev/null +++ b/test/integration/test_logpush_jobs_api_v1.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# (C) Copyright IBM Corp. 2026. + +""" +Integration test code to execute logpush jobs functions +""" + +import os +import unittest +from dotenv import load_dotenv, find_dotenv +from ibm_cloud_sdk_core.api_exception import ApiException +from ibm_cloud_networking_services.logpush_jobs_api_v1 import ( + LogpushJobsApiV1, + CreateLogpushJobV2RequestLogpushJobLogdnaReq, + CreateLogpushJobV2RequestLogpushJobGenericReq, + CreateLogpushJobV2RequestLogpushJobCosReq, + CreateLogpushJobV2RequestLogpushJobIbmclReq, + UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq, + UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq, + UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq, + LogpushJobIbmclReqIbmcl, +) + +configFile = "cis.env" + +# load the .env file containing your environment variables +try: + load_dotenv(find_dotenv(filename=configFile)) +except: + print('warning: no cis.env file loaded') + + +class TestLogpushJobsApiV1(unittest.TestCase): + """ Integration tests for Logpush Jobs API """ + + @unittest.skip("Skipping...") + + def setUp(self): + """ test case setup """ + if not os.path.exists(configFile): + raise unittest.SkipTest( + 'External configuration not available, skipping...') + + self.crn = os.getenv("CRN") + self.zone_id = os.getenv("ZONE_ID") + self.endpoint = os.getenv("API_ENDPOINT") + self.ingress_key = os.getenv("INGRESS_KEY") + 
self.logdna_region = os.getenv("LOGDNA_REGION") + self.logdna_domain = os.getenv("DOMAIN_NAME") + self.cos_bucket = os.getenv("COS_BUCKET") + self.cos_region = os.getenv("COS_REGION") + self.cos_instance = os.getenv("COS_INSTANCE") + self.ownership_token = os.getenv("OWNERSHIP_TOKEN") + self.dataset = "http_requests" + + # create logpush jobs service instance + self.service = LogpushJobsApiV1.new_instance( + crn=self.crn, + zone_id=self.zone_id, + dataset=self.dataset, + service_name="cis_services" + ) + self.service.set_service_url(self.endpoint) + self._cleanup_jobs() + + def tearDown(self): + """ tear down """ + # Delete the resources + self._cleanup_jobs() + print("Clean up complete") + + def _cleanup_jobs(self): + """ Delete all existing logpush jobs """ + try: + response = self.service.get_logpush_jobs_v2() + assert response is not None + result = response.get_result() + if result and result.get("result"): + for job in result.get("result"): + job_id = str(job.get("id")) + self.service.delete_logpush_job_v2(job_id=job_id) + except ApiException as e: + print(f"Cleanup error: {e}") + + def test_1_logpush_jobs_logdna(self): + """ create/update/delete/get logpush jobs for logdna """ + # Skip this test - requires LogDNA setup + self.skipTest("Skipping LogDNA test") + + # Create logpush job for LogDNA + create_request = CreateLogpushJobV2RequestLogpushJobLogdnaReq( + name="Test123", + enabled=False, + logpull_options="timestamps=rfc3339×tamps=rfc3339", + logdna={ + "ingress_key": self.ingress_key, + "region": self.logdna_region, + "hostname": self.logdna_domain + }, + dataset="http_requests", + frequency="high" + ) + + response = self.service.create_logpush_job_v2( + create_logpush_job_v2_request=create_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + job = result.get("result") + job_id = str(job.get("id")) + + # List all logpush jobs + response = self.service.get_logpush_jobs_v2() + assert response 
is not None + result = response.get_result() + assert result.get("success") is True + all_jobs = result.get("result") + + # Get specific logpush job + get_job = all_jobs[0] + get_job_id = str(get_job.get("id")) + response = self.service.get_logpush_job_v2(job_id=get_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Update logpush job + update_request = UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq( + enabled=False, + logpull_options="timestamps=rfc3339×tamps=rfc3339", + logdna={ + "ingress_key": self.ingress_key, + "region": self.logdna_region, + "hostname": self.logdna_domain + }, + frequency="high" + ) + + response = self.service.update_logpush_job_v2( + job_id=job_id, + update_logpush_job_v2_request=update_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Delete all logpush jobs + for this_job in all_jobs: + this_job_id = str(this_job.get("id")) + response = self.service.delete_logpush_job_v2(job_id=this_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_2_logpush_jobs_generic(self): + """ create/update/delete/get logpush jobs for generic destination """ + # Skip this test - requires S3 setup + self.skipTest("Skipping generic destination test") + + # Create logpush job with generic destination + create_request = CreateLogpushJobV2RequestLogpushJobGenericReq( + name="Test123", + enabled=False, + logpull_options="timestamps=rfc3339×tamps=rfc3339", + destination_conf="s3://mybucket/logs?region=us-west-2", + dataset="http_requests", + frequency="high" + ) + + response = self.service.create_logpush_job_v2( + create_logpush_job_v2_request=create_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + job = result.get("result") + job_id = str(job.get("id")) + + # List all logpush jobs + response = 
self.service.get_logpush_jobs_v2() + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Get specific logpush job + response = self.service.get_logpush_job_v2(job_id=job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Update logpush job + update_request = UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq( + enabled=False, + logpull_options="timestamps=rfc3339×tamps=rfc3339", + destination_conf="s3://mybucket/logs?region=us-west-1", + frequency="high" + ) + + response = self.service.update_logpush_job_v2( + job_id=job_id, + update_logpush_job_v2_request=update_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Delete logpush job + response = self.service.delete_logpush_job_v2(job_id=job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_3_logpush_jobs_http_destination(self): + """ create/update/delete/get logpush jobs with custom HTTP destination """ + + # Create logpush job with custom HTTP destination + create_request = CreateLogpushJobV2RequestLogpushJobGenericReq( + name="Test123", + enabled=False, + logpull_options="fields=ClientIP,ClientRequestHost,ClientRequestMethod", + destination_conf="https://httpbin.org/post", + dataset="http_requests", + frequency="high" + ) + + response = self.service.create_logpush_job_v2( + create_logpush_job_v2_request=create_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + job = result.get("result") + job_id = str(job.get("id")) + + # List all logpush jobs + response = self.service.get_logpush_jobs_v2() + assert response is not None + result = response.get_result() + assert result.get("success") is True + all_jobs = result.get("result") + + # Get specific logpush job + get_job = all_jobs[0] + get_job_id = 
str(get_job.get("id")) + response = self.service.get_logpush_job_v2(job_id=get_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Update logpush job + update_request = UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq( + enabled=False, + logpull_options="fields=ClientIP,ClientRequestHost", + destination_conf="https://httpbin.org/post", + frequency="high" + ) + + response = self.service.update_logpush_job_v2( + job_id=job_id, + update_logpush_job_v2_request=update_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Delete all logpush jobs + for this_job in all_jobs: + this_job_id = str(this_job.get("id")) + response = self.service.delete_logpush_job_v2(job_id=this_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_4_logpush_jobs_cos(self): + """ create/update/delete/get logpush jobs for COS """ + # Skip this test - requires COS setup + self.skipTest("Skipping COS test") + + # Create logpush job for COS + create_request = CreateLogpushJobV2RequestLogpushJobCosReq( + name="Test123", + enabled=False, + logpull_options="timestamps=rfc3339×tamps=rfc3339", + cos={ + "bucket_name": "cos-bucket001", + "region": "us-south", + "id": "231f5467-3072-4cb9-9e39-a906fa3032ea" + }, + dataset="http_requests", + frequency="high", + ownership_challenge="xxxxx" + ) + + response = self.service.create_logpush_job_v2( + create_logpush_job_v2_request=create_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + job = result.get("result") + job_id = str(job.get("id")) + + # List all logpush jobs + response = self.service.get_logpush_jobs_v2() + assert response is not None + result = response.get_result() + assert result.get("success") is True + all_jobs = result.get("result") + + # Get specific logpush job + get_job = 
all_jobs[0] + get_job_id = str(get_job.get("id")) + response = self.service.get_logpush_job_v2(job_id=get_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Update logpush job + update_request = UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq( + enabled=False, + logpull_options="timestamps=rfc3339&timestamps=rfc3339", + frequency="low" + ) + + response = self.service.update_logpush_job_v2( + job_id=job_id, + update_logpush_job_v2_request=update_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Delete all logpush jobs + for this_job in all_jobs: + this_job_id = str(this_job.get("id")) + response = self.service.delete_logpush_job_v2(job_id=this_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_5_logpush_jobs_ibmcl(self): + """ create/update/delete/get logpush jobs for IBM Cloud Logs """ + + # Create logpush job for IBM Cloud Logs + ibmcl_config = LogpushJobIbmclReqIbmcl( + instance_id=os.getenv("CIS_IBMCL_INSTANCE_ID"), + region="us-south", + api_key=os.getenv("CIS_SERVICES_APIKEY") + ) + + create_request = CreateLogpushJobV2RequestLogpushJobIbmclReq( + name="Test123", + enabled=False, + logpull_options="timestamps=rfc3339&timestamps=rfc3339", + ibmcl=ibmcl_config, + dataset="http_requests", + frequency="high" + ) + + response = self.service.create_logpush_job_v2( + create_logpush_job_v2_request=create_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + job = result.get("result") + job_id = str(job.get("id")) + + # List all logpush jobs + response = self.service.get_logpush_jobs_v2() + assert response is not None + result = response.get_result() + assert result.get("success") is True + all_jobs = result.get("result") + + # Get specific logpush job + get_job = all_jobs[0] + get_job_id = str(get_job.get("id")) 
+ response = self.service.get_logpush_job_v2(job_id=get_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Update logpush job + update_request = UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq( + enabled=False, + logpull_options="timestamps=rfc3339&timestamps=rfc3339", + frequency="low" + ) + + response = self.service.update_logpush_job_v2( + job_id=job_id, + update_logpush_job_v2_request=update_request + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Delete all logpush jobs + for this_job in all_jobs: + this_job_id = str(this_job.get("id")) + response = self.service.delete_logpush_job_v2(job_id=this_job_id) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_6_ownership_challenge(self): + """ Post/Validate Logpush Ownership challenge """ + + # Send ownership to destination + response = self.service.get_logpush_ownership_v2( + cos={ + "bucket_name": self.cos_bucket, + "region": self.cos_region, + "id": self.cos_instance + } + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # Validate Logpush Ownership Challenge + response = self.service.validate_logpush_ownership_challenge_v2( + cos={ + "bucket_name": self.cos_bucket, + "region": self.cos_region, + "id": self.cos_instance + }, + ownership_challenge=self.ownership_token + ) + assert response is not None + result = response.get_result() + assert result.get("success") is True + + def test_7_list_fields_and_jobs(self): + """ List available fields and jobs """ + + # List available fields + response = self.service.list_fields_for_dataset_v2() + assert response is not None + result = response.get_result() + assert result.get("success") is True + + # List logpush jobs for dataset + response = self.service.list_logpush_jobs_for_dataset_v2() + assert response is not None + + +if 
__name__ == '__main__': + unittest.main() diff --git a/test/unit/test_logpush_jobs_api_v1.py b/test/unit/test_logpush_jobs_api_v1.py new file mode 100644 index 0000000..5292808 --- /dev/null +++ b/test/unit/test_logpush_jobs_api_v1.py @@ -0,0 +1,1898 @@ +# -*- coding: utf-8 -*- +# (C) Copyright IBM Corp. 2026. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Unit Tests for LogpushJobsApiV1 +""" + +from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator +import inspect +import json +import os +import pytest +import re +import responses +import urllib +from ibm_cloud_networking_services.logpush_jobs_api_v1 import * + +crn = 'testString' +dataset = 'testString' +zone_id = 'testString' + +_service = LogpushJobsApiV1( + authenticator=NoAuthAuthenticator(), + crn=crn, + dataset=dataset, + zone_id=zone_id, +) + +_base_url = 'https://api.cis.cloud.ibm.com' +_service.set_service_url(_base_url) + + +def preprocess_url(operation_path: str): + """ + Returns the request url associated with the specified operation path. + This will be base_url concatenated with a quoted version of operation_path. + The returned request URL is used to register the mock response so it needs + to match the request URL that is formed by the requests library. + """ + + # Form the request URL from the base URL and operation path. + request_url = _base_url + operation_path + + # If the request url does NOT end with a /, then just return it as-is. 
+ # Otherwise, return a regular expression that matches one or more trailing /. + if not request_url.endswith('/'): + return request_url + return re.compile(request_url.rstrip('/') + '/+') + + +############################################################################## +# Start of Service: LogpushJobs +############################################################################## +# region + + +class TestNewInstance: + """ + Test Class for new_instance + """ + + def test_new_instance(self): + """ + new_instance() + """ + os.environ['TEST_SERVICE_AUTH_TYPE'] = 'noAuth' + + service = LogpushJobsApiV1.new_instance( + crn=crn, + dataset=dataset, + zone_id=zone_id, + service_name='TEST_SERVICE', + ) + + assert service is not None + assert isinstance(service, LogpushJobsApiV1) + + def test_new_instance_without_authenticator(self): + """ + new_instance_without_authenticator() + """ + with pytest.raises(ValueError, match='authenticator must be provided'): + service = LogpushJobsApiV1.new_instance( + crn=crn, + dataset=dataset, + zone_id=zone_id, + service_name='TEST_SERVICE_NOT_FOUND', + ) + + def test_new_instance_without_required_params(self): + """ + new_instance_without_required_params() + """ + with pytest.raises(TypeError, match='new_instance\\(\\) missing \\d required positional arguments?: \'.*\''): + service = LogpushJobsApiV1.new_instance() + + def test_new_instance_required_param_none(self): + """ + new_instance_required_param_none() + """ + with pytest.raises(ValueError, match='crn must be provided'): + service = LogpushJobsApiV1.new_instance( + crn=None, + dataset=None, + zone_id=None, + ) + + +class TestGetLogpushJobsV2: + """ + Test Class for get_logpush_jobs_v2 + """ + + @responses.activate + def test_get_logpush_jobs_v2_all_params(self): + """ + get_logpush_jobs_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], 
"result": [{"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}]}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.get_logpush_jobs_v2() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_get_logpush_jobs_v2_all_params_with_retries(self): + # Enable retries and run test_get_logpush_jobs_v2_all_params. + _service.enable_retries() + self.test_get_logpush_jobs_v2_all_params() + + # Disable retries and run test_get_logpush_jobs_v2_all_params. + _service.disable_retries() + self.test_get_logpush_jobs_v2_all_params() + + @responses.activate + def test_get_logpush_jobs_v2_value_error(self): + """ + test_get_logpush_jobs_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": [{"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}]}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in 
req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.get_logpush_jobs_v2(**req_copy) + + def test_get_logpush_jobs_v2_value_error_with_retries(self): + # Enable retries and run test_get_logpush_jobs_v2_value_error. + _service.enable_retries() + self.test_get_logpush_jobs_v2_value_error() + + # Disable retries and run test_get_logpush_jobs_v2_value_error. + _service.disable_retries() + self.test_get_logpush_jobs_v2_value_error() + + +class TestCreateLogpushJobV2: + """ + Test Class for create_logpush_job_v2 + """ + + @responses.activate + def test_create_logpush_job_v2_all_params(self): + """ + create_logpush_job_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339&timestamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Construct a dict representation of a CreateLogpushJobV2RequestLogpushJobCosReq model + create_logpush_job_v2_request_model = {} + create_logpush_job_v2_request_model['name'] = 'My log push job' + create_logpush_job_v2_request_model['enabled'] = False + create_logpush_job_v2_request_model['logpull_options'] = 'timestamps=rfc3339&timestamps=rfc3339' + create_logpush_job_v2_request_model['cos'] = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + 
create_logpush_job_v2_request_model['ownership_challenge'] = '00000000000000000000000000000000' + create_logpush_job_v2_request_model['dataset'] = 'http_requests' + create_logpush_job_v2_request_model['frequency'] = 'high' + + # Set up parameter values + create_logpush_job_v2_request = create_logpush_job_v2_request_model + + # Invoke method + response = _service.create_logpush_job_v2( + create_logpush_job_v2_request=create_logpush_job_v2_request, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body == create_logpush_job_v2_request + + def test_create_logpush_job_v2_all_params_with_retries(self): + # Enable retries and run test_create_logpush_job_v2_all_params. + _service.enable_retries() + self.test_create_logpush_job_v2_all_params() + + # Disable retries and run test_create_logpush_job_v2_all_params. + _service.disable_retries() + self.test_create_logpush_job_v2_all_params() + + @responses.activate + def test_create_logpush_job_v2_required_params(self): + """ + test_create_logpush_job_v2_required_params() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.create_logpush_job_v2() + + # Check for 
correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_create_logpush_job_v2_required_params_with_retries(self): + # Enable retries and run test_create_logpush_job_v2_required_params. + _service.enable_retries() + self.test_create_logpush_job_v2_required_params() + + # Disable retries and run test_create_logpush_job_v2_required_params. + _service.disable_retries() + self.test_create_logpush_job_v2_required_params() + + @responses.activate + def test_create_logpush_job_v2_value_error(self): + """ + test_create_logpush_job_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.create_logpush_job_v2(**req_copy) + + def test_create_logpush_job_v2_value_error_with_retries(self): + # Enable retries and run test_create_logpush_job_v2_value_error. + _service.enable_retries() + self.test_create_logpush_job_v2_value_error() + + # Disable retries and run test_create_logpush_job_v2_value_error. 
+ _service.disable_retries() + self.test_create_logpush_job_v2_value_error() + + +class TestGetLogpushJobV2: + """ + Test Class for get_logpush_job_v2 + """ + + @responses.activate + def test_get_logpush_job_v2_all_params(self): + """ + get_logpush_job_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Invoke method + response = _service.get_logpush_job_v2( + job_id, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_get_logpush_job_v2_all_params_with_retries(self): + # Enable retries and run test_get_logpush_job_v2_all_params. + _service.enable_retries() + self.test_get_logpush_job_v2_all_params() + + # Disable retries and run test_get_logpush_job_v2_all_params. 
+ _service.disable_retries() + self.test_get_logpush_job_v2_all_params() + + @responses.activate + def test_get_logpush_job_v2_value_error(self): + """ + test_get_logpush_job_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + "job_id": job_id, + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.get_logpush_job_v2(**req_copy) + + def test_get_logpush_job_v2_value_error_with_retries(self): + # Enable retries and run test_get_logpush_job_v2_value_error. + _service.enable_retries() + self.test_get_logpush_job_v2_value_error() + + # Disable retries and run test_get_logpush_job_v2_value_error. 
+ _service.disable_retries() + self.test_get_logpush_job_v2_value_error() + + +class TestUpdateLogpushJobV2: + """ + Test Class for update_logpush_job_v2 + """ + + @responses.activate + def test_update_logpush_job_v2_all_params(self): + """ + update_logpush_job_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339&timestamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.PUT, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Construct a dict representation of a UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq model + update_logpush_job_v2_request_model = {} + update_logpush_job_v2_request_model['enabled'] = False + update_logpush_job_v2_request_model['logpull_options'] = 'timestamps=rfc3339&timestamps=rfc3339' + update_logpush_job_v2_request_model['cos'] = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + update_logpush_job_v2_request_model['ownership_challenge'] = '00000000000000000000000000000000' + update_logpush_job_v2_request_model['frequency'] = 'high' + + # Set up parameter values + job_id = 'testString' + update_logpush_job_v2_request = update_logpush_job_v2_request_model + + # Invoke method + response = _service.update_logpush_job_v2( + job_id, + update_logpush_job_v2_request=update_logpush_job_v2_request, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + 
req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body == update_logpush_job_v2_request + + def test_update_logpush_job_v2_all_params_with_retries(self): + # Enable retries and run test_update_logpush_job_v2_all_params. + _service.enable_retries() + self.test_update_logpush_job_v2_all_params() + + # Disable retries and run test_update_logpush_job_v2_all_params. + _service.disable_retries() + self.test_update_logpush_job_v2_all_params() + + @responses.activate + def test_update_logpush_job_v2_required_params(self): + """ + test_update_logpush_job_v2_required_params() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.PUT, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Invoke method + response = _service.update_logpush_job_v2( + job_id, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_update_logpush_job_v2_required_params_with_retries(self): + # Enable retries and run test_update_logpush_job_v2_required_params. + _service.enable_retries() + self.test_update_logpush_job_v2_required_params() + + # Disable retries and run test_update_logpush_job_v2_required_params. 
+ _service.disable_retries() + self.test_update_logpush_job_v2_required_params() + + @responses.activate + def test_update_logpush_job_v2_value_error(self): + """ + test_update_logpush_job_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339×tamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}' + responses.add( + responses.PUT, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + "job_id": job_id, + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.update_logpush_job_v2(**req_copy) + + def test_update_logpush_job_v2_value_error_with_retries(self): + # Enable retries and run test_update_logpush_job_v2_value_error. + _service.enable_retries() + self.test_update_logpush_job_v2_value_error() + + # Disable retries and run test_update_logpush_job_v2_value_error. 
+ _service.disable_retries() + self.test_update_logpush_job_v2_value_error() + + +class TestDeleteLogpushJobV2: + """ + Test Class for delete_logpush_job_v2 + """ + + @responses.activate + def test_delete_logpush_job_v2_all_params(self): + """ + delete_logpush_job_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"anyKey": "anyValue"}}' + responses.add( + responses.DELETE, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Invoke method + response = _service.delete_logpush_job_v2( + job_id, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_delete_logpush_job_v2_all_params_with_retries(self): + # Enable retries and run test_delete_logpush_job_v2_all_params. + _service.enable_retries() + self.test_delete_logpush_job_v2_all_params() + + # Disable retries and run test_delete_logpush_job_v2_all_params. 
+ _service.disable_retries() + self.test_delete_logpush_job_v2_all_params() + + @responses.activate + def test_delete_logpush_job_v2_value_error(self): + """ + test_delete_logpush_job_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/jobs/testString') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"anyKey": "anyValue"}}' + responses.add( + responses.DELETE, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + job_id = 'testString' + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + "job_id": job_id, + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.delete_logpush_job_v2(**req_copy) + + def test_delete_logpush_job_v2_value_error_with_retries(self): + # Enable retries and run test_delete_logpush_job_v2_value_error. + _service.enable_retries() + self.test_delete_logpush_job_v2_value_error() + + # Disable retries and run test_delete_logpush_job_v2_value_error. 
+ _service.disable_retries() + self.test_delete_logpush_job_v2_value_error() + + +class TestGetLogpushOwnershipV2: + """ + Test Class for get_logpush_ownership_v2 + """ + + @responses.activate + def test_get_logpush_ownership_v2_all_params(self): + """ + get_logpush_ownership_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"filename": "logs/challenge-filename.txt", "valid": true, "messages": "messages"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + cos = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + + # Invoke method + response = _service.get_logpush_ownership_v2( + cos=cos, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body['cos'] == {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + + def test_get_logpush_ownership_v2_all_params_with_retries(self): + # Enable retries and run test_get_logpush_ownership_v2_all_params. + _service.enable_retries() + self.test_get_logpush_ownership_v2_all_params() + + # Disable retries and run test_get_logpush_ownership_v2_all_params. 
+ _service.disable_retries() + self.test_get_logpush_ownership_v2_all_params() + + @responses.activate + def test_get_logpush_ownership_v2_required_params(self): + """ + test_get_logpush_ownership_v2_required_params() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"filename": "logs/challenge-filename.txt", "valid": true, "messages": "messages"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.get_logpush_ownership_v2() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_get_logpush_ownership_v2_required_params_with_retries(self): + # Enable retries and run test_get_logpush_ownership_v2_required_params. + _service.enable_retries() + self.test_get_logpush_ownership_v2_required_params() + + # Disable retries and run test_get_logpush_ownership_v2_required_params. 
+ _service.disable_retries() + self.test_get_logpush_ownership_v2_required_params() + + @responses.activate + def test_get_logpush_ownership_v2_value_error(self): + """ + test_get_logpush_ownership_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"filename": "logs/challenge-filename.txt", "valid": true, "messages": "messages"}}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.get_logpush_ownership_v2(**req_copy) + + def test_get_logpush_ownership_v2_value_error_with_retries(self): + # Enable retries and run test_get_logpush_ownership_v2_value_error. + _service.enable_retries() + self.test_get_logpush_ownership_v2_value_error() + + # Disable retries and run test_get_logpush_ownership_v2_value_error. 
+ _service.disable_retries() + self.test_get_logpush_ownership_v2_value_error() + + +class TestValidateLogpushOwnershipChallengeV2: + """ + Test Class for validate_logpush_ownership_challenge_v2 + """ + + @responses.activate + def test_validate_logpush_ownership_challenge_v2_all_params(self): + """ + validate_logpush_ownership_challenge_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership/validate') + mock_response = '{"valid": true}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + cos = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + ownership_challenge = '00000000000000000000' + + # Invoke method + response = _service.validate_logpush_ownership_challenge_v2( + cos=cos, + ownership_challenge=ownership_challenge, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body['cos'] == {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + assert req_body['ownership_challenge'] == '00000000000000000000' + + def test_validate_logpush_ownership_challenge_v2_all_params_with_retries(self): + # Enable retries and run test_validate_logpush_ownership_challenge_v2_all_params. + _service.enable_retries() + self.test_validate_logpush_ownership_challenge_v2_all_params() + + # Disable retries and run test_validate_logpush_ownership_challenge_v2_all_params. 
+ _service.disable_retries() + self.test_validate_logpush_ownership_challenge_v2_all_params() + + @responses.activate + def test_validate_logpush_ownership_challenge_v2_required_params(self): + """ + test_validate_logpush_ownership_challenge_v2_required_params() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership/validate') + mock_response = '{"valid": true}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.validate_logpush_ownership_challenge_v2() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_validate_logpush_ownership_challenge_v2_required_params_with_retries(self): + # Enable retries and run test_validate_logpush_ownership_challenge_v2_required_params. + _service.enable_retries() + self.test_validate_logpush_ownership_challenge_v2_required_params() + + # Disable retries and run test_validate_logpush_ownership_challenge_v2_required_params. 
+ _service.disable_retries() + self.test_validate_logpush_ownership_challenge_v2_required_params() + + @responses.activate + def test_validate_logpush_ownership_challenge_v2_value_error(self): + """ + test_validate_logpush_ownership_challenge_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/ownership/validate') + mock_response = '{"valid": true}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.validate_logpush_ownership_challenge_v2(**req_copy) + + def test_validate_logpush_ownership_challenge_v2_value_error_with_retries(self): + # Enable retries and run test_validate_logpush_ownership_challenge_v2_value_error. + _service.enable_retries() + self.test_validate_logpush_ownership_challenge_v2_value_error() + + # Disable retries and run test_validate_logpush_ownership_challenge_v2_value_error. 
+ _service.disable_retries() + self.test_validate_logpush_ownership_challenge_v2_value_error() + + +class TestListFieldsForDatasetV2: + """ + Test Class for list_fields_for_dataset_v2 + """ + + @responses.activate + def test_list_fields_for_dataset_v2_all_params(self): + """ + list_fields_for_dataset_v2() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/datasets/testString/fields') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"anyKey": "anyValue"}}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.list_fields_for_dataset_v2() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_list_fields_for_dataset_v2_all_params_with_retries(self): + # Enable retries and run test_list_fields_for_dataset_v2_all_params. + _service.enable_retries() + self.test_list_fields_for_dataset_v2_all_params() + + # Disable retries and run test_list_fields_for_dataset_v2_all_params. 
+ _service.disable_retries() + self.test_list_fields_for_dataset_v2_all_params() + + @responses.activate + def test_list_fields_for_dataset_v2_value_error(self): + """ + test_list_fields_for_dataset_v2_value_error() + """ + # Set up mock + url = preprocess_url('/v2/testString/zones/testString/logpush/datasets/testString/fields') + mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"anyKey": "anyValue"}}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.list_fields_for_dataset_v2(**req_copy) + + def test_list_fields_for_dataset_v2_value_error_with_retries(self): + # Enable retries and run test_list_fields_for_dataset_v2_value_error. + _service.enable_retries() + self.test_list_fields_for_dataset_v2_value_error() + + # Disable retries and run test_list_fields_for_dataset_v2_value_error. 
+ _service.disable_retries()
+ self.test_list_fields_for_dataset_v2_value_error()
+
+
+class TestListLogpushJobsForDatasetV2:
+ """
+ Test Class for list_logpush_jobs_for_dataset_v2
+ """
+
+ @responses.activate
+ def test_list_logpush_jobs_for_dataset_v2_all_params(self):
+ """
+ list_logpush_jobs_for_dataset_v2()
+ """
+ # Set up mock
+ url = preprocess_url('/v2/testString/zones/testString/logpush/datasets/testString/jobs')
+ mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339&timestamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}'
+ responses.add(
+ responses.GET,
+ url,
+ body=mock_response,
+ content_type='application/json',
+ status=200,
+ )
+
+ # Invoke method
+ response = _service.list_logpush_jobs_for_dataset_v2()
+
+ # Check for correct operation
+ assert len(responses.calls) == 1
+ assert response.status_code == 200
+
+ def test_list_logpush_jobs_for_dataset_v2_all_params_with_retries(self):
+ # Enable retries and run test_list_logpush_jobs_for_dataset_v2_all_params.
+ _service.enable_retries()
+ self.test_list_logpush_jobs_for_dataset_v2_all_params()
+
+ # Disable retries and run test_list_logpush_jobs_for_dataset_v2_all_params. 
+ _service.disable_retries()
+ self.test_list_logpush_jobs_for_dataset_v2_all_params()
+
+ @responses.activate
+ def test_list_logpush_jobs_for_dataset_v2_value_error(self):
+ """
+ test_list_logpush_jobs_for_dataset_v2_value_error()
+ """
+ # Set up mock
+ url = preprocess_url('/v2/testString/zones/testString/logpush/datasets/testString/jobs')
+ mock_response = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": 5850, "name": "My log push job", "enabled": false, "dataset": "firewall_events", "frequency": "high", "logpull_options": "timestamps=rfc3339&timestamps=rfc3339", "destination_conf": "cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea", "last_complete": "2022-01-15T16:33:31.834209Z", "last_error": "2022-01-15T16:33:31.834209Z", "error_message": "error_message"}}'
+ responses.add(
+ responses.GET,
+ url,
+ body=mock_response,
+ content_type='application/json',
+ status=200,
+ )
+
+ # Pass in all but one required param and check for a ValueError
+ req_param_dict = {
+ }
+ for param in req_param_dict.keys():
+ req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()}
+ with pytest.raises(ValueError):
+ _service.list_logpush_jobs_for_dataset_v2(**req_copy)
+
+ def test_list_logpush_jobs_for_dataset_v2_value_error_with_retries(self):
+ # Enable retries and run test_list_logpush_jobs_for_dataset_v2_value_error.
+ _service.enable_retries()
+ self.test_list_logpush_jobs_for_dataset_v2_value_error()
+
+ # Disable retries and run test_list_logpush_jobs_for_dataset_v2_value_error. 
+ _service.disable_retries() + self.test_list_logpush_jobs_for_dataset_v2_value_error() + + +class TestGetLogsRetention: + """ + Test Class for get_logs_retention + """ + + @responses.activate + def test_get_logs_retention_all_params(self): + """ + get_logs_retention() + """ + # Set up mock + url = preprocess_url('/v1/testString/zones/testString/logs/retention') + mock_response = '{"result": {"flag": true}, "success": true, "errors": [["errors"]], "messages": [["messages"]]}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.get_logs_retention() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_get_logs_retention_all_params_with_retries(self): + # Enable retries and run test_get_logs_retention_all_params. + _service.enable_retries() + self.test_get_logs_retention_all_params() + + # Disable retries and run test_get_logs_retention_all_params. + _service.disable_retries() + self.test_get_logs_retention_all_params() + + @responses.activate + def test_get_logs_retention_value_error(self): + """ + test_get_logs_retention_value_error() + """ + # Set up mock + url = preprocess_url('/v1/testString/zones/testString/logs/retention') + mock_response = '{"result": {"flag": true}, "success": true, "errors": [["errors"]], "messages": [["messages"]]}' + responses.add( + responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.get_logs_retention(**req_copy) + + def test_get_logs_retention_value_error_with_retries(self): + # Enable retries and run test_get_logs_retention_value_error. 
+ _service.enable_retries() + self.test_get_logs_retention_value_error() + + # Disable retries and run test_get_logs_retention_value_error. + _service.disable_retries() + self.test_get_logs_retention_value_error() + + +class TestCreateLogRetention: + """ + Test Class for create_log_retention + """ + + @responses.activate + def test_create_log_retention_all_params(self): + """ + create_log_retention() + """ + # Set up mock + url = preprocess_url('/v1/testString/zones/testString/logs/retention') + mock_response = '{"result": {"flag": true}, "success": true, "errors": [["errors"]], "messages": [["messages"]]}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Set up parameter values + flag = False + + # Invoke method + response = _service.create_log_retention( + flag=flag, + headers={}, + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body['flag'] == False + + def test_create_log_retention_all_params_with_retries(self): + # Enable retries and run test_create_log_retention_all_params. + _service.enable_retries() + self.test_create_log_retention_all_params() + + # Disable retries and run test_create_log_retention_all_params. 
+ _service.disable_retries() + self.test_create_log_retention_all_params() + + @responses.activate + def test_create_log_retention_required_params(self): + """ + test_create_log_retention_required_params() + """ + # Set up mock + url = preprocess_url('/v1/testString/zones/testString/logs/retention') + mock_response = '{"result": {"flag": true}, "success": true, "errors": [["errors"]], "messages": [["messages"]]}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Invoke method + response = _service.create_log_retention() + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_create_log_retention_required_params_with_retries(self): + # Enable retries and run test_create_log_retention_required_params. + _service.enable_retries() + self.test_create_log_retention_required_params() + + # Disable retries and run test_create_log_retention_required_params. + _service.disable_retries() + self.test_create_log_retention_required_params() + + @responses.activate + def test_create_log_retention_value_error(self): + """ + test_create_log_retention_value_error() + """ + # Set up mock + url = preprocess_url('/v1/testString/zones/testString/logs/retention') + mock_response = '{"result": {"flag": true}, "success": true, "errors": [["errors"]], "messages": [["messages"]]}' + responses.add( + responses.POST, + url, + body=mock_response, + content_type='application/json', + status=200, + ) + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + } + for param in req_param_dict.keys(): + req_copy = {key: val if key is not param else None for (key, val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.create_log_retention(**req_copy) + + def test_create_log_retention_value_error_with_retries(self): + # Enable retries and run test_create_log_retention_value_error. 
+ _service.enable_retries() + self.test_create_log_retention_value_error() + + # Disable retries and run test_create_log_retention_value_error. + _service.disable_retries() + self.test_create_log_retention_value_error() + + +# endregion +############################################################################## +# End of Service: LogpushJobs +############################################################################## + + +############################################################################## +# Start of Model Tests +############################################################################## +# region + + +class TestModel_LogRetentionRespResult: + """ + Test Class for LogRetentionRespResult + """ + + def test_log_retention_resp_result_serialization(self): + """ + Test serialization/deserialization for LogRetentionRespResult + """ + + # Construct a json representation of a LogRetentionRespResult model + log_retention_resp_result_model_json = {} + log_retention_resp_result_model_json['flag'] = True + + # Construct a model instance of LogRetentionRespResult by calling from_dict on the json representation + log_retention_resp_result_model = LogRetentionRespResult.from_dict(log_retention_resp_result_model_json) + assert log_retention_resp_result_model != False + + # Construct a model instance of LogRetentionRespResult by calling from_dict on the json representation + log_retention_resp_result_model_dict = LogRetentionRespResult.from_dict(log_retention_resp_result_model_json).__dict__ + log_retention_resp_result_model2 = LogRetentionRespResult(**log_retention_resp_result_model_dict) + + # Verify the model instances are equivalent + assert log_retention_resp_result_model == log_retention_resp_result_model2 + + # Convert model instance back to dict and verify no loss of data + log_retention_resp_result_model_json2 = log_retention_resp_result_model.to_dict() + assert log_retention_resp_result_model_json2 == log_retention_resp_result_model_json + + +class 
TestModel_LogpushJobIbmclReqIbmcl: + """ + Test Class for LogpushJobIbmclReqIbmcl + """ + + def test_logpush_job_ibmcl_req_ibmcl_serialization(self): + """ + Test serialization/deserialization for LogpushJobIbmclReqIbmcl + """ + + # Construct a json representation of a LogpushJobIbmclReqIbmcl model + logpush_job_ibmcl_req_ibmcl_model_json = {} + logpush_job_ibmcl_req_ibmcl_model_json['instance_id'] = '90d208cc-e1dd-4fb2-a938-358e5996f056' + logpush_job_ibmcl_req_ibmcl_model_json['region'] = 'eu-es' + logpush_job_ibmcl_req_ibmcl_model_json['api_key'] = 'XXXXXXXXXXXXXX' + + # Construct a model instance of LogpushJobIbmclReqIbmcl by calling from_dict on the json representation + logpush_job_ibmcl_req_ibmcl_model = LogpushJobIbmclReqIbmcl.from_dict(logpush_job_ibmcl_req_ibmcl_model_json) + assert logpush_job_ibmcl_req_ibmcl_model != False + + # Construct a model instance of LogpushJobIbmclReqIbmcl by calling from_dict on the json representation + logpush_job_ibmcl_req_ibmcl_model_dict = LogpushJobIbmclReqIbmcl.from_dict(logpush_job_ibmcl_req_ibmcl_model_json).__dict__ + logpush_job_ibmcl_req_ibmcl_model2 = LogpushJobIbmclReqIbmcl(**logpush_job_ibmcl_req_ibmcl_model_dict) + + # Verify the model instances are equivalent + assert logpush_job_ibmcl_req_ibmcl_model == logpush_job_ibmcl_req_ibmcl_model2 + + # Convert model instance back to dict and verify no loss of data + logpush_job_ibmcl_req_ibmcl_model_json2 = logpush_job_ibmcl_req_ibmcl_model.to_dict() + assert logpush_job_ibmcl_req_ibmcl_model_json2 == logpush_job_ibmcl_req_ibmcl_model_json + + +class TestModel_LogpushJobsUpdateIbmclReqIbmcl: + """ + Test Class for LogpushJobsUpdateIbmclReqIbmcl + """ + + def test_logpush_jobs_update_ibmcl_req_ibmcl_serialization(self): + """ + Test serialization/deserialization for LogpushJobsUpdateIbmclReqIbmcl + """ + + # Construct a json representation of a LogpushJobsUpdateIbmclReqIbmcl model + logpush_jobs_update_ibmcl_req_ibmcl_model_json = {} + 
logpush_jobs_update_ibmcl_req_ibmcl_model_json['instance_id'] = '90d208cc-e1dd-4fb2-a938-358e5996f056' + logpush_jobs_update_ibmcl_req_ibmcl_model_json['region'] = 'eu-es' + logpush_jobs_update_ibmcl_req_ibmcl_model_json['api_key'] = 'XXXXXXXXXXXXXX' + + # Construct a model instance of LogpushJobsUpdateIbmclReqIbmcl by calling from_dict on the json representation + logpush_jobs_update_ibmcl_req_ibmcl_model = LogpushJobsUpdateIbmclReqIbmcl.from_dict(logpush_jobs_update_ibmcl_req_ibmcl_model_json) + assert logpush_jobs_update_ibmcl_req_ibmcl_model != False + + # Construct a model instance of LogpushJobsUpdateIbmclReqIbmcl by calling from_dict on the json representation + logpush_jobs_update_ibmcl_req_ibmcl_model_dict = LogpushJobsUpdateIbmclReqIbmcl.from_dict(logpush_jobs_update_ibmcl_req_ibmcl_model_json).__dict__ + logpush_jobs_update_ibmcl_req_ibmcl_model2 = LogpushJobsUpdateIbmclReqIbmcl(**logpush_jobs_update_ibmcl_req_ibmcl_model_dict) + + # Verify the model instances are equivalent + assert logpush_jobs_update_ibmcl_req_ibmcl_model == logpush_jobs_update_ibmcl_req_ibmcl_model2 + + # Convert model instance back to dict and verify no loss of data + logpush_jobs_update_ibmcl_req_ibmcl_model_json2 = logpush_jobs_update_ibmcl_req_ibmcl_model.to_dict() + assert logpush_jobs_update_ibmcl_req_ibmcl_model_json2 == logpush_jobs_update_ibmcl_req_ibmcl_model_json + + +class TestModel_DeleteLogpushJobResp: + """ + Test Class for DeleteLogpushJobResp + """ + + def test_delete_logpush_job_resp_serialization(self): + """ + Test serialization/deserialization for DeleteLogpushJobResp + """ + + # Construct a json representation of a DeleteLogpushJobResp model + delete_logpush_job_resp_model_json = {} + delete_logpush_job_resp_model_json['success'] = True + delete_logpush_job_resp_model_json['errors'] = [['testString']] + delete_logpush_job_resp_model_json['messages'] = [['testString']] + delete_logpush_job_resp_model_json['result'] = {'anyKey': 'anyValue'} + + # Construct a model 
instance of DeleteLogpushJobResp by calling from_dict on the json representation + delete_logpush_job_resp_model = DeleteLogpushJobResp.from_dict(delete_logpush_job_resp_model_json) + assert delete_logpush_job_resp_model != False + + # Construct a model instance of DeleteLogpushJobResp by calling from_dict on the json representation + delete_logpush_job_resp_model_dict = DeleteLogpushJobResp.from_dict(delete_logpush_job_resp_model_json).__dict__ + delete_logpush_job_resp_model2 = DeleteLogpushJobResp(**delete_logpush_job_resp_model_dict) + + # Verify the model instances are equivalent + assert delete_logpush_job_resp_model == delete_logpush_job_resp_model2 + + # Convert model instance back to dict and verify no loss of data + delete_logpush_job_resp_model_json2 = delete_logpush_job_resp_model.to_dict() + assert delete_logpush_job_resp_model_json2 == delete_logpush_job_resp_model_json + + +class TestModel_ListFieldsResp: + """ + Test Class for ListFieldsResp + """ + + def test_list_fields_resp_serialization(self): + """ + Test serialization/deserialization for ListFieldsResp + """ + + # Construct a json representation of a ListFieldsResp model + list_fields_resp_model_json = {} + list_fields_resp_model_json['success'] = True + list_fields_resp_model_json['errors'] = [['testString']] + list_fields_resp_model_json['messages'] = [['testString']] + list_fields_resp_model_json['result'] = {} + + # Construct a model instance of ListFieldsResp by calling from_dict on the json representation + list_fields_resp_model = ListFieldsResp.from_dict(list_fields_resp_model_json) + assert list_fields_resp_model != False + + # Construct a model instance of ListFieldsResp by calling from_dict on the json representation + list_fields_resp_model_dict = ListFieldsResp.from_dict(list_fields_resp_model_json).__dict__ + list_fields_resp_model2 = ListFieldsResp(**list_fields_resp_model_dict) + + # Verify the model instances are equivalent + assert list_fields_resp_model == 
list_fields_resp_model2
+
+ # Convert model instance back to dict and verify no loss of data
+ list_fields_resp_model_json2 = list_fields_resp_model.to_dict()
+ assert list_fields_resp_model_json2 == list_fields_resp_model_json
+
+
+class TestModel_ListLogpushJobsResp:
+ """
+ Test Class for ListLogpushJobsResp
+ """
+
+ def test_list_logpush_jobs_resp_serialization(self):
+ """
+ Test serialization/deserialization for ListLogpushJobsResp
+ """
+
+ # Construct dict forms of any model objects needed in order to build this model.
+
+ logpush_job_pack_model = {} # LogpushJobPack
+ logpush_job_pack_model['id'] = 5850
+ logpush_job_pack_model['name'] = 'My log push job'
+ logpush_job_pack_model['enabled'] = False
+ logpush_job_pack_model['dataset'] = 'firewall_events'
+ logpush_job_pack_model['frequency'] = 'high'
+ logpush_job_pack_model['logpull_options'] = 'timestamps=rfc3339&timestamps=rfc3339'
+ logpush_job_pack_model['destination_conf'] = 'cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea'
+ logpush_job_pack_model['last_complete'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model['last_error'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model['error_message'] = 'testString'
+
+ # Construct a json representation of a ListLogpushJobsResp model
+ list_logpush_jobs_resp_model_json = {}
+ list_logpush_jobs_resp_model_json['success'] = True
+ list_logpush_jobs_resp_model_json['errors'] = [['testString']]
+ list_logpush_jobs_resp_model_json['messages'] = [['testString']]
+ list_logpush_jobs_resp_model_json['result'] = [logpush_job_pack_model]
+
+ # Construct a model instance of ListLogpushJobsResp by calling from_dict on the json representation
+ list_logpush_jobs_resp_model = ListLogpushJobsResp.from_dict(list_logpush_jobs_resp_model_json)
+ assert list_logpush_jobs_resp_model != False
+
+ # Construct a model instance of ListLogpushJobsResp by calling from_dict on the json representation
+ list_logpush_jobs_resp_model_dict = 
ListLogpushJobsResp.from_dict(list_logpush_jobs_resp_model_json).__dict__ + list_logpush_jobs_resp_model2 = ListLogpushJobsResp(**list_logpush_jobs_resp_model_dict) + + # Verify the model instances are equivalent + assert list_logpush_jobs_resp_model == list_logpush_jobs_resp_model2 + + # Convert model instance back to dict and verify no loss of data + list_logpush_jobs_resp_model_json2 = list_logpush_jobs_resp_model.to_dict() + assert list_logpush_jobs_resp_model_json2 == list_logpush_jobs_resp_model_json + + +class TestModel_LogRetentionResp: + """ + Test Class for LogRetentionResp + """ + + def test_log_retention_resp_serialization(self): + """ + Test serialization/deserialization for LogRetentionResp + """ + + # Construct dict forms of any model objects needed in order to build this model. + + log_retention_resp_result_model = {} # LogRetentionRespResult + log_retention_resp_result_model['flag'] = True + + # Construct a json representation of a LogRetentionResp model + log_retention_resp_model_json = {} + log_retention_resp_model_json['result'] = log_retention_resp_result_model + log_retention_resp_model_json['success'] = True + log_retention_resp_model_json['errors'] = [['testString']] + log_retention_resp_model_json['messages'] = [['testString']] + + # Construct a model instance of LogRetentionResp by calling from_dict on the json representation + log_retention_resp_model = LogRetentionResp.from_dict(log_retention_resp_model_json) + assert log_retention_resp_model != False + + # Construct a model instance of LogRetentionResp by calling from_dict on the json representation + log_retention_resp_model_dict = LogRetentionResp.from_dict(log_retention_resp_model_json).__dict__ + log_retention_resp_model2 = LogRetentionResp(**log_retention_resp_model_dict) + + # Verify the model instances are equivalent + assert log_retention_resp_model == log_retention_resp_model2 + + # Convert model instance back to dict and verify no loss of data + log_retention_resp_model_json2 
= log_retention_resp_model.to_dict()
+ assert log_retention_resp_model_json2 == log_retention_resp_model_json
+
+
+class TestModel_LogpushJobPack:
+ """
+ Test Class for LogpushJobPack
+ """
+
+ def test_logpush_job_pack_serialization(self):
+ """
+ Test serialization/deserialization for LogpushJobPack
+ """
+
+ # Construct a json representation of a LogpushJobPack model
+ logpush_job_pack_model_json = {}
+ logpush_job_pack_model_json['id'] = 5850
+ logpush_job_pack_model_json['name'] = 'My log push job'
+ logpush_job_pack_model_json['enabled'] = False
+ logpush_job_pack_model_json['dataset'] = 'firewall_events'
+ logpush_job_pack_model_json['frequency'] = 'high'
+ logpush_job_pack_model_json['logpull_options'] = 'timestamps=rfc3339&timestamps=rfc3339'
+ logpush_job_pack_model_json['destination_conf'] = 'cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea'
+ logpush_job_pack_model_json['last_complete'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model_json['last_error'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model_json['error_message'] = 'testString'
+
+ # Construct a model instance of LogpushJobPack by calling from_dict on the json representation
+ logpush_job_pack_model = LogpushJobPack.from_dict(logpush_job_pack_model_json)
+ assert logpush_job_pack_model != False
+
+ # Construct a model instance of LogpushJobPack by calling from_dict on the json representation
+ logpush_job_pack_model_dict = LogpushJobPack.from_dict(logpush_job_pack_model_json).__dict__
+ logpush_job_pack_model2 = LogpushJobPack(**logpush_job_pack_model_dict)
+
+ # Verify the model instances are equivalent
+ assert logpush_job_pack_model == logpush_job_pack_model2
+
+ # Convert model instance back to dict and verify no loss of data
+ logpush_job_pack_model_json2 = logpush_job_pack_model.to_dict()
+ assert logpush_job_pack_model_json2 == logpush_job_pack_model_json
+
+
+class TestModel_LogpushJobsResp:
+ """
+ Test Class for LogpushJobsResp
+ """
+ 
+
+ def test_logpush_jobs_resp_serialization(self):
+ """
+ Test serialization/deserialization for LogpushJobsResp
+ """
+
+ # Construct dict forms of any model objects needed in order to build this model.
+
+ logpush_job_pack_model = {} # LogpushJobPack
+ logpush_job_pack_model['id'] = 5850
+ logpush_job_pack_model['name'] = 'My log push job'
+ logpush_job_pack_model['enabled'] = False
+ logpush_job_pack_model['dataset'] = 'firewall_events'
+ logpush_job_pack_model['frequency'] = 'high'
+ logpush_job_pack_model['logpull_options'] = 'timestamps=rfc3339&timestamps=rfc3339'
+ logpush_job_pack_model['destination_conf'] = 'cos://cos-bucket001?region=us-south&instance-id=231f5467-3072-4cb9-9e39-a906fa3032ea'
+ logpush_job_pack_model['last_complete'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model['last_error'] = '2022-01-15T16:33:31.834209Z'
+ logpush_job_pack_model['error_message'] = 'testString'
+
+ # Construct a json representation of a LogpushJobsResp model
+ logpush_jobs_resp_model_json = {}
+ logpush_jobs_resp_model_json['success'] = True
+ logpush_jobs_resp_model_json['errors'] = [['testString']]
+ logpush_jobs_resp_model_json['messages'] = [['testString']]
+ logpush_jobs_resp_model_json['result'] = logpush_job_pack_model
+
+ # Construct a model instance of LogpushJobsResp by calling from_dict on the json representation
+ logpush_jobs_resp_model = LogpushJobsResp.from_dict(logpush_jobs_resp_model_json)
+ assert logpush_jobs_resp_model != False
+
+ # Construct a model instance of LogpushJobsResp by calling from_dict on the json representation
+ logpush_jobs_resp_model_dict = LogpushJobsResp.from_dict(logpush_jobs_resp_model_json).__dict__
+ logpush_jobs_resp_model2 = LogpushJobsResp(**logpush_jobs_resp_model_dict)
+
+ # Verify the model instances are equivalent
+ assert logpush_jobs_resp_model == logpush_jobs_resp_model2
+
+ # Convert model instance back to dict and verify no loss of data
+ logpush_jobs_resp_model_json2 = logpush_jobs_resp_model.to_dict()
+ 
assert logpush_jobs_resp_model_json2 == logpush_jobs_resp_model_json + + +class TestModel_OwnershipChallengeResp: + """ + Test Class for OwnershipChallengeResp + """ + + def test_ownership_challenge_resp_serialization(self): + """ + Test serialization/deserialization for OwnershipChallengeResp + """ + + # Construct dict forms of any model objects needed in order to build this model. + + ownership_challenge_result_model = {} # OwnershipChallengeResult + ownership_challenge_result_model['filename'] = 'logs/challenge-filename.txt' + ownership_challenge_result_model['valid'] = True + ownership_challenge_result_model['messages'] = 'testString' + + # Construct a json representation of a OwnershipChallengeResp model + ownership_challenge_resp_model_json = {} + ownership_challenge_resp_model_json['success'] = True + ownership_challenge_resp_model_json['errors'] = [['testString']] + ownership_challenge_resp_model_json['messages'] = [['testString']] + ownership_challenge_resp_model_json['result'] = ownership_challenge_result_model + + # Construct a model instance of OwnershipChallengeResp by calling from_dict on the json representation + ownership_challenge_resp_model = OwnershipChallengeResp.from_dict(ownership_challenge_resp_model_json) + assert ownership_challenge_resp_model != False + + # Construct a model instance of OwnershipChallengeResp by calling from_dict on the json representation + ownership_challenge_resp_model_dict = OwnershipChallengeResp.from_dict(ownership_challenge_resp_model_json).__dict__ + ownership_challenge_resp_model2 = OwnershipChallengeResp(**ownership_challenge_resp_model_dict) + + # Verify the model instances are equivalent + assert ownership_challenge_resp_model == ownership_challenge_resp_model2 + + # Convert model instance back to dict and verify no loss of data + ownership_challenge_resp_model_json2 = ownership_challenge_resp_model.to_dict() + assert ownership_challenge_resp_model_json2 == ownership_challenge_resp_model_json + + +class 
TestModel_OwnershipChallengeResult: + """ + Test Class for OwnershipChallengeResult + """ + + def test_ownership_challenge_result_serialization(self): + """ + Test serialization/deserialization for OwnershipChallengeResult + """ + + # Construct a json representation of a OwnershipChallengeResult model + ownership_challenge_result_model_json = {} + ownership_challenge_result_model_json['filename'] = 'logs/challenge-filename.txt' + ownership_challenge_result_model_json['valid'] = True + ownership_challenge_result_model_json['messages'] = 'testString' + + # Construct a model instance of OwnershipChallengeResult by calling from_dict on the json representation + ownership_challenge_result_model = OwnershipChallengeResult.from_dict(ownership_challenge_result_model_json) + assert ownership_challenge_result_model != False + + # Construct a model instance of OwnershipChallengeResult by calling from_dict on the json representation + ownership_challenge_result_model_dict = OwnershipChallengeResult.from_dict(ownership_challenge_result_model_json).__dict__ + ownership_challenge_result_model2 = OwnershipChallengeResult(**ownership_challenge_result_model_dict) + + # Verify the model instances are equivalent + assert ownership_challenge_result_model == ownership_challenge_result_model2 + + # Convert model instance back to dict and verify no loss of data + ownership_challenge_result_model_json2 = ownership_challenge_result_model.to_dict() + assert ownership_challenge_result_model_json2 == ownership_challenge_result_model_json + + +class TestModel_OwnershipChallengeValidateResult: + """ + Test Class for OwnershipChallengeValidateResult + """ + + def test_ownership_challenge_validate_result_serialization(self): + """ + Test serialization/deserialization for OwnershipChallengeValidateResult + """ + + # Construct a json representation of a OwnershipChallengeValidateResult model + ownership_challenge_validate_result_model_json = {} + 
ownership_challenge_validate_result_model_json['valid'] = True + + # Construct a model instance of OwnershipChallengeValidateResult by calling from_dict on the json representation + ownership_challenge_validate_result_model = OwnershipChallengeValidateResult.from_dict(ownership_challenge_validate_result_model_json) + assert ownership_challenge_validate_result_model != False + + # Construct a model instance of OwnershipChallengeValidateResult by calling from_dict on the json representation + ownership_challenge_validate_result_model_dict = OwnershipChallengeValidateResult.from_dict(ownership_challenge_validate_result_model_json).__dict__ + ownership_challenge_validate_result_model2 = OwnershipChallengeValidateResult(**ownership_challenge_validate_result_model_dict) + + # Verify the model instances are equivalent + assert ownership_challenge_validate_result_model == ownership_challenge_validate_result_model2 + + # Convert model instance back to dict and verify no loss of data + ownership_challenge_validate_result_model_json2 = ownership_challenge_validate_result_model.to_dict() + assert ownership_challenge_validate_result_model_json2 == ownership_challenge_validate_result_model_json + + +class TestModel_CreateLogpushJobV2RequestLogpushJobCosReq: + """ + Test Class for CreateLogpushJobV2RequestLogpushJobCosReq + """ + + def test_create_logpush_job_v2_request_logpush_job_cos_req_serialization(self): + """ + Test serialization/deserialization for CreateLogpushJobV2RequestLogpushJobCosReq + """ + + # Construct a json representation of a CreateLogpushJobV2RequestLogpushJobCosReq model + create_logpush_job_v2_request_logpush_job_cos_req_model_json = {} + create_logpush_job_v2_request_logpush_job_cos_req_model_json['name'] = 'My log push job' + create_logpush_job_v2_request_logpush_job_cos_req_model_json['enabled'] = False + create_logpush_job_v2_request_logpush_job_cos_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + 
create_logpush_job_v2_request_logpush_job_cos_req_model_json['cos'] = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + create_logpush_job_v2_request_logpush_job_cos_req_model_json['ownership_challenge'] = '00000000000000000000000000000000' + create_logpush_job_v2_request_logpush_job_cos_req_model_json['dataset'] = 'http_requests' + create_logpush_job_v2_request_logpush_job_cos_req_model_json['frequency'] = 'high' + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobCosReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_cos_req_model = CreateLogpushJobV2RequestLogpushJobCosReq.from_dict(create_logpush_job_v2_request_logpush_job_cos_req_model_json) + assert create_logpush_job_v2_request_logpush_job_cos_req_model != False + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobCosReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_cos_req_model_dict = CreateLogpushJobV2RequestLogpushJobCosReq.from_dict(create_logpush_job_v2_request_logpush_job_cos_req_model_json).__dict__ + create_logpush_job_v2_request_logpush_job_cos_req_model2 = CreateLogpushJobV2RequestLogpushJobCosReq(**create_logpush_job_v2_request_logpush_job_cos_req_model_dict) + + # Verify the model instances are equivalent + assert create_logpush_job_v2_request_logpush_job_cos_req_model == create_logpush_job_v2_request_logpush_job_cos_req_model2 + + # Convert model instance back to dict and verify no loss of data + create_logpush_job_v2_request_logpush_job_cos_req_model_json2 = create_logpush_job_v2_request_logpush_job_cos_req_model.to_dict() + assert create_logpush_job_v2_request_logpush_job_cos_req_model_json2 == create_logpush_job_v2_request_logpush_job_cos_req_model_json + + +class TestModel_CreateLogpushJobV2RequestLogpushJobGenericReq: + """ + Test Class for CreateLogpushJobV2RequestLogpushJobGenericReq + """ + + def 
test_create_logpush_job_v2_request_logpush_job_generic_req_serialization(self): + """ + Test serialization/deserialization for CreateLogpushJobV2RequestLogpushJobGenericReq + """ + + # Construct a json representation of a CreateLogpushJobV2RequestLogpushJobGenericReq model + create_logpush_job_v2_request_logpush_job_generic_req_model_json = {} + create_logpush_job_v2_request_logpush_job_generic_req_model_json['name'] = 'My log push job' + create_logpush_job_v2_request_logpush_job_generic_req_model_json['enabled'] = False + create_logpush_job_v2_request_logpush_job_generic_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + create_logpush_job_v2_request_logpush_job_generic_req_model_json['destination_conf'] = 's3://mybucket/logs?region=us-west-2' + create_logpush_job_v2_request_logpush_job_generic_req_model_json['dataset'] = 'http_requests' + create_logpush_job_v2_request_logpush_job_generic_req_model_json['frequency'] = 'high' + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobGenericReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_generic_req_model = CreateLogpushJobV2RequestLogpushJobGenericReq.from_dict(create_logpush_job_v2_request_logpush_job_generic_req_model_json) + assert create_logpush_job_v2_request_logpush_job_generic_req_model != False + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobGenericReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_generic_req_model_dict = CreateLogpushJobV2RequestLogpushJobGenericReq.from_dict(create_logpush_job_v2_request_logpush_job_generic_req_model_json).__dict__ + create_logpush_job_v2_request_logpush_job_generic_req_model2 = CreateLogpushJobV2RequestLogpushJobGenericReq(**create_logpush_job_v2_request_logpush_job_generic_req_model_dict) + + # Verify the model instances are equivalent + assert create_logpush_job_v2_request_logpush_job_generic_req_model == 
create_logpush_job_v2_request_logpush_job_generic_req_model2 + + # Convert model instance back to dict and verify no loss of data + create_logpush_job_v2_request_logpush_job_generic_req_model_json2 = create_logpush_job_v2_request_logpush_job_generic_req_model.to_dict() + assert create_logpush_job_v2_request_logpush_job_generic_req_model_json2 == create_logpush_job_v2_request_logpush_job_generic_req_model_json + + +class TestModel_CreateLogpushJobV2RequestLogpushJobIbmclReq: + """ + Test Class for CreateLogpushJobV2RequestLogpushJobIbmclReq + """ + + def test_create_logpush_job_v2_request_logpush_job_ibmcl_req_serialization(self): + """ + Test serialization/deserialization for CreateLogpushJobV2RequestLogpushJobIbmclReq + """ + + # Construct dict forms of any model objects needed in order to build this model. + + logpush_job_ibmcl_req_ibmcl_model = {} # LogpushJobIbmclReqIbmcl + logpush_job_ibmcl_req_ibmcl_model['instance_id'] = '90d208cc-e1dd-4fb2-a938-358e5996f056' + logpush_job_ibmcl_req_ibmcl_model['region'] = 'eu-es' + logpush_job_ibmcl_req_ibmcl_model['api_key'] = 'XXXXXXXXXXXXXX' + + # Construct a json representation of a CreateLogpushJobV2RequestLogpushJobIbmclReq model + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json = {} + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['name'] = 'My log push job' + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['enabled'] = False + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['ibmcl'] = logpush_job_ibmcl_req_ibmcl_model + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['dataset'] = 'http_requests' + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json['frequency'] = 'high' + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobIbmclReq by calling from_dict on the json representation + 
create_logpush_job_v2_request_logpush_job_ibmcl_req_model = CreateLogpushJobV2RequestLogpushJobIbmclReq.from_dict(create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json) + assert create_logpush_job_v2_request_logpush_job_ibmcl_req_model != False + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobIbmclReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_dict = CreateLogpushJobV2RequestLogpushJobIbmclReq.from_dict(create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json).__dict__ + create_logpush_job_v2_request_logpush_job_ibmcl_req_model2 = CreateLogpushJobV2RequestLogpushJobIbmclReq(**create_logpush_job_v2_request_logpush_job_ibmcl_req_model_dict) + + # Verify the model instances are equivalent + assert create_logpush_job_v2_request_logpush_job_ibmcl_req_model == create_logpush_job_v2_request_logpush_job_ibmcl_req_model2 + + # Convert model instance back to dict and verify no loss of data + create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json2 = create_logpush_job_v2_request_logpush_job_ibmcl_req_model.to_dict() + assert create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json2 == create_logpush_job_v2_request_logpush_job_ibmcl_req_model_json + + +class TestModel_CreateLogpushJobV2RequestLogpushJobLogdnaReq: + """ + Test Class for CreateLogpushJobV2RequestLogpushJobLogdnaReq + """ + + def test_create_logpush_job_v2_request_logpush_job_logdna_req_serialization(self): + """ + Test serialization/deserialization for CreateLogpushJobV2RequestLogpushJobLogdnaReq + """ + + # Construct a json representation of a CreateLogpushJobV2RequestLogpushJobLogdnaReq model + create_logpush_job_v2_request_logpush_job_logdna_req_model_json = {} + create_logpush_job_v2_request_logpush_job_logdna_req_model_json['name'] = 'My log push job' + create_logpush_job_v2_request_logpush_job_logdna_req_model_json['enabled'] = False + 
create_logpush_job_v2_request_logpush_job_logdna_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + create_logpush_job_v2_request_logpush_job_logdna_req_model_json['logdna'] = {'ingress_key': '8aef12bcd5e5af42', 'region': 'us-south', 'hostname': 'www.example.com'} + create_logpush_job_v2_request_logpush_job_logdna_req_model_json['dataset'] = 'http_requests' + create_logpush_job_v2_request_logpush_job_logdna_req_model_json['frequency'] = 'high' + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobLogdnaReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_logdna_req_model = CreateLogpushJobV2RequestLogpushJobLogdnaReq.from_dict(create_logpush_job_v2_request_logpush_job_logdna_req_model_json) + assert create_logpush_job_v2_request_logpush_job_logdna_req_model != False + + # Construct a model instance of CreateLogpushJobV2RequestLogpushJobLogdnaReq by calling from_dict on the json representation + create_logpush_job_v2_request_logpush_job_logdna_req_model_dict = CreateLogpushJobV2RequestLogpushJobLogdnaReq.from_dict(create_logpush_job_v2_request_logpush_job_logdna_req_model_json).__dict__ + create_logpush_job_v2_request_logpush_job_logdna_req_model2 = CreateLogpushJobV2RequestLogpushJobLogdnaReq(**create_logpush_job_v2_request_logpush_job_logdna_req_model_dict) + + # Verify the model instances are equivalent + assert create_logpush_job_v2_request_logpush_job_logdna_req_model == create_logpush_job_v2_request_logpush_job_logdna_req_model2 + + # Convert model instance back to dict and verify no loss of data + create_logpush_job_v2_request_logpush_job_logdna_req_model_json2 = create_logpush_job_v2_request_logpush_job_logdna_req_model.to_dict() + assert create_logpush_job_v2_request_logpush_job_logdna_req_model_json2 == create_logpush_job_v2_request_logpush_job_logdna_req_model_json + + +class TestModel_UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq: + """ + Test Class for 
UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq + """ + + def test_update_logpush_job_v2_request_logpush_jobs_update_cos_req_serialization(self): + """ + Test serialization/deserialization for UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq + """ + + # Construct a json representation of a UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq model + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json = {} + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json['enabled'] = False + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json['cos'] = {'bucket_name': 'cos-bucket001', 'region': 'us-south', 'id': '231f5467-3072-4cb9-9e39-a906fa3032ea'} + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json['ownership_challenge'] = '00000000000000000000000000000000' + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json['frequency'] = 'high' + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model = UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json) + assert update_logpush_job_v2_request_logpush_jobs_update_cos_req_model != False + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_dict = UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json).__dict__ + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model2 = UpdateLogpushJobV2RequestLogpushJobsUpdateCosReq(**update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_dict) + + # 
Verify the model instances are equivalent + assert update_logpush_job_v2_request_logpush_jobs_update_cos_req_model == update_logpush_job_v2_request_logpush_jobs_update_cos_req_model2 + + # Convert model instance back to dict and verify no loss of data + update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json2 = update_logpush_job_v2_request_logpush_jobs_update_cos_req_model.to_dict() + assert update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json2 == update_logpush_job_v2_request_logpush_jobs_update_cos_req_model_json + + +class TestModel_UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq: + """ + Test Class for UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq + """ + + def test_update_logpush_job_v2_request_logpush_jobs_update_generic_req_serialization(self): + """ + Test serialization/deserialization for UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq + """ + + # Construct a json representation of a UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq model + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json = {} + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['name'] = 'My log push job' + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['enabled'] = False + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['destination_conf'] = 's3://mybucket/logs?region=us-west-2' + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['dataset'] = 'http_requests' + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json['frequency'] = 'high' + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model = 
UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json) + assert update_logpush_job_v2_request_logpush_jobs_update_generic_req_model != False + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_dict = UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json).__dict__ + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model2 = UpdateLogpushJobV2RequestLogpushJobsUpdateGenericReq(**update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_dict) + + # Verify the model instances are equivalent + assert update_logpush_job_v2_request_logpush_jobs_update_generic_req_model == update_logpush_job_v2_request_logpush_jobs_update_generic_req_model2 + + # Convert model instance back to dict and verify no loss of data + update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json2 = update_logpush_job_v2_request_logpush_jobs_update_generic_req_model.to_dict() + assert update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json2 == update_logpush_job_v2_request_logpush_jobs_update_generic_req_model_json + + +class TestModel_UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq: + """ + Test Class for UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq + """ + + def test_update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_serialization(self): + """ + Test serialization/deserialization for UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq + """ + + # Construct dict forms of any model objects needed in order to build this model. 
+ + logpush_jobs_update_ibmcl_req_ibmcl_model = {} # LogpushJobsUpdateIbmclReqIbmcl + logpush_jobs_update_ibmcl_req_ibmcl_model['instance_id'] = '90d208cc-e1dd-4fb2-a938-358e5996f056' + logpush_jobs_update_ibmcl_req_ibmcl_model['region'] = 'eu-es' + logpush_jobs_update_ibmcl_req_ibmcl_model['api_key'] = 'XXXXXXXXXXXXXX' + + # Construct a json representation of a UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq model + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json = {} + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json['enabled'] = False + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json['ibmcl'] = logpush_jobs_update_ibmcl_req_ibmcl_model + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json['frequency'] = 'high' + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model = UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json) + assert update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model != False + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_dict = UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json).__dict__ + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model2 = UpdateLogpushJobV2RequestLogpushJobsUpdateIbmclReq(**update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_dict) + + # Verify the model instances are equivalent + assert 
update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model == update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model2 + + # Convert model instance back to dict and verify no loss of data + update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json2 = update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model.to_dict() + assert update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json2 == update_logpush_job_v2_request_logpush_jobs_update_ibmcl_req_model_json + + +class TestModel_UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq: + """ + Test Class for UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq + """ + + def test_update_logpush_job_v2_request_logpush_jobs_update_logdna_req_serialization(self): + """ + Test serialization/deserialization for UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq + """ + + # Construct a json representation of a UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq model + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json = {} + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json['enabled'] = False + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json['logpull_options'] = 'timestamps=rfc3339×tamps=rfc3339' + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json['logdna'] = {'ingress_key': '8aef12bcd5e5af42', 'region': 'us-south', 'hostname': 'www.example.com'} + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json['frequency'] = 'high' + + # Construct a model instance of UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model = UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json) + assert update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model != False + + # Construct a model 
instance of UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq by calling from_dict on the json representation + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_dict = UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq.from_dict(update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json).__dict__ + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model2 = UpdateLogpushJobV2RequestLogpushJobsUpdateLogdnaReq(**update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_dict) + + # Verify the model instances are equivalent + assert update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model == update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model2 + + # Convert model instance back to dict and verify no loss of data + update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json2 = update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model.to_dict() + assert update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json2 == update_logpush_job_v2_request_logpush_jobs_update_logdna_req_model_json + + +# endregion +############################################################################## +# End of Model Tests +##############################################################################