12 | 12 | import yaml |
13 | 13 | from openshift.dynamic import DynamicClient |
14 | 14 | from openshift.dynamic.exceptions import NotFoundError |
| 15 | +import boto3 |
| 16 | +from botocore.exceptions import ClientError, NoCredentialsError |
15 | 17 |
16 | 18 | logger = logging.getLogger(name=__name__) |
17 | 19 |
@@ -235,3 +237,85 @@ def backupResources(dynClient: DynamicClient, kind: str, api_version: str, backu |
235 | 237 | logger.error(f"Error backing up {kind} resources: {e}") |
236 | 238 | failed_count = 1 |
237 | 239 | return (backed_up_count, not_found_count, failed_count, discovered_secrets) |
| 240 | + |
| 241 | + |
| 242 | + |
| 243 | +def uploadToS3( |
| 244 | + file_path: str, |
| 245 | + bucket_name: str, |
| 246 | + object_name=None, |
| 247 | + endpoint_url=None, |
| 248 | + aws_access_key_id=None, |
| 249 | + aws_secret_access_key=None, |
| 250 | + region_name=None |
| 251 | +) -> bool: |
| 252 | + """ |
| 253 | + Upload a tar.gz file to S3-compatible storage. |
| 254 | +
| 255 | + Args: |
| 256 | + file_path: Path to the tar.gz file to upload |
| 257 | + bucket_name: Name of the S3 bucket |
| 258 | + object_name: S3 object name. If not specified, file_path basename is used |
| 259 | + endpoint_url: S3-compatible endpoint URL (e.g., for MinIO, Ceph) |
| 260 | + aws_access_key_id: AWS access key ID (if not using environment variables) |
| 261 | + aws_secret_access_key: AWS secret access key (if not using environment variables) |
| 262 | + region_name: AWS region name (default: us-east-1) |
| 263 | +
| 264 | + Returns: |
| 265 | + bool: True if file was uploaded successfully, False otherwise |
| 266 | + """ |
| 267 | + # If S3 object_name was not specified, use file_path basename |
| 268 | + if object_name is None: |
| 269 | + object_name = os.path.basename(file_path) |
| 270 | + |
| 271 | + # Validate file exists and is a tar.gz file |
| 272 | + if not os.path.exists(file_path): |
| 273 | + logger.error(f"File not found: {file_path}") |
| 274 | + return False |
| 275 | + |
| 276 | + if not file_path.endswith('.tar.gz'): |
| 277 | + logger.warning(f"File does not have .tar.gz extension: {file_path}") |
| 278 | + |
| 279 | + # Configure S3 client |
| 280 | + try: |
| 281 | + s3_config = {} |
| 282 | + |
| 283 | + if endpoint_url: |
| 284 | + s3_config['endpoint_url'] = endpoint_url |
| 285 | + |
| 286 | + if aws_access_key_id and aws_secret_access_key: |
| 287 | + s3_config['aws_access_key_id'] = aws_access_key_id |
| 288 | + s3_config['aws_secret_access_key'] = aws_secret_access_key |
| 289 | + |
| 290 | + if region_name: |
| 291 | + s3_config['region_name'] = region_name |
| 292 | + else: |
| 293 | + s3_config['region_name'] = 'us-east-1' |
| 294 | + |
| 295 | + s3_client = boto3.client('s3', **s3_config) |
| 296 | + |
| 297 | + # Upload the file |
| 298 | + logger.info(f"Uploading {file_path} to s3://{bucket_name}/{object_name}") |
| 299 | + |
| 300 | + file_size = os.path.getsize(file_path) |
| 301 | + logger.info(f"File size: {file_size / (1024 * 1024):.2f} MB") |
| 302 | + |
| 303 | + s3_client.upload_file(file_path, bucket_name, object_name) |
| 304 | + |
| 305 | + logger.info(f"Successfully uploaded {file_path} to s3://{bucket_name}/{object_name}") |
| 306 | + return True |
| 307 | + |
| 308 | + except FileNotFoundError: |
| 309 | + logger.error(f"File not found: {file_path}") |
| 310 | + return False |
| 311 | + except NoCredentialsError: |
| 312 | + logger.error("AWS credentials not found. Please provide credentials or configure environment variables.") |
| 313 | + return False |
| 314 | + except ClientError as e: |
| 315 | + error_code = e.response.get('Error', {}).get('Code', 'Unknown') |
| 316 | + error_message = e.response.get('Error', {}).get('Message', str(e)) |
| 317 | + logger.error(f"S3 client error ({error_code}): {error_message}") |
| 318 | + return False |
| 319 | + except Exception as e: |
| 320 | + logger.error(f"Unexpected error uploading to S3: {e}") |
| 321 | + return False |
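
A minimal usage sketch of the new helper, assuming it is called after the backup archive has been written; the file path, bucket name, MinIO endpoint, and environment variable names below are placeholder assumptions, not values taken from this change:

    import os

    # Hypothetical values; real ones would come from CLI arguments or configuration
    uploaded = uploadToS3(
        file_path="/tmp/backup-20240101.tar.gz",
        bucket_name="mas-backups",
        endpoint_url="https://minio.example.com:9000",  # omit to use AWS S3 directly
        aws_access_key_id=os.environ.get("S3_ACCESS_KEY"),
        aws_secret_access_key=os.environ.get("S3_SECRET_KEY"),
        region_name="us-east-1",
    )
    if not uploaded:
        logger.error("Backup upload failed; the local tar.gz archive is retained")

Because the function returns a bool and logs its own errors, the caller only needs to branch on the return value rather than handle boto3 exceptions itself.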