diff --git a/.github/workflows/build-deployment-zip.yaml b/.github/workflows/build-deployment-zip.yaml new file mode 100644 index 0000000..e0bf05f --- /dev/null +++ b/.github/workflows/build-deployment-zip.yaml @@ -0,0 +1,59 @@ +name: Build Lambda Deployment Package + +on: + release: + types: [published] + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: write # Required to upload assets to releases + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Create dependencies directory + run: mkdir -p dependencies + + - name: Install dependencies + run: | + pip install --target ./dependencies \ + boto3 \ + google-api-python-client \ + google-auth \ + google-auth-oauthlib \ + google-auth-httplib2 + + - name: Copy lambda function and chalicelib + run: | + cp lambda_function.py dependencies/ + cp -r chalicelib dependencies/ + + - name: Create deployment package + run: | + cd dependencies + zip -r ../deployment.zip . + cd .. + + - name: Upload deployment package as artifact + uses: actions/upload-artifact@v4 + with: + name: lambda-deployment-package + path: deployment.zip + retention-days: 90 + + - name: Upload deployment package to release + if: github.event_name == 'release' + uses: softprops/action-gh-release@v1 + with: + files: deployment.zip + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 4e2df48..888cdc4 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,7 @@ __pycache__/ *.bak policy.json + +# Deployment artifacts +deployment.zip +dependencies/ diff --git a/DEPLOYMENT.md b/DEPLOYMENT.md new file mode 100644 index 0000000..0c3c319 --- /dev/null +++ b/DEPLOYMENT.md @@ -0,0 +1,361 @@ +# AWS Lambda Deployment Guide + +This guide provides detailed instructions for deploying the YouTube streaming Lambda function directly to AWS without using Chalice. 
+ +## Prerequisites + +Before you begin, ensure you have: + +1. **AWS Account** with permissions to: + - Create Lambda functions + - Create and manage S3 buckets + - Create IAM roles and policies + - (Optional) Create Lambda Function URLs + +2. **YouTube Data API v3 Credentials**: + - A Google Cloud Project with YouTube Data API v3 enabled + - OAuth 2.0 credentials configured for desktop application + - A valid `token.pickle` file (generated after OAuth flow) + +3. **S3 Bucket** for storing the YouTube API token + +## Step 1: Obtain the Deployment Package + +### Option A: Download from GitHub Releases (Recommended) + +1. Go to the [Releases page](https://github.com/AccelerationConsortium/streamingLambda/releases) +2. Download the latest `deployment.zip` file + +### Option B: Build Locally + +1. Clone the repository: + ```bash + git clone https://github.com/AccelerationConsortium/streamingLambda.git + cd streamingLambda + ``` + +2. Run the build script: + ```bash + ./build-deployment-zip.sh + ``` + + This will create `deployment.zip` (~40MB) in the current directory. + +## Step 2: Set Up S3 Bucket for YouTube Token + +1. **Create an S3 bucket**: + - Go to [S3 Console](https://console.aws.amazon.com/s3) + - Click "Create bucket" + - Choose a unique name (e.g., `my-org-youtube-token`) + - Select your preferred region + - Keep default settings for now + - Click "Create bucket" + +2. **Upload your token.pickle**: + - Navigate to your bucket + - Create a folder named `token` + - Upload your `token.pickle` file to the `token/` folder + - The full path should be: `s3://your-bucket-name/token/token.pickle` + +3. **Note the bucket name** - you'll need it for configuration + +## Step 3: Create IAM Role for Lambda + +1. **Go to [IAM Console](https://console.aws.amazon.com/iam)** + +2. **Create a new role**: + - Click "Roles" → "Create role" + - Select "AWS service" → "Lambda" + - Click "Next" + +3. 
**Attach policies**: + - Search and attach: `AWSLambdaBasicExecutionRole` (for CloudWatch Logs) + - Click "Next" + +4. **Create custom policy for S3 access**: + - Click "Create policy" (opens new tab) + - Switch to JSON tab and paste: + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject" + ], + "Resource": "arn:aws:s3:::YOUR-BUCKET-NAME/token/token.pickle" + } + ] + } + ``` + - Replace `YOUR-BUCKET-NAME` with your actual bucket name + - Click "Next: Tags" → "Next: Review" + - Name it `youtube-lambda-s3-access` + - Click "Create policy" + +5. **Return to role creation tab**: + - Refresh the policies list + - Search for and attach `youtube-lambda-s3-access` + - Click "Next" + +6. **Name the role**: + - Role name: `youtube-lambda-role` + - Click "Create role" + +## Step 4: Create Lambda Function + +1. **Go to [Lambda Console](https://console.aws.amazon.com/lambda)** + +2. **Create function**: + - Click "Create function" + - Select "Author from scratch" + - Function name: `youtube-stream` (or your preferred name) + - Runtime: **Python 3.11** + - Architecture: x86_64 + - Under "Change default execution role": + - Select "Use an existing role" + - Choose `youtube-lambda-role` (created in Step 3) + - Click "Create function" + +3. **Upload deployment package**: + - In the "Code" tab, click "Upload from" → ".zip file" + - Click "Upload" and select your `deployment.zip` + - Click "Save" + - Wait for the upload to complete (may take a minute for 40MB file) + +4. 
**Configure function settings**: + - Click on "Configuration" tab → "General configuration" → "Edit" + - **Memory**: 1024 MB (recommended) + - **Timeout**: 1 minute (60 seconds) + - Click "Save" + +## Step 5: Configure Environment (if needed) + +If your S3 bucket name or token path differs from the defaults, you need to update the configuration: + +### Default values in the code: +- S3 Bucket: `ac-token-youtube-api` +- S3 Key: `token/token.pickle` +- Channel ID: `UCHBzCfYpGwoqygH9YNh9A6g` + +### To customize: + +1. Download and extract `deployment.zip` +2. Edit `chalicelib/ytb_api_utils.py`: + ```python + # Lines 9-13 + CHANNEL_ID = "your-youtube-channel-id" # Optional: your channel ID + S3_BUCKET = "your-bucket-name" # Your S3 bucket name + S3_KEY = "token/token.pickle" # Path to token in S3 + ``` +3. Rebuild the deployment package from the folder where you extracted it: + ```bash + cd path/to/extracted-files + zip -r ../deployment.zip . + cd .. + ``` +4. Upload the new `deployment.zip` to Lambda + +## Step 6: Create Function URL (Optional but Recommended) + +To enable HTTP access to your Lambda function: + +1. **In Lambda Console**, go to your function +2. Click "Configuration" → "Function URL" +3. Click "Create function URL" +4. **Auth type**: + - `NONE` - Public access (simpler, less secure) + - `AWS_IAM` - Requires AWS credentials (more secure) +5. Click "Save" +6. **Copy the Function URL** - you'll use this with your monitoring device + +Example Function URL: +``` +https://abcdefg123456.lambda-url.us-east-1.on.aws/ +``` + +## Step 7: Test the Function + +1. **In Lambda Console**, click "Test" tab +2. **Create new test event**: + - Event name: `test-create-stream` + - Event JSON: + ```json + { + "body": { + "action": "create", + "cam_name": "TestCamera", + "workflow_name": "TestWorkflow", + "privacy_status": "private" + } + } + ``` +3. Click "Save" +4. Click "Test" +5. 
Check the execution results: + - Success: You should see a 200 status code with stream details + - Failure: Check CloudWatch Logs for error details + +## Step 8: Test via HTTP (if using Function URL) + +Using curl: +```bash +curl -X POST https://YOUR-FUNCTION-URL/ \ + -H "Content-Type: application/json" \ + -d '{ + "body": { + "action": "create", + "cam_name": "Camera1", + "workflow_name": "MyWorkflow", + "privacy_status": "private" + } + }' +``` + +## API Reference + +### Request Format + +```json +{ + "body": { + "action": "create" | "end", + "cam_name": "string", + "workflow_name": "string", + "privacy_status": "public" | "private" | "unlisted" + } +} +``` + +### Parameters + +- **action** (required): + - `"create"` - Creates a new YouTube live broadcast + - `"end"` - Ends active broadcasts for the workflow + +- **cam_name** (optional): Name of the camera/device (default: "UnknownCam") + +- **workflow_name** (required): Identifier for your workflow. Used to: + - Group related streams + - Create/find playlists + - End specific broadcasts + +- **privacy_status** (optional): Video privacy setting (default: "private") + - `"public"` - Anyone can watch + - `"private"` - Only you can watch + - `"unlisted"` - Anyone with the link can watch + +### Response Format + +#### Success Response (action: create) +```json +{ + "statusCode": 200, + "body": { + "status": "created", + "result": { + "broadcast_id": "...", + "video_id": "...", + "stream_id": "...", + "playlist_id": "...", + "title": "...", + "privacy_status": "private", + "ffmpeg_url": "rtmp://...", + "video_url": "https://www.youtube.com/watch?v=...", + "playlist_add_status": "added" + } + } +} +``` + +#### Success Response (action: end) +```json +{ + "statusCode": 200, + "body": { + "status": "ended", + "message": "WorkflowName ended successfully" + } +} +``` + +#### Error Response +```json +{ + "statusCode": 400, + "body": { + "error": "Error description" + } +} +``` + +## Monitoring and Logs + +1. 
**CloudWatch Logs**: + - Go to [CloudWatch Console](https://console.aws.amazon.com/cloudwatch) + - Navigate to "Logs" → "Log groups" + - Find `/aws/lambda/youtube-stream` (or your function name) + - View recent invocations and error messages + +2. **Lambda Metrics**: + - In Lambda Console, click "Monitor" tab + - View invocation count, duration, errors, and throttles + +## Troubleshooting + +### Common Issues + +**1. "No valid credentials available"** +- Check that `token.pickle` exists in S3 at the correct path +- Verify the Lambda role has S3 read/write permissions +- Ensure the token hasn't been revoked in Google Cloud Console + +**2. "Access Denied" to S3** +- Verify the IAM role has the correct S3 permissions +- Check bucket name and key path match the code configuration + +**3. "Task timed out after 3.00 seconds"** +- Increase Lambda timeout to 60 seconds (see Step 4) +- Check network connectivity to YouTube API + +**4. "Invalid JSON or internal error"** +- Verify request format matches the API reference +- Check CloudWatch Logs for detailed error messages + +### Getting Help + +- **GitHub Issues**: [Report a bug](https://github.com/AccelerationConsortium/streamingLambda/issues) +- **Documentation**: [AC Training Lab](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html) +- **YouTube API**: [YouTube Data API v3 Docs](https://developers.google.com/youtube/v3) + +## Security Best Practices + +1. **Use IAM authentication** for Function URLs when possible +2. **Restrict S3 bucket access** to only the Lambda role +3. **Enable CloudTrail** to audit Lambda invocations +4. **Rotate credentials** regularly in Google Cloud Console +5. **Use private** or **unlisted** privacy status for sensitive streams +6. **Monitor costs** - YouTube API has quota limits + +## Updating the Function + +To update your Lambda function with new code: + +1. Download/build the latest `deployment.zip` +2. Go to Lambda Console → your function +3. 
Click "Upload from" → ".zip file" +4. Select the new `deployment.zip` +5. Click "Save" + +The function will be updated without downtime. + +## Cost Considerations + +- **Lambda**: First 1M requests/month are free, then $0.20 per 1M requests +- **S3**: Minimal costs for storing one small file +- **Data Transfer**: Charges may apply for data transfer out +- **YouTube API**: Free tier includes 10,000 quota units/day + +For most use cases, costs should be less than $1/month. diff --git a/README.md b/README.md index 454cca6..8773241 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,98 @@ -# YouTube Streaming Lambda Function (deployed via Chalice) +# YouTube Streaming Lambda Function -This repository deploys an AWS Lambda function using [AWS Chalice](https://github.com/aws/chalice) for automatic YouTube livestream creation and management from [monitoring device](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html). +This repository provides an AWS Lambda function for automatic YouTube livestream creation and management from [monitoring devices](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html). + +## Deployment Options + +### Option 1: Direct AWS Lambda Deployment (Recommended for external users) + +This method allows you to deploy the Lambda function directly to AWS without requiring Chalice or any special deployment tools. Perfect for users outside the Acceleration Consortium who want to set up their own YouTube streaming Lambda function. + +📖 **[See DEPLOYMENT.md for detailed step-by-step instructions](DEPLOYMENT.md)** + +#### Quick Start + +1. **Download the deployment package** + + Go to the [Releases](../../releases) page and download the latest `deployment.zip` file. + + Or build it locally: + ```bash + git clone https://github.com/AccelerationConsortium/streamingLambda.git + cd streamingLambda + ./build-deployment-zip.sh + ``` + +2. 
**Upload to AWS Lambda and Configure** + + Follow the [detailed deployment guide](DEPLOYMENT.md) for complete instructions on: + - Creating IAM roles with proper permissions + - Uploading the deployment package + - Configuring function settings + - Setting up S3 bucket for YouTube token + - Creating a Function URL + - Testing the function + +3. **Use with your monitoring device** + + Once deployed, use the Lambda Function URL with your [PiCam device](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html). + +### Option 2: Deployment via Chalice (For AC organization) + +This method uses [AWS Chalice](https://github.com/aws/chalice) for automatic deployment and is primarily used by the Acceleration Consortium for internal deployments. + +#### Prerequisites + +- AWS credentials configured +- Python 3.11+ +- Chalice installed + +#### Steps + +1. Install dependencies: + ```bash + pip install -r requirements.txt + ``` + +2. Deploy to AWS: + ```bash + chalice deploy --stage dev + ``` + +## How It Works + +The Lambda function integrates with the YouTube Data API v3 to: + +1. **Create broadcasts**: Automatically creates a YouTube live broadcast with a stream key +2. **Manage streams**: Binds streams to broadcasts and configures settings +3. **End broadcasts**: Gracefully ends active broadcasts for a given workflow +4. **Organize content**: Adds broadcasts to workflow-specific playlists + +## Configuration + +### YouTube API Credentials + +The function requires a `token.pickle` file containing valid YouTube API credentials stored in an S3 bucket. This token is automatically refreshed when expired. 
+ +### Environment Variables + +If you need to customize the S3 bucket or channel ID, modify the constants in `chalicelib/ytb_api_utils.py`: + +```python +CHANNEL_ID = "your-channel-id" +S3_BUCKET = "your-bucket-name" +S3_KEY = "token/token.pickle" +``` + +## Related Documentation + +- [AC Training Lab - PiCam Device Setup](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html) +- [AWS Lambda Python Package Documentation](https://docs.aws.amazon.com/lambda/latest/dg/python-package.html) +- [YouTube Data API v3](https://developers.google.com/youtube/v3) + +## Support + +For issues related to: +- **Deployment**: Open an issue in this repository +- **PiCam device setup**: See [AC Training Lab documentation](https://ac-training-lab.readthedocs.io/en/latest/devices/picam.html) +- **YouTube API**: Consult the [YouTube API documentation](https://developers.google.com/youtube/v3) diff --git a/build-deployment-zip.sh b/build-deployment-zip.sh new file mode 100755 index 0000000..9418934 --- /dev/null +++ b/build-deployment-zip.sh @@ -0,0 +1,43 @@ +#!/bin/bash + +# Build script for creating AWS Lambda deployment package +# This script creates a deployment.zip file that can be uploaded directly to AWS Lambda + +set -e + +echo "Building AWS Lambda deployment package..." + +# Clean up any existing artifacts +echo "Cleaning up existing artifacts..." +rm -rf dependencies deployment.zip + +# Create dependencies directory +echo "Creating dependencies directory..." +mkdir -p dependencies + +# Install Python dependencies +echo "Installing Python dependencies..." +pip install --target ./dependencies \ + boto3 \ + google-api-python-client \ + google-auth \ + google-auth-oauthlib \ + google-auth-httplib2 + +# Copy lambda function and chalicelib +echo "Copying lambda function and chalicelib..." +cp lambda_function.py dependencies/ +cp -r chalicelib dependencies/ + +# Create deployment package +echo "Creating deployment.zip..." +cd dependencies +zip -q -r ../deployment.zip . 
+cd .. + +# Get the size of the deployment package +SIZE=$(du -h deployment.zip | cut -f1) +echo "✓ deployment.zip created successfully (${SIZE})" +echo "" +echo "You can now upload deployment.zip to AWS Lambda!" +echo "See README.md for detailed deployment instructions." diff --git a/lambda_function.py b/lambda_function.py new file mode 100644 index 0000000..b624119 --- /dev/null +++ b/lambda_function.py @@ -0,0 +1,77 @@ +import json +import logging +from chalicelib.ytb_api_utils import ( + init_youtube_service, + create_broadcast_and_bind_stream, + end_active_broadcasts_for_device +) + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +def lambda_handler(event, context): + """ + AWS Lambda handler function for YouTube streaming management. + + This function can be directly deployed to AWS Lambda without Chalice. + + Expected event payload: + { + "body": { + "action": "create" or "end", + "cam_name": "camera name", + "workflow_name": "workflow identifier", + "privacy_status": "public", "private", or "unlisted" (optional, default: "private") + } + } + """ + logger.info("Lambda handler invoked") + try: + body = event.get("body") + if isinstance(body, str): + payload = json.loads(body) + elif isinstance(body, dict): + payload = body + else: + raise ValueError("Invalid body format") + + logger.info(f"Received payload: {payload}") + + action = payload.get("action") + cam_name = payload.get("cam_name", "UnknownCam") + workflow_name = payload.get("workflow_name", "UnknownWorkflow") + privacy_status = payload.get("privacy_status", "private") + + if action not in ("create", "end"): + return { + "statusCode": 400, + "body": json.dumps({"error": "Invalid or missing 'action'. 
Must be 'create' or 'end'."}) + } + + init_youtube_service() + + if action == "create": + result = create_broadcast_and_bind_stream(cam_name, workflow_name, privacy_status) + return { + "statusCode": 200, + "body": json.dumps({"status": "created", "result": result}) + } + else: # action == "end" + end_active_broadcasts_for_device(workflow_name) + return { + "statusCode": 200, + "body": json.dumps({"status": "ended", "message": f"{workflow_name} ended successfully"}) + } + + except ValueError as ve: + logger.exception("Invalid input") + return { + "statusCode": 400, + "body": json.dumps({"error": f"Invalid input: {str(ve)}"}) + } + except Exception as e: + logger.exception("Error in lambda_handler") + return { + "statusCode": 500, + "body": json.dumps({"error": f"Internal server error: {str(e)}"}) + }