Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 6 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ jobs:
CLOUDOS_WORKSPACE_ID: ${{ secrets.CLOUDOS_WORKSPACE_ID_ADAPT }}
CLOUDOS_URL: "https://cloudos.lifebit.ai"
run: |
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
job_list_filtering:
needs: job_run_and_status
runs-on: ubuntu-latest
Expand All @@ -81,15 +81,15 @@ jobs:
run: |
JOB_ID="${{ needs.job_run_and_status.outputs.job_id }}"
# Test filtering by status, project and workflow name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
# Test filtering job id
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
# Test filtering job name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
# Test filtering by only mine
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
# Test filtering by queue
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
echo 'q' | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
job_details:
needs: job_run_and_status
runs-on: ubuntu-latest
Expand Down
12 changes: 6 additions & 6 deletions .github/workflows/ci_az.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ jobs:
CLOUDOS_WORKSPACE_ID: ${{ secrets.CLOUDOS_WORKSPACE_ID_AZURE }}
CLOUDOS_URL: "https://dev.sdlc.lifebit.ai"
run: |
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
job_list_filtering_az:
needs: job_run_and_status_az
runs-on: ubuntu-latest
Expand All @@ -62,15 +62,15 @@ jobs:
run: |
JOB_ID="${{ needs.job_run_and_status_az.outputs.job_id }}"
# Test filtering by status, project and workflow name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
# Test filtering job id
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
# Test filtering job name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
# Test filtering by only mine
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
# Test filtering by queue
#cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
#echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
job_details_az:
needs: job_run_and_status_az
runs-on: ubuntu-latest
Expand Down
12 changes: 6 additions & 6 deletions .github/workflows/ci_dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ jobs:
CLOUDOS_WORKSPACE_ID: ${{ secrets.CLOUDOS_WORKSPACE_ID_DEV }}
CLOUDOS_URL: "https://dev.sdlc.lifebit.ai"
run: |
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID
job_list_filtering_dev:
needs: job_run_and_status_dev
runs-on: ubuntu-latest
Expand All @@ -62,15 +62,15 @@ jobs:
run: |
JOB_ID="${{ needs.job_run_and_status_dev.outputs.job_id }}"
# Test filtering by status, project and workflow name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-status completed --filter-project cloudos-cli-tests --filter-workflow GH-rnatoy --last --last-n-jobs 10
# Test filtering job id
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-id $JOB_ID
# Test filtering job name
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-job-name "cloudos-cli-CI-test" --last-n-jobs 10
# Test filtering by only mine
cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-only-mine --last-n-jobs 10
# Test filtering by queue
#cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
#echo q | cloudos job list --cloudos-url $CLOUDOS_URL --apikey $CLOUDOS_TOKEN --workspace-id $CLOUDOS_WORKSPACE_ID --filter-queue "cost_saving_standard_nextflow" --last-n-jobs 10
job_details_dev:
needs: job_run_and_status_dev
runs-on: ubuntu-latest
Expand Down
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,11 @@
## lifebit-ai/cloudos-cli: changelog

## v2.87.0 (2026-03-30)

### Feat

- Adds system queues

## v2.86.0 (2026-03-23)

### Feat
Expand Down
12 changes: 9 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -401,7 +401,7 @@ Job queues are required for running jobs using AWS batch executor. The available

#### List Queues

This command allows you to view available computational queues and their configurations. You can get a summary of all available workspace job queues in three different output formats using the `--output-format` option:
This command allows you to view available computational queues and their configurations. By default, both regular workspace queues and system queues are displayed. You can get a summary of all available job queues in three different output formats using the `--output-format` option:

- **stdout** (default): Displays a rich formatted table directly in the terminal with pagination and visual formatting
- **csv**: Saves queue data to a CSV file with a selection of available queue information, or all information using the `--all-fields` flag
Expand All @@ -415,6 +415,12 @@ cloudos queue list --profile my_profile
cloudos queue list --profile my_profile --output-format stdout
```

To exclude system queues and show only workspace queues:

```bash
cloudos queue list --profile my_profile --exclude-system-queues
```

To save all available job queues in JSON format:

```bash
Expand All @@ -437,7 +443,7 @@ cloudos queue list --profile my_profile --output-format csv

**Job queues for platform workflows**

Platform workflows (those provided by CloudOS in your workspace as modules) run on separate and specific AWS batch queues. Therefore, CloudOS will automatically assign the valid queue and you should not specify any queue using the `--job-queue` parameter. Any attempt to use this parameter will be ignored. Examples of such platform workflows are "System Tools" and "Data Factory" workflows.
Platform workflows (those provided by CloudOS in your workspace as modules) run on separate and specific AWS batch queues (system queues). Therefore, CloudOS will automatically assign the valid queue and you should not specify any queue using the `--job-queue` parameter. Any attempt to use this parameter will be ignored. Examples of such platform workflows are "System Tools" and "Data Factory" workflows.


### Workflow
Expand Down Expand Up @@ -851,7 +857,7 @@ You can find specific jobs within your workspace using the filtering options. Fi
- **`--filter-job-id`**: Filter jobs by specific job ID (exact match required)
- **`--filter-only-mine`**: Show only jobs belonging to the current user
- **`--filter-owner`**: Show only jobs for the specified owner (exact match required, e.g., "John Doe")
- **`--filter-queue`**: Filter jobs by queue name (only applies to batch jobs)
- **`--filter-queue`**: Filter jobs by queue name (works with both regular and system queues; only applies to batch jobs)

**Filtering Examples**

Expand Down
2 changes: 1 addition & 1 deletion cloudos_cli/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '2.86.0'
__version__ = '2.87.0'
10 changes: 10 additions & 0 deletions cloudos_cli/clos.py
Original file line number Diff line number Diff line change
Expand Up @@ -1257,6 +1257,16 @@ def get_job_list(self, workspace_id, last_n_jobs=None, page=None, page_size=None
if use_pagination_mode and target_job_count != 'all' and isinstance(target_job_count, int) and target_job_count > 0:
all_jobs = all_jobs[:target_job_count]

# --- Adjust pagination metadata for client-side filtering ---
# When filter_queue is applied, we've fetched multiple API pages but we're showing
# the filtered results. Reset pagination to reflect the filtered view.
if filter_queue and last_pagination_metadata:
last_pagination_metadata = {
'Pagination-Count': len(all_jobs), # Total filtered jobs collected
'Pagination-Page': current_page, # Use current_page (guaranteed to be int) instead of page
'Pagination-Limit': current_page_size # Page size
}

return {'jobs': all_jobs, 'pagination_metadata': last_pagination_metadata}

@staticmethod
Expand Down
21 changes: 17 additions & 4 deletions cloudos_cli/jobs/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import rich_click as click
import cloudos_cli.jobs.job as jb
from cloudos_cli.jobs.job import fetch_job_page
from cloudos_cli.clos import Cloudos
from cloudos_cli.utils.errors import BadRequestException
from cloudos_cli.utils.resources import ssl_selector
Expand Down Expand Up @@ -1221,7 +1222,7 @@ def job_details(ctx,
help='Filter to show only jobs belonging to the current user.',
is_flag=True)
@click.option('--filter-queue',
help='Filter jobs by queue name. Only applies to jobs running in batch environment. Non-batch jobs are preserved in results.')
              help='Filter jobs by queue name. Only applies to jobs running in batch environment. Non-batch jobs are preserved in results.')
@click.option('--filter-owner',
help='Filter jobs by owner username.')
@click.option('--verbose',
Expand Down Expand Up @@ -1335,7 +1336,13 @@ def list_jobs(ctx,
])
if output_format == 'stdout':
# For stdout, always show a user-friendly message
create_job_list_table([], cloudos_url, pagination_metadata, selected_columns)
# Create callback for interactive pagination
fetch_page = lambda page_num: fetch_job_page(
cl, workspace_id, page_num, page_size, None, archived, verify_ssl,
filter_status, filter_job_name, filter_project, filter_workflow,
filter_job_id, filter_only_mine, filter_owner, filter_queue, last
)
create_job_list_table([], cloudos_url, pagination_metadata, selected_columns, fetch_page_callback=fetch_page)
else:
if filters_used:
print('A total of 0 jobs collected.')
Expand All @@ -1347,8 +1354,14 @@ def list_jobs(ctx,
'does not exist. Please, try a smaller number for --page or collect all the jobs by not ' +
'using --page parameter.')
elif output_format == 'stdout':
# Display as table
create_job_list_table(my_jobs_r, cloudos_url, pagination_metadata, selected_columns)
# Display as table with interactive pagination
# Create callback for interactive pagination
fetch_page = lambda page_num: fetch_job_page(
cl, workspace_id, page_num, page_size, None, archived, verify_ssl,
filter_status, filter_job_name, filter_project, filter_workflow,
filter_job_id, filter_only_mine, filter_owner, filter_queue, last
)
create_job_list_table(my_jobs_r, cloudos_url, pagination_metadata, selected_columns, fetch_page_callback=fetch_page)
elif output_format == 'csv':
my_jobs = cl.process_job_list(my_jobs_r, all_fields)
cl.save_job_list_to_csv(my_jobs, outfile)
Expand Down
65 changes: 65 additions & 0 deletions cloudos_cli/jobs/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -1727,3 +1727,68 @@ def get_branches(self, repository_identifier, owner, workflow_owner_id,

return {"branches": all_branches, "total": total or len(all_branches)}


def fetch_job_page(cl, workspace_id, page_num, page_size, last_n_jobs, archived, verify_ssl,
                   filter_status, filter_job_name, filter_project, filter_workflow,
                   filter_job_id, filter_only_mine, filter_owner, filter_queue, last):
    """Fetch one page of jobs from the CloudOS API.

    Thin wrapper around ``cl.get_job_list`` used as a pagination callback:
    it forwards every filter unchanged and requests a single page.

    Parameters
    ----------
    cl : Cloudos
        CloudOS API client instance.
    workspace_id : str
        The CloudOS workspace ID.
    page_num : int
        Page number to fetch (1-indexed).
    page_size : int
        Number of jobs per page.
    last_n_jobs : int or None
        Last N jobs parameter (should be None for pagination mode).
    archived : bool
        Whether to include archived jobs.
    verify_ssl : bool or str
        SSL verification setting.
    filter_status : str or None
        Status filter.
    filter_job_name : str or None
        Job name filter.
    filter_project : str or None
        Project filter.
    filter_workflow : str or None
        Workflow filter.
    filter_job_id : str or None
        Job ID filter.
    filter_only_mine : bool
        Filter for user's own jobs.
    filter_owner : str or None
        Owner filter.
    filter_queue : str or None
        Queue filter.
    last : bool
        Use latest workflow for duplicates.

    Returns
    -------
    dict
        Dictionary with a 'jobs' list and a 'pagination_metadata' dict,
        exactly as returned by ``cl.get_job_list``.
    """
    # Collect every keyword argument once, then delegate in a single call.
    query_kwargs = {
        'last_n_jobs': last_n_jobs,
        'page': page_num,
        'page_size': page_size,
        'archived': archived,
        'verify': verify_ssl,
        'filter_status': filter_status,
        'filter_job_name': filter_job_name,
        'filter_project': filter_project,
        'filter_workflow': filter_workflow,
        'filter_job_id': filter_job_id,
        'filter_only_mine': filter_only_mine,
        'filter_owner': filter_owner,
        'filter_queue': filter_queue,
        'last': last,
    }
    return cl.get_job_list(workspace_id, **query_kwargs)

6 changes: 5 additions & 1 deletion cloudos_cli/queue/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ def queue():
'just the preconfigured selected fields. Only applicable ' +
'when --output-format=csv'),
is_flag=True)
@click.option('--exclude-system-queues',
help='Exclude system job queues from the list.',
is_flag=True)
@click.option('--disable-ssl-verification',
help=('Disable SSL certificate verification. Please, remember that this option is ' +
'not generally recommended for security reasons.'),
Expand All @@ -63,6 +66,7 @@ def list_queues(ctx,
output_basename,
output_format,
all_fields,
exclude_system_queues,
disable_ssl_verification,
ssl_cert,
profile):
Expand All @@ -72,7 +76,7 @@ def list_queues(ctx,
verify_ssl = ssl_selector(disable_ssl_verification, ssl_cert)
print('Executing list...')
j_queue = Queue(cloudos_url, apikey, None, workspace_id, verify=verify_ssl)
my_queues = j_queue.get_job_queues()
my_queues = j_queue.get_job_queues(exclude_system_queues=exclude_system_queues)
if len(my_queues) == 0:
raise ValueError('No AWS batch queues found. Please, make sure that your CloudOS supports AWS batch queues')
if output_format == 'stdout':
Expand Down
Loading
Loading