def __init__(self, server: str, email: str, api_token: str):
    """Initialize a Jira client for one instance.

    Args:
        server: Base URL of the Jira instance (e.g. "https://x.atlassian.net").
        email: Account email used for basic auth.
        api_token: API token for authentication.
    """
    self.server = server
    self.email = email
    self.api_token = api_token
    # Kept for raw REST calls that the `jira` library does not cover.
    self.auth = HTTPBasicAuth(email, api_token)
    self.jira = JIRA(
        server=server,
        basic_auth=(email, api_token),
        options={'rest_api_version': '3'}
    )

def get_issues_for_project(self, project_key: str) -> List[Dict[str, Any]]:
    """Fetch all non-completed issues of a project via the v3 search API.

    Uses the enhanced JQL search endpoint (GET /rest/api/3/search/jql).
    That endpoint paginates with an opaque ``nextPageToken`` — it does NOT
    honor ``startAt``, so startAt-based paging would silently re-fetch the
    first page forever on projects with more than one page of issues.

    Args:
        project_key: Jira project key (e.g. "PROJ").

    Returns:
        List of serialized issue dicts; empty list on any error.
    """
    all_issues: List[Dict[str, Any]] = []
    max_results = 50

    # JQL to exclude completed statuses
    jql = f'project = {project_key} AND status NOT IN (Done, Closed, Resolved, Completed)'

    # Use the new /rest/api/3/search/jql endpoint
    url = f"{self.server}/rest/api/3/search/jql"
    headers = {'Accept': 'application/json'}  # GET request: no body, no Content-Type needed

    try:
        next_page_token = None
        while True:
            params = {
                'jql': jql,
                'maxResults': max_results,
                'fields': '*all',
                'expand': 'names'
            }
            # Token is absent on the first request and on the last page.
            if next_page_token:
                params['nextPageToken'] = next_page_token

            response = requests.get(
                url,
                params=params,
                headers=headers,
                auth=self.auth
            )

            if response.status_code != 200:
                print(f"Error fetching issues for project {project_key}: HTTP {response.status_code}")
                print(f"Response: {response.text}")
                break

            data = response.json()
            issues = data.get('issues', [])

            if not issues:
                break

            # Convert raw JSON issues to serialized format
            for issue_data in issues:
                all_issues.append(self._serialize_issue_from_json(issue_data))

            # No nextPageToken in the response means this was the final page.
            next_page_token = data.get('nextPageToken')
            if not next_page_token:
                break

        return all_issues

    except Exception as e:
        print(f"Error fetching issues for project {project_key}: {str(e)}")
        import traceback
        traceback.print_exc()
        return []
get_issues_for_project(self, project_key: str) -> List[Dict[str, Any]]: except Exception as e: print(f"Error fetching issues for project {project_key}: {str(e)}") + import traceback + traceback.print_exc() return [] def _serialize_project(self, project) -> Dict[str, Any]: @@ -126,6 +160,41 @@ def _serialize_issue(self, issue) -> Dict[str, Any]: serialized['epic_link'] = str(fields.customfield_10014) return serialized + + def _serialize_issue_from_json(self, issue_data: Dict[str, Any]) -> Dict[str, Any]: + """Convert raw JSON issue data from API to dictionary""" + fields = issue_data.get('fields', {}) + + serialized = { + 'id': issue_data.get('id', ''), + 'key': issue_data.get('key', ''), + 'issue_type': fields.get('issuetype', {}).get('name', '') if fields.get('issuetype') else '', + 'summary': fields.get('summary', ''), + 'status': fields.get('status', {}).get('name', '') if fields.get('status') else '', + 'priority': fields.get('priority', {}).get('name') if fields.get('priority') else None, + 'assignee': fields.get('assignee', {}).get('displayName') if fields.get('assignee') else None, + 'reporter': fields.get('reporter', {}).get('displayName') if fields.get('reporter') else None, + 'created': fields.get('created'), + 'updated': fields.get('updated'), + 'project_key': fields.get('project', {}).get('key', '') if fields.get('project') else '', + 'parent': None, + 'subtasks': [] + } + + # Get parent issue if exists + if fields.get('parent'): + serialized['parent'] = fields['parent'].get('key') + + # Get subtasks + if fields.get('subtasks'): + serialized['subtasks'] = [subtask.get('key', '') for subtask in fields['subtasks']] + + # For Epic relationship (if using Jira Cloud/Server with Epic Link) + if fields.get('customfield_10014'): + # Epic Link (common custom field for epic relationship) + serialized['epic_link'] = str(fields['customfield_10014']) + + return serialized class JiraFetcher: """Main class for fetching data from multiple Jira instances""" diff --git 
a/requirements.txt b/requirements.txt index 9a98942..c13451f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,4 +2,4 @@ python-redmine==2.3.0 requests==2.26.0 python-dotenv==0.19.0 azure-devops==7.1.0b3 -jira==3.5.0 \ No newline at end of file +jira==3.9.1 \ No newline at end of file diff --git a/sync_projects_new.py b/sync_projects_new.py index e29508a..f1747e1 100644 --- a/sync_projects_new.py +++ b/sync_projects_new.py @@ -41,7 +41,7 @@ def get_timecamp_tasks(): print(f"Unexpected response format: {type(data)}") return [] -def create_timecamp_task(name, parent_id, external_task_id): +def create_timecamp_task(name, parent_id, external_task_id, external_parent_id=None): """Create a new task in TimeCamp""" url = "https://app.timecamp.com/third_party/api/tasks" headers = { @@ -62,6 +62,10 @@ def create_timecamp_task(name, parent_id, external_task_id): 'external_task_id': external_task_id } + # Add external_parent_id if provided + if external_parent_id is not None: + data['external_parent_id'] = external_parent_id + response = requests.post(url, headers=headers, json=data) response.raise_for_status() @@ -99,8 +103,8 @@ def sync_hierarchical_tasks_to_timecamp(): """Main sync function to sync hierarchical task data from tasks.json to TimeCamp""" # Load hierarchical task data from JSON file - azure_tasks = load_tasks_from_json() - if not azure_tasks: + source_tasks = load_tasks_from_json() + if not source_tasks: return # Get existing TimeCamp tasks @@ -110,10 +114,10 @@ def sync_hierarchical_tasks_to_timecamp(): timecamp_tasks_map = {} for entry in timecamp_entries: external_id = entry.get('external_task_id') - if external_id and external_id.startswith('sync_'): + if external_id: timecamp_tasks_map[external_id] = entry - print(f"Found {len(timecamp_tasks_map)} existing sync tasks in TimeCamp") + print(f"Found {len(timecamp_tasks_map)} existing tasks in TimeCamp with external IDs") # Create mapping of source task_id to TimeCamp task_id for newly created items 
source_to_timecamp_map = {} @@ -142,30 +146,31 @@ def get_hierarchy_level(task, all_tasks): return get_hierarchy_level(parent_task, all_tasks) + 1 # Add hierarchy level to each task and sort by level - for task in azure_tasks: - task['_hierarchy_level'] = get_hierarchy_level(task, azure_tasks) + for task in source_tasks: + task['_hierarchy_level'] = get_hierarchy_level(task, source_tasks) # Sort tasks by hierarchy level (parents before children) - azure_tasks_sorted = sorted(azure_tasks, key=lambda x: (x['_hierarchy_level'], x['task_id'])) + sorted_tasks = sorted(source_tasks, key=lambda x: (x['_hierarchy_level'], x['task_id'])) # Process all tasks in hierarchy order - for task in azure_tasks_sorted: - external_id = f"sync_{task['task_id']}" + for task in sorted_tasks: + external_id = task['task_id'] active_external_ids.add(external_id) - - - # Determine parent TimeCamp task ID + # Determine parent TimeCamp task ID and external parent ID if task['parent_id'] == 0: # Top-level task - parent is the configured TimeCamp task parent_timecamp_id = TIMECAMP_TASK_ID + external_parent_id = None else: # Child task - parent should be mapped from source system parent_timecamp_id = source_to_timecamp_map.get(task['parent_id']) + external_parent_id = task['parent_id'] if not parent_timecamp_id: # If parent wasn't created successfully, make this a top-level task print(f"Warning: Parent task not found for {task['name']}, making it top-level") parent_timecamp_id = TIMECAMP_TASK_ID + external_parent_id = None if external_id not in timecamp_tasks_map: # Determine task type for logging @@ -176,7 +181,8 @@ def get_hierarchy_level(task, all_tasks): new_task = create_timecamp_task( name=task['name'], parent_id=parent_timecamp_id, - external_task_id=external_id + external_task_id=external_id, + external_parent_id=external_parent_id ) source_to_timecamp_map[task['task_id']] = new_task['task_id'] timecamp_tasks_map[external_id] = new_task @@ -203,7 +209,7 @@ def get_hierarchy_level(task, 
all_tasks): print(f"- Created: {created_tasks} new tasks") print(f"- Existing: {existing_tasks} tasks (no change needed)") print(f"- Archived: {archived_tasks} obsolete tasks") - print(f"- Total processed: {len(azure_tasks_sorted)} tasks") + print(f"- Total processed: {len(sorted_tasks)} tasks") def show_sync_preview(): """Show a preview of what would be synced without making changes"""