diff --git a/recipes/sync_recipe_utils.py b/recipes/sync_recipe_utils.py
index ccb65bef..d08d54b7 100644
--- a/recipes/sync_recipe_utils.py
+++ b/recipes/sync_recipe_utils.py
@@ -86,7 +86,7 @@ def export_recipes_to_yaml(recipes, yml_file):
     class RecipeDumper(yaml.Dumper):
         pass
 
-    class literal(unicode):
+    class literal(str):
         pass
 
     def _dict_representer(dumper, data):
@@ -100,17 +100,9 @@ def _literal_representer(dumper, data):
     RecipeDumper.add_representer(dict, _dict_representer)
     RecipeDumper.add_representer(literal, _literal_representer)
 
-    # Needed for python2,
-    # otherwise: 'item': !!python/unicode "some string" is dumped
-    if sys.version_info < (3,0):
-        def represent_unicode(dumper, data):
-            return dumper.represent_scalar(u'tag:yaml.org,2002:str', data)
-
-        RecipeDumper.add_representer(unicode, represent_unicode)
-
     yaml_recipes = []
     for r in recipes:
-        recipe_expression = literal(unicode(dict(r)['fields'][0]['expression']))
+        recipe_expression = literal(str(dict(r)['fields'][0]['expression']))
         dict(r)['fields'][0]['expression'] = recipe_expression
         recipe_details = {
             "name": dict(r)['name'],
diff --git a/recipes/tests/test_recipes_sync.py b/recipes/tests/test_recipes_sync.py
index 6126389d..e93b1e5c 100644
--- a/recipes/tests/test_recipes_sync.py
+++ b/recipes/tests/test_recipes_sync.py
@@ -34,7 +34,6 @@ def retrieve(*args, **kwargs):
         return usr
     monkeypatch.setattr(sync_recipes.sb.User, "retrieve", retrieve)
 
-@pytest.mark.skipif(sys.version_info < (3,0), reason="requires python3")
 def test_sync_recipe(mock_dataset_template_retrieve, mock_user_retrieve):
 
     with pytest.raises(SystemExit) as e:
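
The `literal(str)` subclass exists so the custom representer can single out recipe expressions for block-style YAML output. A minimal standalone sketch of the same pattern, using only pyyaml's documented `add_representer` API (the sample expression is invented):

    import yaml

    class literal(str):
        """Marker subclass: strings to emit as YAML literal blocks."""
        pass

    def _literal_representer(dumper, data):
        # style='|' emits a literal block scalar, preserving newlines
        return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')

    yaml.add_representer(literal, _literal_representer)

    # A multi-line recipe expression round-trips as a readable block:
    print(yaml.dump({'expression': literal('record.gene\n.upper()')}))
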
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 7bdc7f63..d55349f7 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,15 +1,15 @@
 flake8
 mock
 requests[security]
-six
+urllib3>=1.26.0
 flask
 percy
 selenium
-dash==1.19.0
-dash_auth==1.4.1
-dash_core_components==1.15.0
-dash_html_components==1.1.2
-dash_renderer==1.9.0
+dash>=2.14.0
+dash_auth>=2.0.0
+dash_core_components>=2.0.0
+dash_html_components>=2.0.0
+dash_renderer>=1.9.1
 Werkzeug<=2.0.3
 solvebio==2.12.0
 pyyaml==5.3.1
diff --git a/setup.cfg b/setup.cfg
index b88034e4..08aedd7e 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,2 +1,2 @@
 [metadata]
-description-file = README.md
+description_file = README.md
diff --git a/setup.py b/setup.py
index 1664905b..e185b65b 100644
--- a/setup.py
+++ b/setup.py
@@ -1,12 +1,11 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import, print_function
 from setuptools import setup, find_packages
 import sys
 import warnings
 
 VERSION = 'undefined'
-install_requires = ['six', 'pyprind']
+install_requires = ['pyprind', 'requests>=2.0.0', 'urllib3>=1.26.0']
 extra = {}
 
 with open('solvebio/version.py') as f:
@@ -14,20 +13,6 @@
     if row.startswith('VERSION'):
         exec(row)
 
-if sys.version_info < (2, 6):
-    warnings.warn(
-        'Python 2.5 is no longer officially supported by SolveBio. '
-        'If you have any questions, please file an issue on GitHub or '
-        'contact us at support@solvebio.com.',
-        DeprecationWarning)
-    install_requires.append('requests >= 0.8.8, < 0.10.1')
-    install_requires.append('ssl')
-elif sys.version_info < (2, 7):
-    install_requires.append('ordereddict')
-else:
-    install_requires.append('requests>=2.0.0')
-
-
 # solvebio-recipes requires additional packages
 recipes_requires = [
     'pyyaml==5.3.1',
@@ -38,17 +23,6 @@
     "recipes": recipes_requires
 }
 
-# Adjustments for Python 2 vs 3
-if sys.version_info < (3, 0):
-    # Get simplejson if we don't already have json
-    try:
-        import json  # noqa
-    except ImportError:
-        install_requires.append('simplejson')
-
-    # solvebio-recipes only available in python3
-    extras_requires = {}
-
 with open('README.md') as f:
     long_description = f.read()
diff --git a/solvebio/__init__.py b/solvebio/__init__.py
index 460b3848..fc808c32 100644
--- a/solvebio/__init__.py
+++ b/solvebio/__init__.py
@@ -7,11 +7,10 @@
 Have questions or comments? email us at: support@solvebio.com
 """
-from __future__ import absolute_import
-from __future__ import print_function
 __docformat__ = 'restructuredtext'
 
 import os as _os
+import errno
 import logging as _logging
 from typing import Literal
 from .help import open_help as _open_help
@@ -61,7 +60,7 @@ def _init_logging():
             _os.makedirs(logdir)
         except OSError as err:
             # Re-raise anything other than 'File exists'.
-            if err[1] != 'File exists':
+            if err.errno != errno.EEXIST:
                 raise err
 
     file_handler = _logging.FileHandler(logfile_path)
@@ -74,7 +73,6 @@ def _init_logging():
     try:
         base_logger.addHandler(_logging.NullHandler())
     except:
-        # supports Python < 2.7
         class NullHandler(_logging.Handler):
             def emit(self, record):
                 pass
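
The `err.errno != errno.EEXIST` check replaces Python 2's `err[1]` tuple indexing, which no longer works on Python 3 `OSError`. Since the codebase is now Python 3 only, the same race-free directory creation can also be written with `exist_ok`; a small sketch (the path is illustrative):

    import errno
    import os

    logdir = os.path.expanduser('~/.solvebio')  # example path

    # Pre-3.2 style, as in the patch:
    try:
        os.makedirs(logdir)
    except OSError as err:
        if err.errno != errno.EEXIST:  # re-raise anything but 'File exists'
            raise

    # Python 3.2+ equivalent:
    os.makedirs(logdir, exist_ok=True)
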
diff --git a/solvebio/annotate.py b/solvebio/annotate.py
index 2b12551c..56a2b366 100644
--- a/solvebio/annotate.py
+++ b/solvebio/annotate.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from .client import client
diff --git a/solvebio/auth.py b/solvebio/auth.py
index 47e78e56..af488fc5 100644
--- a/solvebio/auth.py
+++ b/solvebio/auth.py
@@ -1,9 +1,7 @@
-from __future__ import absolute_import
-
 import os
 from typing import Literal, Tuple
 
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 import logging
diff --git a/solvebio/cli/auth.py b/solvebio/cli/auth.py
index ba147243..81d4964c 100644
--- a/solvebio/cli/auth.py
+++ b/solvebio/cli/auth.py
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import print_function
 
 import solvebio
 from ..client import client
diff --git a/solvebio/cli/credentials.py b/solvebio/cli/credentials.py
index 0e2d155d..c98ae66f 100644
--- a/solvebio/cli/credentials.py
+++ b/solvebio/cli/credentials.py
@@ -1,8 +1,5 @@
 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals
-from __future__ import absolute_import
 from collections import namedtuple
-import six
 
 import solvebio
 
@@ -55,11 +52,11 @@ def save(self, path):
         for host in self.hosts.keys():
             attrs = self.hosts[host]
             rep = (
-                rep + "machine " + host + "\n\tlogin " + six.text_type(attrs[0]) + "\n"
+                rep + "machine " + host + "\n\tlogin " + str(attrs[0]) + "\n"
             )
             if attrs[1]:
-                rep = rep + "\taccount " + six.text_type(attrs[1]) + "\n"
-            rep = rep + "\tpassword " + six.text_type(attrs[2]) + "\n"
+                rep = rep + "\taccount " + str(attrs[1]) + "\n"
+            rep = rep + "\tpassword " + str(attrs[2]) + "\n"
 
         f = open(path, "w")
         f.write(rep)
diff --git a/solvebio/cli/data.py b/solvebio/cli/data.py
index 9917ab1b..6d718217 100644
--- a/solvebio/cli/data.py
+++ b/solvebio/cli/data.py
@@ -1,12 +1,8 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import print_function
 
 import concurrent.futures
 from concurrent.futures import ThreadPoolExecutor
 
-from six.moves import input as raw_input
-
 import os
 import re
 import sys
@@ -128,7 +124,8 @@ def _upload_folder(
         dry_run=False,
         num_processes=1,
         archive_folder=None,
-        follow_shortcuts=False
+        follow_shortcuts=False,
+        max_retries=3,
 ):
     all_folders = []
     all_files = []
@@ -183,7 +180,7 @@ def _upload_folder(
             if should_exclude(local_file_path, exclude_paths, dry_run=dry_run):
                 continue
             all_files.append((local_file_path, remote_folder_full_path, vault.full_path,
-                              dry_run, archive_folder, client_auth, follow_shortcuts))
+                              dry_run, archive_folder, client_auth, follow_shortcuts, max_retries))
 
     if num_processes > 1:
         # Only perform optimization if parallelization is requested by the user
@@ -236,12 +233,13 @@ def _create_file_job(args):
         args[4] (archive_folder): An archive folder to move existing files into
         args[5] (client_auth): Tuple containing API host, token, and token type
         args[6] (follow_shortcuts): Boolean to follow shortcuts on the remote_folder_path
+        args[7] (max_retries): Maximum number of retries per upload part
 
     Returns: None or Exception if exception is raised.
     """
     try:
-        local_file_path, remote_folder_full_path, vault_path, dry_run, archive_folder, client_auth, follow_shortcuts \
-            = args
+        (local_file_path, remote_folder_full_path, vault_path, dry_run,
+         archive_folder, client_auth, follow_shortcuts, max_retries) = args
 
         # Provides the global host, token, token_type
         client = SolveClient(*client_auth)
@@ -274,6 +272,8 @@ def _create_file_job(args):
                 remote_parent.vault.full_path,
                 archive_folder=archive_folder,
                 follow_shortcuts=follow_shortcuts,
+                num_processes=1,  # Default for single file uploads in parallel processing
+                max_retries=max_retries,
                 client=client
             )
             return
@@ -511,7 +511,8 @@ def upload(args):
             dry_run=args.dry_run,
             num_processes=args.num_processes,
             archive_folder=args.archive_folder,
-            follow_shortcuts=follow_shortcuts
+            follow_shortcuts=follow_shortcuts,
+            max_retries=args.max_retries,
         )
     else:
         if args.dry_run:
@@ -519,7 +520,14 @@ def upload(args):
                 "[Dry Run] Uploading {} to {}".format(local_path, path_dict["path"])
             )
         else:
-            Object.upload_file(local_path, path_dict["path"], vault.full_path, archive_folder=args.archive_folder)
+            Object.upload_file(
+                local_path,
+                path_dict["path"],
+                vault.full_path,
+                archive_folder=args.archive_folder,
+                num_processes=args.num_processes,
+                max_retries=args.max_retries,
+            )
 
 
 def import_file(args):
@@ -1107,7 +1115,7 @@ def tag(args):
 
     if not args.no_input:
         print("")
-        res = raw_input(
+        res = input(
             "Are you sure you want to apply the above changes to "
             "{} object(s) in {} vault(s)? [y/N] ".format(
                 len(taggable_objects), len(seen_vaults.keys())
diff --git a/solvebio/cli/ipython.py b/solvebio/cli/ipython.py
index 89295aa9..b403979c 100644
--- a/solvebio/cli/ipython.py
+++ b/solvebio/cli/ipython.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 
 import sys
 import os
diff --git a/solvebio/cli/main.py b/solvebio/cli/main.py
index 4951610f..5f136d79 100644
--- a/solvebio/cli/main.py
+++ b/solvebio/cli/main.py
@@ -1,6 +1,5 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import os
 import sys
 import copy
@@ -280,6 +279,13 @@ class SolveArgumentParser(argparse.ArgumentParser):
                 "help": "Resolves shortcuts when Uploading.",
                 "action": "store_true",
             },
+            {
+                "flags": "--max-retries",
+                "help": "Maximum number of retries per upload part for multipart uploads. "
+                        "Defaults to 3.",
+                "default": 3,
+                "type": int,
+            },
             {
                 "name": "local_path",
                 "help": "The path to the local file or directory "
                         "to upload",
             },
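
The new `--max-retries` entry is declared in the parser's option table above; in plain argparse terms it behaves roughly like this sketch (parser name and subcommand invented):

    import argparse

    parser = argparse.ArgumentParser(prog='solvebio upload')
    parser.add_argument(
        '--max-retries',
        help='Maximum number of retries per upload part for multipart uploads. '
             'Defaults to 3.',
        default=3,
        type=int,
    )

    args = parser.parse_args(['--max-retries', '5'])
    assert args.max_retries == 5  # dashes become underscores on the namespace
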
[y/N] ".format( len(taggable_objects), len(seen_vaults.keys()) diff --git a/solvebio/cli/ipython.py b/solvebio/cli/ipython.py index 89295aa9..b403979c 100644 --- a/solvebio/cli/ipython.py +++ b/solvebio/cli/ipython.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import import sys import os diff --git a/solvebio/cli/main.py b/solvebio/cli/main.py index 4951610f..5f136d79 100644 --- a/solvebio/cli/main.py +++ b/solvebio/cli/main.py @@ -1,6 +1,5 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -from __future__ import absolute_import import os import sys import copy @@ -280,6 +279,13 @@ class SolveArgumentParser(argparse.ArgumentParser): "help": "Resolves shortcuts when Uploading.", "action": "store_true", }, + { + "flags": "--max-retries", + "help": "Maximum number of retries per upload part for multipart uploads. " + "Defaults to 3.", + "default": 3, + "type": int, + }, { "name": "local_path", "help": "The path to the local file or directory " "to upload", diff --git a/solvebio/cli/tutorial.py b/solvebio/cli/tutorial.py index 38d6a9c0..ac5a31d0 100644 --- a/solvebio/cli/tutorial.py +++ b/solvebio/cli/tutorial.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import import os from pydoc import pager diff --git a/solvebio/client.py b/solvebio/client.py index 4b683cd6..4ce4ecbb 100644 --- a/solvebio/client.py +++ b/solvebio/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -from __future__ import absolute_import import json import time @@ -24,7 +23,7 @@ import ssl import sys -from six.moves.urllib.parse import urljoin +from urllib.parse import urljoin # Try using pyopenssl if available. # Requires: pip install pyopenssl ndg-httpsclient pyasn1 diff --git a/solvebio/contrib/dash/__init__.py b/solvebio/contrib/dash/__init__.py index 305ace6a..c2bc9646 100644 --- a/solvebio/contrib/dash/__init__.py +++ b/solvebio/contrib/dash/__init__.py @@ -1,3 +1,2 @@ -from __future__ import absolute_import from .solvebio_auth import SolveBioAuth # noqa: F401 from .solvebio_dash import SolveBioDash # noqa: F401 diff --git a/solvebio/contrib/dash/solvebio_auth.py b/solvebio/contrib/dash/solvebio_auth.py index ed15f22f..14d2e987 100644 --- a/solvebio/contrib/dash/solvebio_auth.py +++ b/solvebio/contrib/dash/solvebio_auth.py @@ -1,18 +1,16 @@ -from __future__ import absolute_import - import json import flask import requests import os -from six.moves.urllib.parse import urljoin +from urllib.parse import urljoin -from dash_auth.oauth import OAuthBase +from dash_auth.auth import Auth import solvebio -class SolveBioAuth(OAuthBase): +class SolveBioAuth(Auth): """Handles OAuth2 flows with the SolveBio API.""" AUTH_COOKIE_NAME = 'dash_solvebio_auth' TOKEN_COOKIE_NAME = 'solvebio_oauth_token' @@ -26,12 +24,16 @@ class SolveBioAuth(OAuthBase): def __init__(self, app, app_url, client_id, **kwargs): secret_key = kwargs.get('secret_key') or app.server.secret_key - super(SolveBioAuth, self).__init__( - app, - app_url, - client_id, - secret_key=secret_key, - salt=kwargs.get('salt')) + + # Initialize the base Auth class + super().__init__(app, public_routes=kwargs.get('public_routes', [])) + + # Store OAuth configuration + self._app = app + self._app_url = app_url + self._oauth_client_id = client_id + self._secret_key = secret_key + self._salt = kwargs.get('salt') # Add logout URL app.server.add_url_rule( @@ -71,6 +73,22 @@ def __init__(self, app, app_url, client_id, **kwargs): with open(os.path.join(_current_path, 'login.js'), 'r') as f: self.login_bundle = f.read() + def is_authorized(self): + """Check if the user is 
diff --git a/solvebio/contrib/dash/solvebio_dash.py b/solvebio/contrib/dash/solvebio_dash.py
index 067291eb..284c44d6 100644
--- a/solvebio/contrib/dash/solvebio_dash.py
+++ b/solvebio/contrib/dash/solvebio_dash.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
 import dash
 import flask
 
@@ -23,6 +20,7 @@ def __init__(self, name, *args, **kwargs):
 
         app_url = kwargs.pop('app_url', self.APP_URL)
         solvebio_url = kwargs.pop('solvebio_url', self.SOLVEBIO_URL)
+        api_host = kwargs.pop('api_host', None)  # Extract api_host before passing to Dash
 
         # OAuth2 credentials
         client_id = kwargs.pop('client_id',
@@ -46,7 +44,8 @@ def __init__(self, name, *args, **kwargs):
                 salt=salt,
                 client_secret=client_secret,
                 grant_type=grant_type,
-                solvebio_url=solvebio_url)
+                solvebio_url=solvebio_url,
+                api_host=api_host)
         else:
             self.auth = None
             print("WARNING: No SolveBio client ID found. "
diff --git a/solvebio/contrib/dash/tests/IntegrationTests.py b/solvebio/contrib/dash/tests/IntegrationTests.py
index c791e859..9bbd8fe6 100644
--- a/solvebio/contrib/dash/tests/IntegrationTests.py
+++ b/solvebio/contrib/dash/tests/IntegrationTests.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 from selenium import webdriver
 import multiprocessing
 import requests
diff --git a/solvebio/contrib/dash/tests/test_solvebio_auth.py b/solvebio/contrib/dash/tests/test_solvebio_auth.py
index cab5fb6c..7222dc5d 100644
--- a/solvebio/contrib/dash/tests/test_solvebio_auth.py
+++ b/solvebio/contrib/dash/tests/test_solvebio_auth.py
@@ -1,22 +1,12 @@
-from __future__ import absolute_import
 import unittest
 import dash
 import dash_html_components as html
 import time
-import six
-from six.moves import http_cookies
-from six import iteritems
+from http import cookies
+from unittest import mock
 
 from solvebio.contrib.dash import SolveBioAuth
-from .credentials import OAUTH_CLIENT_ID
-from .credentials import OAUTH_TOKEN
-
-if six.PY3:
-    from unittest import mock
-else:
-    import mock  # noqa
-
 
 endpoints = {
     'protected': {
         'get': [
@@ -46,7 +36,7 @@ def get_cookie(res, cookie_name):
             print(set_cookie_strings)
             raise e
 
-    cookie = http_cookies.SimpleCookie(cookie_string)
+    cookie = cookies.SimpleCookie(cookie_string)
     access_granted_cookie = cookie[list(cookie.keys())[0]].value
     return access_granted_cookie
 
@@ -147,7 +137,7 @@ def get_client():
     def test_protected_endpoints_with_auth_cookie(self):
         apps, auths = create_apps(self._oauth_client_id)
 
-        for app_name, app in iteritems(apps):
+        for app_name, app in apps.items():
             if app_name != 'unregistered':
                 app.layout = html.Div()
                 self.check_endpoints(
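
`six.moves.http_cookies` maps to the standard-library `http.cookies` module on Python 3; its `SimpleCookie` parses cookie strings exactly the way the test uses it:

    from http import cookies  # Python 3 home of six.moves.http_cookies

    jar = cookies.SimpleCookie()
    jar.load('solvebio_oauth_token=abc123; Path=/')
    print(jar['solvebio_oauth_token'].value)  # -> abc123
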
diff --git a/solvebio/errors.py b/solvebio/errors.py
index a1cc2318..0aa1debc 100644
--- a/solvebio/errors.py
+++ b/solvebio/errors.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import logging
 
 logger = logging.getLogger('solvebio')
diff --git a/solvebio/global_search.py b/solvebio/global_search.py
index 35299849..e46d281a 100644
--- a/solvebio/global_search.py
+++ b/solvebio/global_search.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from .client import client
 from .resource import Object
diff --git a/solvebio/help.py b/solvebio/help.py
index 64ffb341..24a39049 100644
--- a/solvebio/help.py
+++ b/solvebio/help.py
@@ -1,7 +1,4 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
-from six.moves.urllib.parse import urljoin
+from urllib.parse import urljoin
 
 try:
     import webbrowser
diff --git a/solvebio/query.py b/solvebio/query.py
index f8ee1856..b6be93f8 100644
--- a/solvebio/query.py
+++ b/solvebio/query.py
@@ -1,9 +1,7 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from abc import ABCMeta
 
-import six
 import json
 import uuid
 
@@ -221,8 +219,7 @@ def __repr__(self):
         return '<Filter {}>'.format(self.filters)
 
 
-@six.add_metaclass(ABCMeta)
-class QueryBase(object):
+class QueryBase(object, metaclass=ABCMeta):
     """
     A helper abstract mixin class that contains common methods
     for Query and QueryFile classes.
@@ -364,7 +361,7 @@ def __getitem__(self, key):
         :Parameters:
         - `key`: The requested slice range or index.
         """
-        if not isinstance(key, (slice,) + six.integer_types):
+        if not isinstance(key, (slice, int)):
             raise TypeError
 
         if isinstance(key, slice):
@@ -681,7 +678,7 @@ def facets(self, *args, **kwargs):
             raise AttributeError('Faceting requires at least one field')
 
         for f in facets.keys():
-            if not isinstance(f, six.string_types):
+            if not isinstance(f, str):
                 raise AttributeError('Facet field arguments must be strings')
 
         q = self._clone()
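
`class QueryBase(object, metaclass=ABCMeta)` is the Python 3 spelling of the removed `@six.add_metaclass(ABCMeta)` decorator; abstract-method enforcement is unchanged. A self-contained illustration (class names invented):

    from abc import ABCMeta, abstractmethod

    class QueryLike(object, metaclass=ABCMeta):   # Python 3 spelling
        @abstractmethod
        def execute(self):
            ...

    class ListQuery(QueryLike):
        def execute(self):
            return []

    ListQuery().execute()   # OK
    # QueryLike() would raise TypeError: can't instantiate abstract class
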
diff --git a/solvebio/resource/__init__.py b/solvebio/resource/__init__.py
index 041fdffb..53da602d 100644
--- a/solvebio/resource/__init__.py
+++ b/solvebio/resource/__init__.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .apiresource import ListObject
 from .user import User
 from .dataset import Dataset
diff --git a/solvebio/resource/apiresource.py b/solvebio/resource/apiresource.py
index 18c8de78..af56edb5 100644
--- a/solvebio/resource/apiresource.py
+++ b/solvebio/resource/apiresource.py
@@ -1,9 +1,6 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
-import six
-from six.moves import zip
-from six.moves.urllib.parse import unquote
-from six.moves import input as raw_input
+
+from urllib.parse import unquote
 
 import os
 import requests
@@ -58,7 +55,7 @@ def instance_url(self):
         base = self.class_url()
 
         if id_:
-            return '/'.join([base, six.text_type(id_)])
+            return '/'.join([base, str(id_)])
         else:
             raise Exception(
                 'Could not determine which URL to request: %s instance '
@@ -173,8 +170,8 @@ def delete(self, **params):
         printable_name = class_to_api_name(
             self.class_name(), pluralize=False).replace('_', ' ')
         if not params.pop('force', False):
-            res = raw_input('Are you sure you want to delete this %s? '
-                            '[y/N] ' % printable_name)
+            res = input('Are you sure you want to delete this %s? '
+                        '[y/N] ' % printable_name)
             if res.strip().lower() != 'y':
                 print('Not performing deletion.')
                 return
diff --git a/solvebio/resource/manifest.py b/solvebio/resource/manifest.py
index 02b55c49..02a57c5e 100644
--- a/solvebio/resource/manifest.py
+++ b/solvebio/resource/manifest.py
@@ -1,11 +1,9 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
-from __future__ import print_function
 
 import os
 import glob
 
-from six.moves.urllib.parse import urlparse
+from urllib.parse import urlparse
 
 import solvebio
diff --git a/solvebio/resource/object.py b/solvebio/resource/object.py
index 0c27f8e8..bc94c751 100644
--- a/solvebio/resource/object.py
+++ b/solvebio/resource/object.py
@@ -9,8 +9,7 @@
 from datetime import datetime
 
 import requests
-import six
-from requests.packages.urllib3.util.retry import Retry
+from urllib3.util.retry import Retry
 
 from solvebio.errors import SolveError
 from solvebio.errors import NotFoundError
@@ -29,6 +28,75 @@
 from .apiresource import DeletableAPIResource
 from .apiresource import DownloadableAPIResource
 
+
+class UploadProgressTracker:
+    """Simple progress tracking for multipart uploads."""
+
+    def __init__(self, total_parts, total_size):
+        self.total_parts = total_parts
+        self.total_size = total_size
+        self.completed_parts = 0
+        self.completed_bytes = 0
+        self.start_time = time.time()
+        self.progress_line_active = False
+
+    def update_progress(self, part_size, part_duration=None):
+        """Update progress with new part completion - overwrites same line"""
+        self.completed_parts += 1
+        self.completed_bytes += part_size
+
+        # Calculate metrics
+        elapsed = time.time() - self.start_time
+        avg_speed = self.completed_bytes / elapsed if elapsed > 0 else 0
+        remaining_bytes = self.total_size - self.completed_bytes
+        eta = remaining_bytes / avg_speed if avg_speed > 0 else 0
+
+        # Format bytes for display
+        completed_mb = self.completed_bytes / 1024 / 1024
+        total_mb = self.total_size / 1024 / 1024
+        speed_mb = avg_speed / 1024 / 1024
+
+        # Create simple progress message
+        progress_msg = f"{self.completed_parts}/{self.total_parts} parts uploaded"
+        progress_msg += f" ({completed_mb:.1f}MB/{total_mb:.1f}MB)"
+
+        if avg_speed > 0:
+            progress_msg += f" [{speed_mb:.1f}MB/s"
+            if eta > 0:
+                progress_msg += f", ETA: {eta:.0f}s"
+            progress_msg += "]"
+
+        # Use \r to overwrite the same line (like Unix downloads)
+        print(f"\r{progress_msg}", end="", flush=True)
+        self.progress_line_active = True
+
+        # Print newline only when complete
+        if self.completed_parts >= self.total_parts:
+            print()  # Final newline
+            self.progress_line_active = False
+
+    def get_completion_percentage(self):
+        """Get completion percentage"""
+        return (
+            (self.completed_bytes / self.total_size) * 100 if self.total_size > 0 else 0
+        )
+
+    def get_elapsed_time(self):
+        """Get total elapsed time"""
+        return time.time() - self.start_time
+
+    def get_average_speed(self):
+        """Get average upload speed in bytes/second"""
+        elapsed = time.time() - self.start_time
+        return self.completed_bytes / elapsed if elapsed > 0 else 0
+
+    def notify_error(self):
+        """Notify that an error message will be printed - move to new line"""
+        if self.progress_line_active:
+            print()  # Move to new line before error message
+            self.progress_line_active = False
+
+
 if sys.version_info >= (3, 9, 0):
     from collections.abc import Iterable
 else:
     from collections import Iterable
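
A hypothetical driver for `UploadProgressTracker` showing its contract: one `update_progress()` call per finished part redraws a single `\r`-terminated status line, and the final call emits the closing newline itself (sizes invented; assumes the class as added above is importable):

    import time

    # Three 25 MB parts
    tracker = UploadProgressTracker(total_parts=3, total_size=3 * 25 * 1024 * 1024)
    for _ in range(3):
        time.sleep(0.1)                            # stand-in for one part upload
        tracker.update_progress(25 * 1024 * 1024)  # redraws the same status line
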
@@ -545,14 +613,17 @@ def _upload_single_file(cls, obj, local_path, **kwargs):
         # Use a session with a retry policy to handle connection errors.
         session = requests.Session()
         max_retries = 5
-        retry = Retry(
-            total=max_retries,
-            read=max_retries,
-            connect=max_retries,
-            backoff_factor=2,
-            status_forcelist=(500, 502, 503, 504, 400),
-            allowed_methods=["HEAD", "OPTIONS", "GET", "PUT", "POST"]
-        )
+
+        retry_kwargs = {
+            'total': max_retries,
+            'read': max_retries,
+            'connect': max_retries,
+            'backoff_factor': 2,
+            'status_forcelist': (500, 502, 503, 504, 400),
+            'allowed_methods': ["HEAD", "OPTIONS", "GET", "PUT", "POST"]
+        }
+
+        retry = Retry(**retry_kwargs)
         session.mount(
             'https://', requests.adapters.HTTPAdapter(max_retries=retry))
@@ -588,108 +659,356 @@ def _upload_single_file(cls, obj, local_path, **kwargs):
 
         return obj
 
+    @classmethod
+    def refresh_presigned_urls(cls, upload_id, key, total_size, part_numbers, **kwargs):
+        """Refresh presigned URLs for multipart upload
+
+        Args:
+            upload_id (str): The upload ID from the multipart upload
+            key (str): The upload key/identifier
+            total_size (int): Total size of the file being uploaded
+            part_numbers (list[int]): List of part numbers to refresh URLs for
+            **kwargs: Additional parameters including client
+
+        Returns:
+            list: List of presigned URL objects with part information
+        """
+        _client = kwargs.get("client") or cls._client or client
+
+        payload = {
+            "upload_id": upload_id,
+            "key": key,
+            "total_size": total_size,
+            "part_numbers": part_numbers,
+        }
+
+        try:
+            response = _client.post("/v2/presigned_urls", payload)
+
+            if "presigned_urls" in response:
+                return response["presigned_urls"]
+            else:
+                raise FileUploadError(
+                    "Invalid response from presigned URLs API: missing 'presigned_urls' key"
+                )
+        except Exception as e:
+            raise FileUploadError(f"Failed to refresh presigned URLs: {str(e)}")
+
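Under the assumptions of the payload above (`/v2/presigned_urls` returning a `presigned_urls` list of dicts carrying an `upload_url` key), a retry path could re-sign individual parts like this hypothetical call (identifiers and sizes invented):

    from solvebio import Object

    fresh = Object.refresh_presigned_urls(
        upload_id='abc123',
        key='uploads/sample.vcf.gz',
        total_size=5 * 1024 ** 3,
        part_numbers=[4, 7],
    )
    for part in fresh:
        print(part['upload_url'])  # new URL to PUT that part's bytes against
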
     @classmethod
     def _upload_multipart(cls, obj, local_path, local_md5, **kwargs):
-        """Handle multipart upload for larger files"""
+        """Enhanced multipart upload with parallel parts and presigned URL refresh"""
         _client = kwargs.get("client") or cls._client or client
-        print(f"Notice: Upload ID {obj.upload_id}")
+        num_processes = kwargs.get("num_processes", 1)
+        max_retries = kwargs.get("max_retries", 3)
+
         try:
-            # Get presigned URLs from the object
+            # Get initial presigned URLs
             presigned_urls = obj.presigned_urls
+            total_parts = len(presigned_urls)
 
             print(
-                "Notice: Starting multipart upload with {} parts...".format(
-                    len(presigned_urls)
+                f"Starting multipart upload with {total_parts} parts using {num_processes} worker(s)..."
+            )
+
+            # Initialize progress tracker
+            progress_tracker = UploadProgressTracker(total_parts, obj.size)
+
+            # Prepare part upload tasks
+            part_tasks = []
+            for i, part_info in enumerate(presigned_urls):
+                part_tasks.append(
+                    {
+                        "part_number": part_info.part_number,
+                        "start_byte": part_info.start_byte,
+                        "end_byte": part_info.end_byte,
+                        "part_size": part_info.size,
+                        "upload_url": part_info.upload_url,
+                        "part_index": i,
+                        "max_retries": max_retries,
+                        "upload_id": obj.upload_id,
+                        "upload_key": obj.upload_key,
+                    }
+                )
+
+            # Upload parts in parallel or sequentially
+            if num_processes > 1:
+                parts = cls._upload_parts_parallel(
+                    local_path,
+                    part_tasks,
+                    obj,
+                    _client,
+                    num_processes,
+                    progress_tracker,
                 )
+            else:
+                parts = cls._upload_parts_sequential(
+                    local_path, part_tasks, obj, _client, progress_tracker
+                )
+
+            # Complete multipart upload
+            return cls._complete_multipart_upload(obj, parts, _client, local_path)
+
+        except Exception as e:
+            cls._cleanup_failed_upload(obj, _client)
+            raise FileUploadError("Multipart upload failed: {}".format(str(e)))
+
+    @classmethod
+    def _upload_parts_parallel(
+        cls, local_path, part_tasks, obj, _client, num_processes, progress_tracker
+    ):
+        """Upload parts in parallel using ThreadPoolExecutor"""
+
+        parts = [None] * len(part_tasks)  # Pre-allocate array for ordered results
+
+        failed_parts = cls._upload_parts_with_threadpool(
+            local_path,
+            part_tasks,
+            obj,
+            _client,
+            parts,
+            progress_tracker,
+            num_processes,
+        )
+
+        # Retry failed parts
+        if failed_parts:
+            print(f"Retrying {len(failed_parts)} failed parts in parallel...")
+            for task in failed_parts:
+                task["max_retries"] = 3
+            retry_failed = cls._upload_parts_with_threadpool(
+                local_path,
+                failed_parts,
+                obj,
+                _client,
+                parts,
+                progress_tracker,
+                num_processes,
             )
+            if retry_failed:
+                raise Exception(
+                    f"Failed to upload {len(retry_failed)} parts after retry"
+                )
 
-            # Step 2: Upload each part using presigned URLs
-            parts = []
-            with open(local_path, "rb") as f:
-                for part_info in presigned_urls:
-                    part_number = part_info.part_number
-                    start_byte = part_info.start_byte
-                    end_byte = part_info.end_byte
-                    part_size = part_info.size
-                    upload_url = part_info.upload_url
+        return [part for part in parts if part is not None]
 
(bytes {}-{})".format( - part_number, len(presigned_urls), start_byte, end_byte - ) + f"ERROR: {task['worker_id']} failed part {task['part_number']}: {e}" ) + failed_parts.append(task) + + return failed_parts - # Seek to start position and read the exact part size - f.seek(start_byte) - chunk_data = f.read(part_size) - if not chunk_data: - break - - # Upload part with retry logic - session = requests.Session() - retry = Retry( - total=3, - backoff_factor=2, - status_forcelist=(500, 502, 503, 504), - allowed_methods=["PUT"], + @classmethod + def _upload_parts_sequential( + cls, local_path, part_tasks, obj, _client, progress_tracker + ): + """Upload parts sequentially with retry logic for failed parts""" + parts = [None] * len(part_tasks) # Pre-allocate for consistency + failed_parts = [] + + with open(local_path, "rb") as f: + for i, task in enumerate(part_tasks): + try: + part_result = cls._upload_single_part( + local_path, task, obj, _client, progress_tracker, file_handle=f ) - session.mount( - "https://", requests.adapters.HTTPAdapter(max_retries=retry) + parts[i] = part_result + # Update progress with part size + progress_tracker.update_progress(task["part_size"]) + except Exception as e: + # Notify progress tracker about error and print error message + progress_tracker.notify_error() + print( + f"ERROR: Sequential worker failed part {task['part_number']}: {e}" + ) + failed_parts.append(task) + + # Retry failed parts sequentially + if failed_parts: + print(f"Retrying {len(failed_parts)} failed parts sequentially...") + for task in failed_parts: + try: + part_result = cls._upload_single_part( + local_path, task, obj, _client, progress_tracker ) + parts[task["part_index"]] = part_result + # Update progress with part size + progress_tracker.update_progress(task["part_size"]) + except Exception as e: + # Notify progress tracker about error and print error message + progress_tracker.notify_error() + print( + f"FINAL ERROR: Sequential worker part {task['part_number']} failed after all retries: {e}" + ) + raise e - headers = { - "Content-Length": str(len(chunk_data)), - } + return [part for part in parts if part is not None] - upload_resp = session.put( - upload_url, data=chunk_data, headers=headers + @classmethod + def _upload_single_part( + cls, local_path, task, obj, _client, progress_tracker=None, file_handle=None + ): + """Upload a single part with retry logic and presigned URL refresh""" + part_number = task["part_number"] + start_byte = task["start_byte"] + part_size = task["part_size"] + max_retries = task["max_retries"] + upload_id = task["upload_id"] + upload_key = task["upload_key"] + worker_id = task.get("worker_id", "Sequential worker") + + for attempt in range(max_retries): + try: + # Get fresh presigned URL if this is a retry + if attempt > 0: + print( + f"{worker_id} retrying part {part_number} (attempt {attempt + 1}/{max_retries})" ) + # Refresh presigned URLs for this specific part + fresh_urls = cls.refresh_presigned_urls( + upload_id=upload_id, + key=upload_key, + total_size=obj.size, + part_numbers=[part_number], + client=_client, + ) + upload_url = fresh_urls[0]["upload_url"] + else: + upload_url = task["upload_url"] - if upload_resp.status_code != 200: - raise FileUploadError( - "Failed to upload part {}: {}".format( - part_number, upload_resp.content - ) - ) + # Read part data + if file_handle: + # Use provided file handle (sequential mode) + file_handle.seek(start_byte) + chunk_data = file_handle.read(part_size) + else: + # Open file for this part (parallel mode) + 
-                    upload_resp = session.put(
-                        upload_url, data=chunk_data, headers=headers
+    @classmethod
+    def _upload_single_part(
+        cls, local_path, task, obj, _client, progress_tracker=None, file_handle=None
+    ):
+        """Upload a single part with retry logic and presigned URL refresh"""
+        part_number = task["part_number"]
+        start_byte = task["start_byte"]
+        part_size = task["part_size"]
+        max_retries = task["max_retries"]
+        upload_id = task["upload_id"]
+        upload_key = task["upload_key"]
+        worker_id = task.get("worker_id", "Sequential worker")
+
+        for attempt in range(max_retries):
+            try:
+                # Get fresh presigned URL if this is a retry
+                if attempt > 0:
+                    print(
+                        f"{worker_id} retrying part {part_number} (attempt {attempt + 1}/{max_retries})"
                     )
+                    # Refresh presigned URLs for this specific part
+                    fresh_urls = cls.refresh_presigned_urls(
+                        upload_id=upload_id,
+                        key=upload_key,
+                        total_size=obj.size,
+                        part_numbers=[part_number],
+                        client=_client,
+                    )
+                    upload_url = fresh_urls[0]["upload_url"]
+                else:
+                    upload_url = task["upload_url"]
 
-                    if upload_resp.status_code != 200:
-                        raise FileUploadError(
-                            "Failed to upload part {}: {}".format(
-                                part_number, upload_resp.content
-                            )
-                        )
+                # Read part data
+                if file_handle:
+                    # Use provided file handle (sequential mode)
+                    file_handle.seek(start_byte)
+                    chunk_data = file_handle.read(part_size)
+                else:
+                    # Open file for this part (parallel mode)
+                    with open(local_path, "rb") as f:
+                        f.seek(start_byte)
+                        chunk_data = f.read(part_size)
 
-                    # Get ETag from response
-                    etag = upload_resp.headers.get("ETag", "").strip('"')
-                    parts.append({"part_number": part_number, "etag": etag})
-
-            # Step 3: Complete multipart upload
-            print("Notice: Completing multipart upload....")
-            complete_data = {
-                "upload_id": obj.upload_id,
-                "physical_object_id": obj.upload_key,
-                "parts": parts,
-            }
+                if not chunk_data:
+                    break
 
-            print(f"Notice: {complete_data}")
+                # Upload without requests-level retry (let our custom retry handle it)
+                session = requests.Session()
+
+                headers = {"Content-Length": str(len(chunk_data))}
+
+                # Calculate timeout based on part size
+                part_size_mb = len(chunk_data) / (1024 * 1024)
+                # Timeout scaling for large parts
+                # Formula: 20min base + 30s per MB to handle very large parts
+                # This ensures adequate timeout even with slow connections
+                base_timeout = 1200  # 20 minutes base
+                scaling_factor = 30  # 30 seconds per 1MB
+                total_timeout = base_timeout + part_size_mb * scaling_factor
+
+                upload_resp = session.put(
+                    upload_url,
+                    data=chunk_data,
+                    headers=headers,
+                    timeout=total_timeout,
+                )
 
-            complete_resp = _client.post("/v2/complete_multi_part", complete_data)
+                if upload_resp.status_code == 200:
+                    etag = upload_resp.headers.get("ETag", "").strip('"')
+                    return {"part_number": part_number, "etag": etag}
+                else:
+                    raise FileUploadError(
+                        f"{worker_id} failed part {part_number}: {upload_resp.status_code} - {upload_resp.content}"
+                    )
 
-            if "message" in complete_resp:
-                print(
-                    "Notice: Successfully uploaded {0} to {1} with multipart upload.".format(
-                        local_path, obj.path
+            except Exception as e:
+                if attempt == max_retries - 1:  # Last attempt
+                    raise FileUploadError(
+                        f"{worker_id} part {part_number} failed after {max_retries} attempts: {e}"
                     )
-                )
-                return obj
-            else:
-                raise Exception(complete_resp)
 
-        except Exception as e:
-            # Clean up failed upload - best effort cleanup
-            try:
-                _client.delete(
-                    obj.instance_url() + "/multipart-upload",
-                    {},
+                # Wait before retry (exponential backoff)
+                wait_time = 2**attempt
+                if progress_tracker:
+                    progress_tracker.notify_error()
+                print(
+                    f"{worker_id} part {part_number} failed "
+                    f"(attempt {attempt + 1}/{max_retries}): {str(e)}, retrying in {wait_time}s..."
                 )
-            except Exception:
-                pass  # Best effort cleanup
+                time.sleep(wait_time)
 
-            obj.delete(force=True)
-            raise FileUploadError("Multipart upload failed: {}".format(str(e)))
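
Worked numbers for the timeout and backoff logic above, using the formula and schedule exactly as coded (part size invented):

    # Timeout for a 64 MB part: 1200 + 64 * 30 = 3120 seconds (52 minutes)
    part_size_mb = 64
    total_timeout = 1200 + part_size_mb * 30
    assert total_timeout == 3120

    # Backoff schedule for max_retries=3 (attempts 0, 1, 2):
    waits = [2 ** attempt for attempt in range(3)]
    assert waits == [1, 2, 4]  # seconds slept between tries, never after the last
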
+    @classmethod
+    def _complete_multipart_upload(cls, obj, parts, _client, local_path):
+        """Complete the multipart upload"""
+        print("Completing multipart upload...")
+        complete_data = {
+            "upload_id": obj.upload_id,
+            "physical_object_id": obj.upload_key,
+            "parts": parts,
+        }
+        complete_resp = _client.post("/v2/complete_multi_part", complete_data)
+
+        if "message" in complete_resp:
+            print(
+                f"Successfully uploaded {local_path} to {obj.path} with multipart upload using {len(parts)} parts."
+            )
+            return obj
+        else:
+            raise Exception(complete_resp)
+
+    @classmethod
+    def _cleanup_failed_upload(cls, obj, _client):
+        """Clean up failed upload - best effort cleanup"""
+        try:
+            _client.delete(
+                obj.instance_url() + "/multipart-upload",
+                {},
+            )
+        except Exception:
+            pass  # Best effort cleanup
+        obj.delete(force=True)
 
     def _object_list_helper(self, **params):
         """Helper method to get objects within"""
@@ -832,7 +1151,7 @@ def tag(self, tags, remove=False, dry_run=False, apply_save=True):
         def is_iterable_non_string(arg):
             """python2/python3 compatible way to check if arg is an iterable but not string"""
-            return isinstance(arg, Iterable) and not isinstance(arg, six.string_types)
+            return isinstance(arg, Iterable) and not isinstance(arg, (str, bytes))
 
         if not is_iterable_non_string(tags):
             tags = [str(tags)]
diff --git a/solvebio/resource/solveobject.py b/solvebio/resource/solveobject.py
index ac82ebe5..fb0a81fe 100644
--- a/solvebio/resource/solveobject.py
+++ b/solvebio/resource/solveobject.py
@@ -1,6 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
-import six
 
 import sys
 
@@ -18,7 +16,7 @@ def convert_to_solve_object(resp, **kwargs):
     elif isinstance(resp, dict) and not isinstance(resp, SolveObject):
         resp = resp.copy()
         klass_name = resp.get('class_name')
-        if isinstance(klass_name, six.string_types):
+        if isinstance(klass_name, str):
             klass = types.get(klass_name, SolveObject)
         else:
             klass = SolveObject
@@ -75,7 +73,7 @@ def refresh_from(self, values):
         self.clear()
         self._unsaved_values = set()
 
-        for k, v in six.iteritems(values):
+        for k, v in values.items():
             super(SolveObject, self).__setitem__(
                 k, convert_to_solve_object(v, client=self._client))
 
@@ -84,7 +82,7 @@ def request(self, method, url, **kwargs):
         return convert_to_solve_object(response, client=self._client)
 
     def __repr__(self):
-        if isinstance(self.get('class_name'), six.string_types):
+        if isinstance(self.get('class_name'), str):
             ident_parts = [self.get('class_name')]
         else:
             ident_parts = [type(self).__name__]
diff --git a/solvebio/resource/util.py b/solvebio/resource/util.py
index 45d77321..d9836f2c 100644
--- a/solvebio/resource/util.py
+++ b/solvebio/resource/util.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import re
 try:
diff --git a/solvebio/test/client_mocks.py b/solvebio/test/client_mocks.py
index c122bfca..4b19c900 100644
--- a/solvebio/test/client_mocks.py
+++ b/solvebio/test/client_mocks.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from solvebio.resource.solveobject import convert_to_solve_object
diff --git a/solvebio/test/helper.py b/solvebio/test/helper.py
index a42e76ee..fc23d484 100644
--- a/solvebio/test/helper.py
+++ b/solvebio/test/helper.py
@@ -1,14 +1,7 @@
-from __future__ import absolute_import
-import six
-
 import os
 import re
 import sys
-
-if (sys.version_info >= (2, 7, 0)):
-    import unittest  # NOQA
-else:
-    import unittest2 as unittest  # NOQA
+import unittest
 
 import solvebio
 
@@ -41,7 +34,7 @@ def assertRaisesRegexp(self, exception, regexp, callable, *args, **kwargs):
         if regexp is None:
             return True
 
-        if isinstance(regexp, six.string_types):
+        if isinstance(regexp, str):
             regexp = re.compile(regexp)
         if not regexp.search(str(err)):
             raise self.failureException('\'%s\' does not match \'%s\'' %
diff --git a/solvebio/test/test_annotate.py b/solvebio/test/test_annotate.py
index ec940304..a25bfe9c 100644
--- a/solvebio/test/test_annotate.py
+++ b/solvebio/test/test_annotate.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_apiresource.py b/solvebio/test/test_apiresource.py
index 17e5db5d..aacf165f 100644
--- a/solvebio/test/test_apiresource.py
+++ b/solvebio/test/test_apiresource.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import uuid
 
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_beacon.py b/solvebio/test/test_beacon.py
index 80fa0dff..0ca9fde5 100644
--- a/solvebio/test/test_beacon.py
+++ b/solvebio/test/test_beacon.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_client.py b/solvebio/test/test_client.py
index a4490832..f9801a1b 100644
--- a/solvebio/test/test_client.py
+++ b/solvebio/test/test_client.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_conversion.py b/solvebio/test/test_conversion.py
index ec55d746..f52db7e8 100644
--- a/solvebio/test/test_conversion.py
+++ b/solvebio/test/test_conversion.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import unittest
 
 from solvebio.resource.util import class_to_api_name
diff --git a/solvebio/test/test_credentials.py b/solvebio/test/test_credentials.py
index ae2fdab6..cfdde1ee 100644
--- a/solvebio/test/test_credentials.py
+++ b/solvebio/test/test_credentials.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import os
 import shutil
diff --git a/solvebio/test/test_dataset.py b/solvebio/test/test_dataset.py
index 837560c6..a9649dc8 100644
--- a/solvebio/test/test_dataset.py
+++ b/solvebio/test/test_dataset.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_dataset_migrations.py b/solvebio/test/test_dataset_migrations.py
index 397bf56a..f916f89c 100644
--- a/solvebio/test/test_dataset_migrations.py
+++ b/solvebio/test/test_dataset_migrations.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import mock
diff --git a/solvebio/test/test_errors.py b/solvebio/test/test_errors.py
index 710b46f8..14e34016 100644
--- a/solvebio/test/test_errors.py
+++ b/solvebio/test/test_errors.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .helper import SolveBioTestCase
 from solvebio.errors import SolveError
 from solvebio.resource import DatasetImport
diff --git a/solvebio/test/test_exports.py b/solvebio/test/test_exports.py
index 9b5d48fd..ac3a2b24 100644
--- a/solvebio/test/test_exports.py
+++ b/solvebio/test/test_exports.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import mock
 
 from solvebio.test.client_mocks import fake_export_create
diff --git a/solvebio/test/test_filter.py b/solvebio/test/test_filter.py
index 1d6d60df..1ae21631 100644
--- a/solvebio/test/test_filter.py
+++ b/solvebio/test/test_filter.py
@@ -1,6 +1,3 @@
-from __future__ import absolute_import
-from __future__ import print_function
-
 import unittest
 
 import solvebio
diff --git a/solvebio/test/test_login.py b/solvebio/test/test_login.py
index 40271127..9a46e9a4 100644
--- a/solvebio/test/test_login.py
+++ b/solvebio/test/test_login.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import contextlib
 import sys
diff --git a/solvebio/test/test_lookup.py b/solvebio/test/test_lookup.py
index a7e43941..acf0a986 100644
--- a/solvebio/test/test_lookup.py
+++ b/solvebio/test/test_lookup.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_object.py b/solvebio/test/test_object.py
index 0a931e87..527e5c83 100644
--- a/solvebio/test/test_object.py
+++ b/solvebio/test/test_object.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import unittest
 import uuid
 import os
diff --git a/solvebio/test/test_query.py b/solvebio/test/test_query.py
index 733d8c3c..abd13ac3 100644
--- a/solvebio/test/test_query.py
+++ b/solvebio/test/test_query.py
@@ -1,13 +1,9 @@
-from __future__ import absolute_import
-
 import unittest
 
 from solvebio.query import Filter
 from solvebio import SolveError
 
 from .helper import SolveBioTestCase
-from six.moves import map
-from six.moves import range
 
 
 class BaseQueryTest(SolveBioTestCase):
diff --git a/solvebio/test/test_query_batch.py b/solvebio/test/test_query_batch.py
index fee86727..8d664f20 100644
--- a/solvebio/test/test_query_batch.py
+++ b/solvebio/test/test_query_batch.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_ratelimit.py b/solvebio/test/test_ratelimit.py
index 9c09b2b5..d5e21895 100644
--- a/solvebio/test/test_ratelimit.py
+++ b/solvebio/test/test_ratelimit.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import time
 from mock import patch
diff --git a/solvebio/test/test_shortcuts.py b/solvebio/test/test_shortcuts.py
index 94c78ec0..21109df8 100644
--- a/solvebio/test/test_shortcuts.py
+++ b/solvebio/test/test_shortcuts.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import sys
 import os
 import json
diff --git a/solvebio/test/test_tabulate.py b/solvebio/test/test_tabulate.py
index d14ef865..e4b2caec 100644
--- a/solvebio/test/test_tabulate.py
+++ b/solvebio/test/test_tabulate.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import unittest
 from solvebio.utils import tabulate as t
diff --git a/solvebio/test/test_utils.py b/solvebio/test/test_utils.py
index eff032b8..19afcd21 100644
--- a/solvebio/test/test_utils.py
+++ b/solvebio/test/test_utils.py
@@ -1,4 +1,3 @@
-from __future__ import absolute_import
 import os
 
 from .helper import SolveBioTestCase
diff --git a/solvebio/test/test_vault.py b/solvebio/test/test_vault.py
index d36fb1d4..12ab16d3 100644
--- a/solvebio/test/test_vault.py
+++ b/solvebio/test/test_vault.py
@@ -1,5 +1,3 @@
-from __future__ import absolute_import
-
 import unittest
 
 from .helper import SolveBioTestCase
diff --git a/solvebio/utils/printing.py b/solvebio/utils/printing.py
index cc49316a..e408fad6 100644
--- a/solvebio/utils/printing.py
+++ b/solvebio/utils/printing.py
@@ -1,5 +1,4 @@
 # -*- coding: utf-8 -*-
-from __future__ import absolute_import
 
 import os
 import sys
@@ -63,10 +62,7 @@ def set_from_env(name, default_value):
 
 
 def pretty_int(num):
-    if sys.version_info[0] == 2 or (sys.version_info[0] == 3 and sys.version_info[1] < 7):
-        return locale.format("%d", int(num), grouping=True)
-    else:
-        return locale.format_string("%d", int(num), grouping=True)
+    return locale.format_string("%d", int(num), grouping=True)
 
 
 # Basic color support
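
`locale.format()` was deprecated in Python 3.7 and removed in 3.12, so `format_string` is the only spelling that works across currently supported versions:

    import locale

    locale.setlocale(locale.LC_ALL, '')  # use the process's configured locale
    print(locale.format_string('%d', 1234567, grouping=True))  # e.g. '1,234,567'
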
diff --git a/solvebio/utils/tabulate.py b/solvebio/utils/tabulate.py
index e066f258..c549d29b 100644
--- a/solvebio/utils/tabulate.py
+++ b/solvebio/utils/tabulate.py
@@ -22,34 +22,17 @@
 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-from __future__ import print_function
-from __future__ import absolute_import
-
-from six.moves import map
-from six.moves import range
-from six.moves import zip
-from six import string_types
-
 from collections import namedtuple
-from platform import python_version_tuple
 import re
 
 from .printing import TTY_COLS
+from itertools import zip_longest
+from functools import reduce
 
-if python_version_tuple()[0] < "3":
-    from itertools import izip_longest
-    _none_type = type(None)
-    _int_type = int
-    _float_type = float
-    _text_type = str
-    _binary_type = str
-else:
-    from itertools import zip_longest as izip_longest
-    from functools import reduce
-    _none_type = type(None)
-    _int_type = int
-    _float_type = float
-    _text_type = str
-    _binary_type = bytes
+_none_type = type(None)
+_int_type = int
+_float_type = float
+_text_type = str
+_binary_type = bytes
 
 __all__ = ["tabulate"]
 
@@ -166,7 +149,7 @@ def _isint(string):
     """
     return type(string) is int or \
         (isinstance(string, _binary_type) or
-         isinstance(string, string_types)) and \
+         isinstance(string, str)) and \
         _isconvertible(int, string)
 
 
@@ -423,7 +406,7 @@ def _normalize_tabular_data(tabular_data, headers, sort=True):
         # likely a conventional dict
         keys = list(tabular_data.keys())
         # columns have to be transposed
-        rows = list(izip_longest(*list(tabular_data.values())))
+        rows = list(zip_longest(*list(tabular_data.values())))
     elif hasattr(tabular_data, "index"):
         # values is a property, has .index then
         # it's likely a pandas.DataFrame (pandas 0.11.0)
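
`itertools.zip_longest` is the direct Python 3 replacement for the removed `izip_longest` alias; it pads shorter columns with `None`, which is what the dict-of-columns transposition above depends on:

    from itertools import zip_longest

    columns = {'name': ['BRCA1', 'TP53'], 'hits': [12]}
    # Transpose dict-of-columns into rows, padding missing cells with None:
    rows = list(zip_longest(*columns.values()))
    print(rows)  # [('BRCA1', 12), ('TP53', None)]
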
diff --git a/solvebio/version.py b/solvebio/version.py
index 0c123836..40f308e4 100644
--- a/solvebio/version.py
+++ b/solvebio/version.py
@@ -3,4 +3,4 @@
 # This file should define a variable VERSION which we use as the
 # debugger version number.
 
-VERSION = '2.32.0'
+VERSION = '2.32.1rc0'