diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a808d8b..163d425 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,6 +15,10 @@ jobs: run: | pip install flake8-pyproject flake8 + - name: Check formatting with black + run: | + pip install -r requirements.d/codestyle.txt + black --check . test: needs: lint runs-on: ${{ matrix.os }} @@ -58,9 +62,9 @@ jobs: pip install codecov pip install -e . - name: Run tox - run: | + run: | tox --skip-missing-interpreters - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 with: - token: ${{ secrets.CODECOV_TOKEN }} + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..1ac3ad1 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,19 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + +- repo: https://github.com/psf/black + rev: 24.8.0 + hooks: + - id: black + +- repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + additional_dependencies: [flake8-pyproject] diff --git a/LICENSE b/LICENSE index bcf0a94..758429c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (C) 2016-2022 The Borg Collective (see AUTHORS file) +Copyright (C) 2016-2025 The Borg Collective (see AUTHORS file) All rights reserved. 
Redistribution and use in source and binary forms, with or without diff --git a/docs/conf.py b/docs/conf.py index c57d5d8..41f42d6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,9 +20,9 @@ import os import sys -sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath(".")) -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' +on_rtd = os.environ.get("READTHEDOCS", None) == "True" # -- General configuration ------------------------------------------------ @@ -33,40 +33,37 @@ # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ - 'sphinx.ext.autodoc', - 'usage', -] +extensions = ["sphinx.ext.autodoc", "usage"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = 'Borg - Import' -copyright = '2016-2017 The Borg Collective (see AUTHORS file)' -author = 'The Borg Collective' +project = "Borg - Import" +copyright = "2016-2025 The Borg Collective (see AUTHORS file)" +author = "The Borg Collective" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '0.0' +version = "0.0" # The full version, including alpha/beta/rc tags. -release = '0.0.0' +release = "0.0.0" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
@@ -83,12 +80,12 @@ # Else, today_fmt is used as the format for a strftime call. # # today_fmt = '%B %d, %Y' -today_fmt = '%Y-%m-%d' +today_fmt = "%Y-%m-%d" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -110,7 +107,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -129,16 +126,17 @@ # if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' + + html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - html_style = 'css/borg.css' + html_style = "css/borg.css" else: html_context = { - 'css_files': [ - 'https://media.readthedocs.org/css/sphinx_rtd_theme.css', - 'https://media.readthedocs.org/css/readthedocs-doc-embed.css', - '_static/css/borg.css', - ], + "css_files": [ + "https://media.readthedocs.org/css/sphinx_rtd_theme.css", + "https://media.readthedocs.org/css/readthedocs-doc-embed.css", + "_static/css/borg.css", + ] } # Theme options are theme-specific and customize the look and feel of a theme @@ -163,20 +161,20 @@ # of the sidebar. # # html_logo = None -html_logo = '_static/logo.png' +html_logo = "_static/logo.png" # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. 
# # html_favicon = None -html_favicon = '_static/favicon.ico' +html_favicon = "_static/favicon.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] -html_static_path = ['borg_theme'] +# html_static_path = ['_static'] +html_static_path = ["borg_theme"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -189,7 +187,7 @@ # The empty string is equivalent to '%b %d, %Y'. # # html_last_updated_fmt = None -html_last_updated_fmt = '%Y-%m-%d' +html_last_updated_fmt = "%Y-%m-%d" # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. @@ -201,8 +199,8 @@ # html_sidebars = {} # Custom sidebar templates, maps document names to template names. html_sidebars = { - 'index': ['sidebarlogo.html', 'sidebarusefullinks.html', 'searchbox.html'], - '**': ['sidebarlogo.html', 'relations.html', 'searchbox.html', 'localtoc.html', 'sidebarusefullinks.html'] + "index": ["sidebarlogo.html", "sidebarusefullinks.html", "searchbox.html"], + "**": ["sidebarlogo.html", "relations.html", "searchbox.html", "localtoc.html", "sidebarusefullinks.html"], } # Additional templates that should be rendered to pages, maps page names to @@ -262,35 +260,29 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'borg-importdoc' +htmlhelp_basename = "borg-importdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. 
- # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'borg-import.tex', 'borg-import Documentation', - 'The Borg Collective', 'manual'), -] +latex_documents = [(master_doc, "borg-import.tex", "borg-import Documentation", "The Borg Collective", "manual")] # The name of an image file (relative to this directory) to place at the top of # the title page. @@ -329,10 +321,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'borg-import', 'borg-import Documentation', - [author], 1) -] +man_pages = [(master_doc, "borg-import", "borg-import Documentation", [author], 1)] # If true, show URL addresses after external links. # @@ -345,9 +334,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'borg-import', 'borg-import Documentation', - author, 'borg-import', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "borg-import", + "borg-import Documentation", + author, + "borg-import", + "One line description of project.", + "Miscellaneous", + ) ] # Documents to append as an appendix to all manuals. 
diff --git a/docs/installation.rst b/docs/installation.rst index 307e6f9..57630e7 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -14,3 +14,19 @@ Open a terminal in the borg-import directory and execute the following to instal If you have */home/user/.local/bin/* in your ``PATH`` variable, you can then start using Borg-Import. Otherwise, you will need to add *.local/bin/* to your ``PATH``. + +For Developers +-------------- + +If you're planning to contribute to Borg-Import, you should set up the development environment: + +1. Install development dependencies: + + :code:`pip install -r requirements.d/development.txt` + +2. Set up pre-commit hooks: + + :code:`pre-commit install` + +This will automatically run code formatting (black) and linting (flake8) checks before each commit. +The pre-commit hooks will ensure your code follows the project's style guidelines. diff --git a/docs/usage.py b/docs/usage.py index 75774f4..c488090 100644 --- a/docs/usage.py +++ b/docs/usage.py @@ -16,21 +16,21 @@ class GenerateUsageDirective(Directive): @staticmethod def _get_command_parser(parser, command): for action in parser._actions: - if action.choices is not None and 'SubParsersAction' in str(action.__class__): + if action.choices is not None and "SubParsersAction" in str(action.__class__): return action.choices[command] - raise ValueError('No parser for %s found' % command) + raise ValueError("No parser for %s found" % command) def write_options_group(self, group, contents, with_title=True): def is_positional_group(group): return any(not o.option_strings for o in group._group_actions) def get_help(option): - text = dedent((option.help or '') % option.__dict__) - return '\n'.join('| ' + line for line in text.splitlines()) + text = dedent((option.help or "") % option.__dict__) + return "\n".join("| " + line for line in text.splitlines()) def shipout(text): for line in text: - contents.append(indent(line, ' ' * 4)) + contents.append(indent(line, " " * 4)) if not 
group._group_actions: return @@ -42,20 +42,20 @@ def shipout(text): if is_positional_group(group): for option in group._group_actions: text.append(option.metavar) - text.append(indent(option.help or '', ' ' * 4)) + text.append(indent(option.help or "", " " * 4)) shipout(text) return options = [] for option in group._group_actions: if option.metavar: - option_fmt = '``%%s %s``' % option.metavar + option_fmt = "``%%s %s``" % option.metavar else: - option_fmt = '``%s``' - option_str = ', '.join(option_fmt % s for s in option.option_strings) + option_fmt = "``%s``" + option_str = ", ".join(option_fmt % s for s in option.option_strings) options.append((option_str, option)) for option_str, option in options: - help = indent(get_help(option), ' ' * 4) + help = indent(get_help(option), " " * 4) text.append(option_str) text.append(help) @@ -66,18 +66,18 @@ def run(self): command = self.arguments[0] parser = self._get_command_parser(build_parser(), command) - full_command = 'borg-import ' + command - headline = '::\n\n ' + full_command + full_command = "borg-import " + command + headline = "::\n\n " + full_command if any(len(o.option_strings) for o in parser._actions): - headline += ' ' + headline += " " # Add the metavars of the parameters to the synopsis line for option in parser._actions: if not option.option_strings: - headline += ' ' + option.metavar + headline += " " + option.metavar - headline += '\n\n' + headline += "\n\n" # Final result will look like: # borg-import something FOO_BAR REPOSITORY @@ -87,23 +87,23 @@ def run(self): self.write_options_group(group, contents) if parser.epilog: - contents.append('Description') - contents.append('~~~~~~~~~~~') - contents.append('') + contents.append("Description") + contents.append("~~~~~~~~~~~") + contents.append("") node = nodes.paragraph() nested_parse_with_titles(self.state, StringList(contents), node) gen_nodes = [node] if parser.epilog: - paragraphs = parser.epilog.split('\n\n') + paragraphs = 
parser.epilog.split("\n\n") for paragraph in paragraphs: node = nodes.paragraph() - nested_parse_with_titles(self.state, StringList(paragraph.split('\n')), node) + nested_parse_with_titles(self.state, StringList(paragraph.split("\n")), node) gen_nodes.append(node) return gen_nodes def setup(app: sphinx.application.Sphinx): - app.add_directive('generate-usage', GenerateUsageDirective) + app.add_directive("generate-usage", GenerateUsageDirective) diff --git a/pyproject.toml b/pyproject.toml index fc5ff90..855baa2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,12 +70,12 @@ exclude_lines = [ ignore_errors = true [tool.tox] -env_list = ["py39", "py310", "py311", "py312", "py313", "flake8"] +env_list = ["py39", "py310", "py311", "py312", "py313", "flake8", "black"] [tool.tox.env_run_base] package = "editable-legacy" commands = [["pytest", "-v", "-rs", "--cov=borg_import", "--cov-config=pyproject.toml", "--pyargs", "{posargs:borg_import}"]] -deps = ["-rrequirements.d/development.txt"] +deps = ["-rrequirements.d/development.txt", "-rrequirements.d/codestyle.txt"] passenv = ["*"] [tool.tox.env_pkg_base] @@ -85,3 +85,10 @@ passenv = ["*"] [tool.tox.env.flake8] commands = [["flake8"]] + +[tool.tox.env.black] +commands = [["black", "--check", "."]] + +[tool.black] +line-length = 120 +skip-magic-trailing-comma = true diff --git a/requirements.d/codestyle.txt b/requirements.d/codestyle.txt new file mode 100644 index 0000000..4a92e2c --- /dev/null +++ b/requirements.d/codestyle.txt @@ -0,0 +1 @@ +black >=24.0, <25 diff --git a/requirements.d/development.txt b/requirements.d/development.txt index 3b5c583..a9b54e3 100644 --- a/requirements.d/development.txt +++ b/requirements.d/development.txt @@ -3,3 +3,4 @@ tox pytest pytest-cov flake8-pyproject +pre-commit diff --git a/src/borg_import/borg.py b/src/borg_import/borg.py index 407ecb6..afe23a7 100644 --- a/src/borg_import/borg.py +++ b/src/borg_import/borg.py @@ -6,20 +6,16 @@ def get_borg_archives(repository): """Get all 
archive metadata discovered in the Borg repository.""" # Get list of archives with their timestamps - borg_cmdline = ['borg', 'list', '--format', '{name}{TAB}{time}{NL}', repository] + borg_cmdline = ["borg", "list", "--format", "{name}{TAB}{time}{NL}", repository] output = subprocess.check_output(borg_cmdline).decode() for line in output.splitlines(): if not line.strip(): continue - parts = line.split('\t', 1) + parts = line.split("\t", 1) if len(parts) == 2: name, timestamp_str = parts timestamp = datetime_from_string(timestamp_str) - meta = dict( - name=name, - timestamp=timestamp, - original_repository=repository, - ) + meta = dict(name=name, timestamp=timestamp, original_repository=repository) yield meta diff --git a/src/borg_import/helpers/discover.py b/src/borg_import/helpers/discover.py index beffa63..c7db21b 100644 --- a/src/borg_import/helpers/discover.py +++ b/src/borg_import/helpers/discover.py @@ -6,6 +6,7 @@ def discover(root, depth): """ recurse starting from path and yield relative dir paths with wanted . """ + def _discover(root, current_dir, current_depth, wanted_depth): entries = sorted(os.listdir(current_dir)) for entry in entries: diff --git a/src/borg_import/helpers/names.py b/src/borg_import/helpers/names.py index 725a7da..a6ec379 100644 --- a/src/borg_import/helpers/names.py +++ b/src/borg_import/helpers/names.py @@ -1,7 +1,7 @@ from datetime import datetime -def make_name(*args, dt_format='%Y-%m-%dT%H:%M:%S'): +def make_name(*args, dt_format="%Y-%m-%dT%H:%M:%S"): """ assemble borg archive names from components. 
@@ -23,17 +23,17 @@ def make_name(*args, dt_format='%Y-%m-%dT%H:%M:%S'): components = [] for arg in args: if isinstance(arg, bytes): - s = arg.decode('utf-8', errors='surrogateescape') + s = arg.decode("utf-8", errors="surrogateescape") elif isinstance(arg, datetime): s = arg.strftime(dt_format) else: s = str(arg) # we don't want to have blanks for practical shell-usage reasons: - s = s.replace(' ', '_') + s = s.replace(" ", "_") # the slash is not allowed in archive names # (archive name = FUSE directory name) - s = s.replace('/', '!') + s = s.replace("/", "!") # :: is repo::archive separator, not allowed in archive names - s = s.replace('::', ':') + s = s.replace("::", ":") components.append(s) - return '-'.join(components) + return "-".join(components) diff --git a/src/borg_import/helpers/testsuite/test_discover.py b/src/borg_import/helpers/testsuite/test_discover.py index 22d7576..351e5be 100644 --- a/src/borg_import/helpers/testsuite/test_discover.py +++ b/src/borg_import/helpers/testsuite/test_discover.py @@ -4,17 +4,17 @@ def test_discover(tmpdir): - root_dir = tmpdir.mkdir('root') - host1_dir = root_dir.mkdir('hostname1') - host1_dir.mkdir('0') - host1_dir.mkdir('1') - host2_dir = root_dir.mkdir('hostname2') - host2_dir.mkdir('3') + root_dir = tmpdir.mkdir("root") + host1_dir = root_dir.mkdir("hostname1") + host1_dir.mkdir("0") + host1_dir.mkdir("1") + host2_dir = root_dir.mkdir("hostname2") + host2_dir.mkdir("3") dirs = list(discover(str(root_dir), 2)) - assert dirs == ['hostname1/0', 'hostname1/1', 'hostname2/3'] + assert dirs == ["hostname1/0", "hostname1/1", "hostname2/3"] def test_parser(): - rx = re.compile(r'(?P<hostname>.+)/(?P<generation>.+)') - assert parser('host1/gen0', rx) == dict(hostname='host1', generation='gen0') - assert parser('foo', rx) is None + rx = re.compile(r"(?P<hostname>.+)/(?P<generation>.+)") + assert parser("host1/gen0", rx) == dict(hostname="host1", generation="gen0") + assert parser("foo", rx) is None diff --git a/src/borg_import/helpers/testsuite/test_names.py 
b/src/borg_import/helpers/testsuite/test_names.py index 75a352e..efbe500 100644 --- a/src/borg_import/helpers/testsuite/test_names.py +++ b/src/borg_import/helpers/testsuite/test_names.py @@ -5,27 +5,27 @@ def test_make_name(): # str (some with invalid/unwanted chars) - assert make_name('backup name') == 'backup_name' - assert make_name('backup/name') == 'backup!name' - assert make_name('backup::name') == 'backup:name' + assert make_name("backup name") == "backup_name" + assert make_name("backup/name") == "backup!name" + assert make_name("backup::name") == "backup:name" # int - assert make_name(1, 2, 3) == '1-2-3' + assert make_name(1, 2, 3) == "1-2-3" # datetime ts = datetime(1999, 12, 31, 23, 59, 59) - assert make_name(ts) == '1999-12-31T23:59:59' + assert make_name(ts) == "1999-12-31T23:59:59" # edge case - assert make_name() == '' + assert make_name() == "" # bytes and safe decoding - assert make_name(b'bytestring') == 'bytestring' - s = 'äöü' - b_utf8 = s.encode('utf-8') - b_iso = s.encode('iso-8859-1') + assert make_name(b"bytestring") == "bytestring" + s = "äöü" + b_utf8 = s.encode("utf-8") + b_iso = s.encode("iso-8859-1") assert make_name(b_utf8) == s assert make_name(b_iso) # shall not raise, surrogateescaped # mixed - assert make_name(s, b_utf8, 1) == 'äöü-äöü-1' + assert make_name(s, b_utf8, 1) == "äöü-äöü-1" diff --git a/src/borg_import/helpers/testsuite/test_timestamps.py b/src/borg_import/helpers/testsuite/test_timestamps.py index 1a747af..78959fb 100644 --- a/src/borg_import/helpers/testsuite/test_timestamps.py +++ b/src/borg_import/helpers/testsuite/test_timestamps.py @@ -18,7 +18,7 @@ def test_datetime_from_mtime(tmpdir): def test_datetime_from_string(): - dfs = datetime_from_string('1999-12-31T23:59:59') + dfs = datetime_from_string("1999-12-31T23:59:59") dt_trg = datetime(1999, 12, 31, 23, 59, 59).astimezone(tz=timezone.utc) assert dfs == dt_trg # Of course, two datetimes can be equal in different timezones. 
Make @@ -29,16 +29,16 @@ def test_datetime_from_string(): # strptime discards timezone info, and creates a naive time. # UTC is handled specially inside datetime_from_string to accommodate # strptime's quirks; local conversions using this format may or may not work. - dfs = datetime_from_string('Mon Oct 31 23:35:50 UTC 2016') + dfs = datetime_from_string("Mon Oct 31 23:35:50 UTC 2016") dt_trg = datetime(2016, 10, 31, 23, 35, 50, tzinfo=timezone.utc) assert dfs == dt_trg assert dfs.tzinfo == dt_trg.tzinfo == timezone.utc # rsync-time-backup format. - dfs = datetime_from_string('2022-12-21-063019') + dfs = datetime_from_string("2022-12-21-063019") dt_trg = datetime(2022, 12, 21, 6, 30, 19).astimezone(tz=timezone.utc) assert dfs == dt_trg assert dfs.tzinfo == dt_trg.tzinfo == timezone.utc with pytest.raises(ValueError): - datetime_from_string('total crap') + datetime_from_string("total crap") diff --git a/src/borg_import/helpers/timestamps.py b/src/borg_import/helpers/timestamps.py index 058721f..7ecc82f 100644 --- a/src/borg_import/helpers/timestamps.py +++ b/src/borg_import/helpers/timestamps.py @@ -25,21 +25,21 @@ def datetime_from_string(s): """ s = s.strip() for ts_format in [ - # ISO-8601-like: - '%Y-%m-%dT%H:%M:%S', - '%Y-%m-%dT%H:%M', - '%Y-%m-%d %H:%M:%S', - '%Y-%m-%d %H:%M', - # date tool output [C / en_US locale]: - '%a %b %d %H:%M:%S %Z %Y', - # borg format with day of week - '%a, %Y-%m-%d %H:%M:%S', - # rsync-time-backup format - '%Y-%m-%d-%H%M%S' - # for more, see https://xkcd.com/1179/ - ]: + # ISO-8601-like: + "%Y-%m-%dT%H:%M:%S", + "%Y-%m-%dT%H:%M", + "%Y-%m-%d %H:%M:%S", + "%Y-%m-%d %H:%M", + # date tool output [C / en_US locale]: + "%a %b %d %H:%M:%S %Z %Y", + # borg format with day of week + "%a, %Y-%m-%d %H:%M:%S", + # rsync-time-backup format + "%Y-%m-%d-%H%M%S", + # for more, see https://xkcd.com/1179/ + ]: try: - if ts_format in ('%a %b %d %H:%M:%S %Z %Y',) and 'UTC' in s: + if ts_format in ("%a %b %d %H:%M:%S %Z %Y",) and "UTC" in s: # %Z 
returns a naive datetime, despite a timezone being specified. # However, strptime %Z only tends to work on local times and # UTC. @@ -62,7 +62,7 @@ def datetime_from_string(s): # didn't work with this format, try next pass else: - raise ValueError('could not parse %r' % s) + raise ValueError("could not parse %r" % s) def datetime_from_file(path): diff --git a/src/borg_import/main.py b/src/borg_import/main.py index 32933ba..87bcd95 100755 --- a/src/borg_import/main.py +++ b/src/borg_import/main.py @@ -17,35 +17,35 @@ def borg_import(args, archive_name, path, timestamp=None): - borg_cmdline = ['borg', 'create', '--numeric-ids', '--files-cache=mtime,size'] + borg_cmdline = ["borg", "create", "--numeric-ids", "--files-cache=mtime,size"] if timestamp: - borg_cmdline += '--timestamp', timestamp.isoformat() + borg_cmdline += "--timestamp", timestamp.isoformat() if args.create_options: borg_cmdline += args.create_options.split() borg_cmdline.append(args.repository + "::" + archive_name) - borg_cmdline.append('.') + borg_cmdline.append(".") - log.debug('Borg command line: %r', borg_cmdline) - log.debug('Borg working directory: %s', path) + log.debug("Borg command line: %r", borg_cmdline) + log.debug("Borg working directory: %s", path) try: subprocess.check_call(borg_cmdline, cwd=str(path)) except subprocess.CalledProcessError as cpe: if cpe.returncode != 1: raise - log.debug('Borg exited with a warning (being quiet about it since Borg spoke already)') + log.debug("Borg exited with a warning (being quiet about it since Borg spoke already)") def list_borg_archives(args): - borg_cmdline = ['borg', 'list', '--short'] + borg_cmdline = ["borg", "list", "--short"] borg_cmdline.append(args.repository) return subprocess.check_output(borg_cmdline).decode().splitlines() class Importer: - name = 'name-of-command' - description = 'descriptive description describing this importer' - epilog = 'epilog-y epilog epiloging about this importer (docstringy for multiple lines)' + name = 
"name-of-command" + description = "descriptive description describing this importer" + epilog = "epilog-y epilog epiloging about this importer (docstringy for multiple lines)" def populate_parser(self, parser): """ @@ -63,8 +63,8 @@ def import_something(self, args): class rsnapshotImporter(Importer): - name = 'rsnapshot' - description = 'import rsnapshot backups' + name = "rsnapshot" + description = "import rsnapshot backups" epilog = """ Imports from rsnapshot backup sets by renaming each snapshot to a common name independent of the snapshot (and the backup set), @@ -80,65 +80,73 @@ class rsnapshotImporter(Importer): """ def populate_parser(self, parser): - parser.add_argument('--backup-set', help='Only consider given backup set (can be given multiple times).', - action='append', dest='backup_sets') - parser.add_argument('rsnapshot_root', metavar='RSNAPSHOT_ROOT', - help='Path to rsnapshot root directory', type=Path) + parser.add_argument( + "--backup-set", + help="Only consider given backup set (can be given multiple times).", + action="append", + dest="backup_sets", + ) + parser.add_argument( + "rsnapshot_root", metavar="RSNAPSHOT_ROOT", help="Path to rsnapshot root directory", type=Path + ) # TODO: support the full wealth of borg possibilities - parser.add_argument('repository', metavar='BORG_REPOSITORY', - help='Borg repository (must be an absolute local path or a remote repo specification)') + parser.add_argument( + "repository", + metavar="BORG_REPOSITORY", + help="Borg repository (must be an absolute local path or a remote repo specification)", + ) parser.set_defaults(function=self.import_rsnapshot) def import_rsnapshot(self, args): existing_archives = list_borg_archives(args) - import_path = args.rsnapshot_root / 'borg-import-dir' - import_journal = args.rsnapshot_root / 'borg-import-dir.snapshot' + import_path = args.rsnapshot_root / "borg-import-dir" + import_journal = args.rsnapshot_root / "borg-import-dir.snapshot" if import_path.exists(): - print('{} 
exists. Cannot continue.'.format(import_path)) + print("{} exists. Cannot continue.".format(import_path)) return 1 for rsnapshot in get_snapshots(args.rsnapshot_root): - timestamp = rsnapshot['timestamp'].replace(microsecond=0) - snapshot_original_path = rsnapshot['path'] - name = rsnapshot['name'] + timestamp = rsnapshot["timestamp"].replace(microsecond=0) + snapshot_original_path = rsnapshot["path"] + name = rsnapshot["name"] archive_name = args.prefix + name - if args.backup_sets and rsnapshot['backup_set'] not in args.backup_sets: - print('Skipping (backup set is not selected):', name) + if args.backup_sets and rsnapshot["backup_set"] not in args.backup_sets: + print("Skipping (backup set is not selected):", name) continue if archive_name in existing_archives: - print('Skipping (already exists in repository):', name) + print("Skipping (already exists in repository):", name) continue - print('Importing {} (timestamp {}) '.format(name, timestamp), end='') + print("Importing {} (timestamp {}) ".format(name, timestamp), end="") if archive_name != name: - print('as', archive_name) + print("as", archive_name) else: print() - log.debug(' Moving {} -> {}'.format(rsnapshot['path'], import_path)) + log.debug(" Moving {} -> {}".format(rsnapshot["path"], import_path)) # We move the snapshots to import_path so that the files cache in Borg can work effectively. 
- with import_journal.open('w') as fd: - fd.write('Current snapshot: %s\n' % rsnapshot['name']) - fd.write('Original path: %s\n' % snapshot_original_path) + with import_journal.open("w") as fd: + fd.write("Current snapshot: %s\n" % rsnapshot["name"]) + fd.write("Original path: %s\n" % snapshot_original_path) snapshot_original_path.rename(import_path) try: borg_import(args, archive_name, import_path, timestamp=timestamp) finally: - log.debug(' Moving {} -> {}'.format(import_path, rsnapshot['path'])) + log.debug(" Moving {} -> {}".format(import_path, rsnapshot["path"])) import_path.rename(snapshot_original_path) import_journal.unlink() class rsynchlImporter(Importer): - name = 'rsynchl' - description = 'import rsync+hardlink backups' + name = "rsynchl" + description = "import rsync+hardlink backups" epilog = """ Imports from rsync backup sets by renaming each snapshot to a common name independent of the snapshot, which allows the Borg files cache @@ -159,59 +167,61 @@ class rsynchlImporter(Importer): """ def populate_parser(self, parser): - parser.add_argument('rsync_root', metavar='RSYNC_ROOT', - help='Path to root directory', type=Path) + parser.add_argument("rsync_root", metavar="RSYNC_ROOT", help="Path to root directory", type=Path) # TODO: support the full wealth of borg possibilities - parser.add_argument('repository', metavar='BORG_REPOSITORY', - help='Borg repository (must be an absolute local path or a remote repo specification)') + parser.add_argument( + "repository", + metavar="BORG_REPOSITORY", + help="Borg repository (must be an absolute local path or a remote repo specification)", + ) parser.set_defaults(function=self.import_rsynchl) def import_rsynchl(self, args): existing_archives = list_borg_archives(args) - import_path = args.rsync_root / 'borg-import-dir' - import_journal = args.rsync_root / 'borg-import-dir.snapshot' + import_path = args.rsync_root / "borg-import-dir" + import_journal = args.rsync_root / "borg-import-dir.snapshot" if 
import_path.exists(): - print('{} exists. Cannot continue.'.format(import_path)) + print("{} exists. Cannot continue.".format(import_path)) return 1 for rsnapshot in get_rsyncsnapshots(args.rsync_root): - timestamp = rsnapshot['timestamp'].replace(microsecond=0) - snapshot_original_path = rsnapshot['path'] - name = rsnapshot['name'] + timestamp = rsnapshot["timestamp"].replace(microsecond=0) + snapshot_original_path = rsnapshot["path"] + name = rsnapshot["name"] archive_name = args.prefix + name if archive_name in existing_archives: - print('Skipping (already exists in repository):', name) + print("Skipping (already exists in repository):", name) continue - print('Importing {} (timestamp {}) '.format(name, timestamp), end='') + print("Importing {} (timestamp {}) ".format(name, timestamp), end="") if archive_name != name: - print('as', archive_name) + print("as", archive_name) else: print() - log.debug(' Moving {} -> {}'.format(rsnapshot['path'], import_path)) + log.debug(" Moving {} -> {}".format(rsnapshot["path"], import_path)) # We move the snapshots to import_path so that the files cache in Borg can work effectively. 
- with import_journal.open('w') as fd: - fd.write('Current snapshot: %s\n' % rsnapshot['name']) - fd.write('Original path: %s\n' % snapshot_original_path) + with import_journal.open("w") as fd: + fd.write("Current snapshot: %s\n" % rsnapshot["name"]) + fd.write("Original path: %s\n" % snapshot_original_path) snapshot_original_path.rename(import_path) try: borg_import(args, archive_name, import_path, timestamp=timestamp) finally: - log.debug(' Moving {} -> {}'.format(import_path, rsnapshot['path'])) + log.debug(" Moving {} -> {}".format(import_path, rsnapshot["path"])) import_path.rename(snapshot_original_path) import_journal.unlink() class rsyncTmBackupImporter(Importer): - name = 'rsync_tmbackup' - description = 'import rsync-time-backup backups' + name = "rsync_tmbackup" + description = "import rsync-time-backup backups" epilog = """ Imports from rsync-time-backup backup sets by renaming each snapshot to a common name independent of the snapshot, which allows the Borg files cache @@ -235,21 +245,23 @@ class rsyncTmBackupImporter(Importer): """ def populate_parser(self, parser): - parser.add_argument('rsync_root', metavar='RSYNC_ROOT', - help='Path to root directory', type=Path) + parser.add_argument("rsync_root", metavar="RSYNC_ROOT", help="Path to root directory", type=Path) # TODO: support the full wealth of borg possibilities - parser.add_argument('repository', metavar='BORG_REPOSITORY', - help='Borg repository (must be an absolute local path or a remote repo specification)') + parser.add_argument( + "repository", + metavar="BORG_REPOSITORY", + help="Borg repository (must be an absolute local path or a remote repo specification)", + ) parser.set_defaults(function=self.import_rsync_tmbackup) def import_rsync_tmbackup(self, args): existing_archives = list_borg_archives(args) - import_path = args.rsync_root / 'borg-import-dir' - import_journal = args.rsync_root / 'borg-import-dir.snapshot' + import_path = args.rsync_root / "borg-import-dir" + import_journal = 
args.rsync_root / "borg-import-dir.snapshot" if import_path.exists(): - print('{} exists. Cannot continue.'.format(import_path)) + print("{} exists. Cannot continue.".format(import_path)) return 1 if not args.prefix: @@ -257,36 +269,36 @@ def import_rsync_tmbackup(self, args): return 1 for rsnapshot in get_tmbackup_snapshots(args.rsync_root, args.prefix): - timestamp = rsnapshot['timestamp'].replace(microsecond=0) - snapshot_original_path = rsnapshot['path'] - name = rsnapshot['name'] + timestamp = rsnapshot["timestamp"].replace(microsecond=0) + snapshot_original_path = rsnapshot["path"] + name = rsnapshot["name"] if name in existing_archives: - print('Skipping (already exists in repository):', name) + print("Skipping (already exists in repository):", name) continue - print('Importing {} (timestamp {}) '.format(name, timestamp)) - log.debug(' Moving {} -> {}'.format(rsnapshot['path'], import_path)) + print("Importing {} (timestamp {}) ".format(name, timestamp)) + log.debug(" Moving {} -> {}".format(rsnapshot["path"], import_path)) # We move the snapshots to import_path so that the files cache in Borg can work effectively. 
- with import_journal.open('w') as fd: - fd.write('Current snapshot: %s\n' % rsnapshot['name']) - fd.write('Original path: %s\n' % snapshot_original_path) + with import_journal.open("w") as fd: + fd.write("Current snapshot: %s\n" % rsnapshot["name"]) + fd.write("Original path: %s\n" % snapshot_original_path) snapshot_original_path.rename(import_path) try: borg_import(args, name, import_path, timestamp=timestamp) finally: - log.debug(' Moving {} -> {}'.format(import_path, rsnapshot['path'])) + log.debug(" Moving {} -> {}".format(import_path, rsnapshot["path"])) import_path.rename(snapshot_original_path) import_journal.unlink() class borgImporter(Importer): - name = 'borg' - description = 'import archives from another Borg repository' + name = "borg" + description = "import archives from another Borg repository" epilog = """ Imports archives from an existing Borg repository into a new one. @@ -310,10 +322,16 @@ class borgImporter(Importer): """ def populate_parser(self, parser): - parser.add_argument('source_repository', metavar='SOURCE_REPOSITORY', - help='Source Borg repository (must be a valid Borg repository spec)') - parser.add_argument('repository', metavar='DESTINATION_REPOSITORY', - help='Destination Borg repository (must be a valid Borg repository spec)') + parser.add_argument( + "source_repository", + metavar="SOURCE_REPOSITORY", + help="Source Borg repository (must be a valid Borg repository spec)", + ) + parser.add_argument( + "repository", + metavar="DESTINATION_REPOSITORY", + help="Destination Borg repository (must be a valid Borg repository spec)", + ) parser.set_defaults(function=self.import_borg) def import_borg(self, args): @@ -325,38 +343,38 @@ def import_borg(self, args): try: for archive in get_borg_archives(args.source_repository): - name = archive['name'] - timestamp = archive['timestamp'].replace(microsecond=0) + name = archive["name"] + timestamp = archive["timestamp"].replace(microsecond=0) archive_name = args.prefix + name if archive_name 
in existing_archives: - print('Skipping (already exists in repository):', name) + print("Skipping (already exists in repository):", name) continue - print('Importing {} (timestamp {}) '.format(name, timestamp), end='') + print("Importing {} (timestamp {}) ".format(name, timestamp), end="") if archive_name != name: - print('as', archive_name) + print("as", archive_name) else: print() try: # Extract the archive from the source repository - extract_cmdline = ['borg', 'extract', '--numeric-ids'] - extract_cmdline.append(args.source_repository + '::' + name) + extract_cmdline = ["borg", "extract", "--numeric-ids"] + extract_cmdline.append(args.source_repository + "::" + name) - print(' Extracting archive to import directory...') + print(" Extracting archive to import directory...") subprocess.check_call(extract_cmdline, cwd=str(import_path)) # Create a new archive in the destination repository borg_import(args, archive_name, str(import_path), timestamp=timestamp) # Empty the directory after importing the archive - print(' Cleaning import directory...') + print(" Cleaning import directory...") shutil.rmtree(import_path) import_path.mkdir(exist_ok=True) except subprocess.CalledProcessError as cpe: - print('Error during import of {}: {}'.format(name, cpe)) + print("Error during import of {}: {}".format(name, cpe)) if cpe.returncode != 1: # Borg returns 1 for warnings raise finally: @@ -367,49 +385,55 @@ def import_borg(self, args): def build_parser(): common_parser = argparse.ArgumentParser(add_help=False) - common_group = common_parser.add_argument_group('Common options') + common_group = common_parser.add_argument_group("Common options") - common_group.add_argument("--create-options", "-o", - help="Additional borg create options " - "(note: Use -o=\"--foo --bar\" syntax to avoid parser confusion).") - common_group.add_argument("--prefix", help="Add prefix to imported archive names", default='') + common_group.add_argument( + "--create-options", + "-o", + help="Additional 
borg create options " '(note: Use -o="--foo --bar" syntax to avoid parser confusion).', + ) + common_group.add_argument("--prefix", help="Add prefix to imported archive names", default="") - common_group.add_argument("--debug", action='store_const', dest='log_level', const=logging.DEBUG, - help='Display debug/trace messages.') + common_group.add_argument( + "--debug", action="store_const", dest="log_level", const=logging.DEBUG, help="Display debug/trace messages." + ) - parser = argparse.ArgumentParser(description='Import existing backups from other software to Borg') + parser = argparse.ArgumentParser(description="Import existing backups from other software to Borg") parser.set_defaults(log_level=logging.WARNING) subparsers = parser.add_subparsers() for importer_class in Importer.__subclasses__(): importer = importer_class() - subparser = subparsers.add_parser(importer.name, - help=importer.description, epilog=textwrap.dedent(importer.epilog), - formatter_class=argparse.RawDescriptionHelpFormatter, - parents=[common_parser]) + subparser = subparsers.add_parser( + importer.name, + help=importer.description, + epilog=textwrap.dedent(importer.epilog), + formatter_class=argparse.RawDescriptionHelpFormatter, + parents=[common_parser], + ) importer.populate_parser(subparser) return parser def main(): - if not shutil.which('borg'): - print('The \'borg\' command can\'t be found in the PATH. Please correctly install borgbackup first.') - print('See instructions at https://borgbackup.readthedocs.io/en/stable/installation.html') + if not shutil.which("borg"): + print("The 'borg' command can't be found in the PATH. 
Please correctly install borgbackup first.") + print("See instructions at https://borgbackup.readthedocs.io/en/stable/installation.html") return 1 parser = build_parser() args = parser.parse_args() - logging.basicConfig(level=args.log_level, format='%(message)s') + logging.basicConfig(level=args.log_level, format="%(message)s") - if 'function' not in args: + if "function" not in args: return parser.print_help() try: return args.function(args) except subprocess.CalledProcessError as cpe: - print('{} invocation failed with status {}'.format(cpe.cmd[0], cpe.returncode)) - print('Command line was:', *[shlex.quote(s) for s in cpe.cmd]) + print("{} invocation failed with status {}".format(cpe.cmd[0], cpe.returncode)) + print("Command line was:", *[shlex.quote(s) for s in cpe.cmd]) return cpe.returncode diff --git a/src/borg_import/rsnapshots.py b/src/borg_import/rsnapshots.py index 92daee4..aaf540d 100644 --- a/src/borg_import/rsnapshots.py +++ b/src/borg_import/rsnapshots.py @@ -7,14 +7,14 @@ def get_snapshots(root): """Get all snapshot metadata discovered in the rsnapshot root directory.""" - regex = re.compile(r'(?P.+)/(?P.+)') + regex = re.compile(r"(?P.+)/(?P.+)") for path in discover(str(root), 2): parsed = parser(path, regex) if parsed is not None: abs_path = root / path meta = dict( - name=make_name(parsed['backup_set'], parsed['snapshot_id']), - backup_set=parsed['backup_set'], + name=make_name(parsed["backup_set"], parsed["snapshot_id"]), + backup_set=parsed["backup_set"], path=abs_path, timestamp=datetime_from_mtime(abs_path), ) diff --git a/src/borg_import/rsync_tmbackup.py b/src/borg_import/rsync_tmbackup.py index 3c6ca54..ec96e7d 100644 --- a/src/borg_import/rsync_tmbackup.py +++ b/src/borg_import/rsync_tmbackup.py @@ -8,30 +8,28 @@ def get_tmbackup_snapshots(root, prefix): """Get all snapshot metadata discovered in the rsync root directory.""" - regex = re.compile(r'(?P.+)') + regex = re.compile(r"(?P.+)") if not Path("backup.marker").exists(): raise 
FileNotFoundError("backup.marker file should exist for rsync-time-backup import") for path in discover(str(root), 1): parsed = parser(path, regex) - if parsed is not None and parsed['snapshot_date'] not in ("latest",): + if parsed is not None and parsed["snapshot_date"] not in ("latest",): abs_path = root / path meta = dict( - name=make_name("".join([prefix, parsed['snapshot_date']])), + name=make_name("".join([prefix, parsed["snapshot_date"]])), path=abs_path, timestamp=datetime_from_string(path), ) yield meta - elif parsed['snapshot_date'] in ("latest",): + elif parsed["snapshot_date"] in ("latest",): # latest is a symlink to the most recent build. Import it anyway # in case user wants to do borg mount/has existing references # to latest. abs_path = root / path timestamp = Path("latest").resolve().name meta = dict( - name=make_name("".join([prefix, "latest"])), - path=abs_path, - timestamp=datetime_from_string(timestamp), + name=make_name("".join([prefix, "latest"])), path=abs_path, timestamp=datetime_from_string(timestamp) ) yield meta diff --git a/src/borg_import/rsynchl.py b/src/borg_import/rsynchl.py index 28f354d..67f8486 100644 --- a/src/borg_import/rsynchl.py +++ b/src/borg_import/rsynchl.py @@ -7,14 +7,10 @@ def get_rsyncsnapshots(root): """Get all snapshot metadata discovered in the rsync root directory.""" - regex = re.compile(r'(?P.+)') + regex = re.compile(r"(?P.+)") for path in discover(str(root), 1): parsed = parser(path, regex) if parsed is not None: abs_path = root / path - meta = dict( - name=make_name(parsed['snapshot_name']), - path=abs_path, - timestamp=datetime_from_mtime(abs_path), - ) + meta = dict(name=make_name(parsed["snapshot_name"]), path=abs_path, timestamp=datetime_from_mtime(abs_path)) yield meta diff --git a/src/borg_import/testsuite/test_borg.py b/src/borg_import/testsuite/test_borg.py index dbd6b70..d96c830 100644 --- a/src/borg_import/testsuite/test_borg.py +++ b/src/borg_import/testsuite/test_borg.py @@ -24,28 +24,15 @@ def 
test_borg_import(tmpdir, monkeypatch): subprocess.check_call(["borg", "init", "--encryption=none", str(source_repo)]) # Create archives in the source repository - subprocess.check_call([ - "borg", "create", - f"{source_repo}::archive1", - "." - ], cwd=str(archive1_data)) - - subprocess.check_call([ - "borg", "create", - f"{source_repo}::archive2", - "." - ], cwd=str(archive2_data)) + subprocess.check_call(["borg", "create", f"{source_repo}::archive1", "."], cwd=str(archive1_data)) + + subprocess.check_call(["borg", "create", f"{source_repo}::archive2", "."], cwd=str(archive2_data)) # Initialize the target repository subprocess.check_call(["borg", "init", "--encryption=none", str(target_repo)]) # Set up command line arguments for borg-import - monkeypatch.setattr("sys.argv", [ - "borg-import", - "borg", - str(source_repo), - str(target_repo) - ]) + monkeypatch.setattr("sys.argv", ["borg-import", "borg", str(source_repo), str(target_repo)]) # Run the borg-import command main() @@ -61,15 +48,9 @@ def test_borg_import(tmpdir, monkeypatch): extract_dir1 = tmpdir.mkdir("extract1") extract_dir2 = tmpdir.mkdir("extract2") - subprocess.check_call([ - "borg", "extract", - f"{target_repo}::archive1" - ], cwd=str(extract_dir1)) + subprocess.check_call(["borg", "extract", f"{target_repo}::archive1"], cwd=str(extract_dir1)) - subprocess.check_call([ - "borg", "extract", - f"{target_repo}::archive2" - ], cwd=str(extract_dir2)) + subprocess.check_call(["borg", "extract", f"{target_repo}::archive2"], cwd=str(extract_dir2)) # Verify the contents of the extracted archives assert extract_dir1.join("file1.txt").read() == "This is file 1 in archive 1"