From 8e7e1424e5d962938cf2ad4e7699fe95e6273714 Mon Sep 17 00:00:00 2001 From: Kian-Meng Ang Date: Sun, 24 Dec 2023 13:19:07 +0800 Subject: [PATCH] docs: fix typos Found via `codespell -L nd` --- .github/workflows/generate.yaml | 2 +- .github/workflows/quick-docs.yaml | 2 +- .github/workflows/test.yaml | 4 +-- .pylintrc | 2 +- CHANGELOG.rst | 22 +++++++-------- CONTRIBUTING.md | 4 +-- Dockerfile | 2 +- docs/conf.py | 2 +- docs/contents/10_introduction.rst | 2 +- docs/contents/30_usage/index.rst | 2 +- .../guide_quick_docs_release.rst | 2 +- docs/index.rst | 2 +- docs/spelling_wordlist.txt | 2 +- pyproject.toml | 4 +-- scripts/update-snapshot-interactive.sh | 20 ++++++------- scripts/update-snapshot.sh | 20 ++++++------- .../backend/hosting_services/check_engine.py | 2 +- .../backend/hosting_services/check_service.py | 6 ++-- .../backend/hosting_services/extract_name.py | 2 +- .../backend/load_config.py | 2 +- src/cookiecutter_python/backend/main.py | 4 +-- .../handle/dialogs/lib/interpreters_dialog.py | 4 +-- .../hooks/post_gen_project.py | 9 +++--- .../.github/workflows/test.yaml | 4 +-- .../{{ cookiecutter.project_slug }}/.pylintrc | 2 +- .../CONTRIBUTING.md | 2 +- .../docs-mkdocs/build-process_DAG.md | 6 ++-- .../docs-mkdocs/dev_guides/index.md | 6 ++-- .../docs-sphinx/contents/10_introduction.rst | 2 +- .../docs-sphinx/spelling_wordlist.txt | 2 +- .../{{ cookiecutter.project_slug }}/tox.ini | 12 ++++---- tests/biskotaki_ci/conftest.py | 4 +-- .../test_matches_biskotaki_runtime_gen.py | 2 +- tests/conftest.py | 28 +++++++++---------- .../.github/workflows/test.yaml | 4 +-- .../biskotaki-gold-standard/.pylintrc | 2 +- .../biskotaki-gold-standard/CONTRIBUTING.md | 2 +- .../docs/build-process_DAG.md | 2 +- .../snapshots/biskotaki-gold-standard/tox.ini | 12 ++++---- .../.github/workflows/test.yaml | 4 +-- .../snapshots/biskotaki-interactive/.pylintrc | 2 +- .../biskotaki-interactive/CONTRIBUTING.md | 2 +- .../docs/contents/10_introduction.rst | 2 +- 
.../docs/spelling_wordlist.txt | 2 +- .../snapshots/biskotaki-interactive/tox.ini | 12 ++++---- .../.github/workflows/test.yaml | 4 +-- .../snapshots/biskotaki-no-input/.pylintrc | 2 +- .../biskotaki-no-input/CONTRIBUTING.md | 2 +- .../docs/contents/10_introduction.rst | 2 +- .../docs/spelling_wordlist.txt | 2 +- .../data/snapshots/biskotaki-no-input/tox.ini | 12 ++++---- .../test_docs_settings.py | 6 ++-- tests/test_cli.py | 8 +++--- tests/test_gold_standard.py | 6 ++-- tests/test_post_gen_hook_regression.py | 2 +- tests/test_post_hook.py | 8 +++--- tox.ini | 14 +++++----- 57 files changed, 153 insertions(+), 154 deletions(-) diff --git a/.github/workflows/generate.yaml b/.github/workflows/generate.yaml index d3f8a2f2..775f7926 100644 --- a/.github/workflows/generate.yaml +++ b/.github/workflows/generate.yaml @@ -68,7 +68,7 @@ jobs: tox -e "py310{-sdist, -wheel, -dev}" tox -e coverage - ## Verify Development Scripts operate as intented out of the box + ## Verify Development Scripts operate as intended out of the box - name: 'Ruff: Fast Python Linter -> PASS' run: | cd gen/biskotaki diff --git a/.github/workflows/quick-docs.yaml b/.github/workflows/quick-docs.yaml index bdb00729..d243589a 100644 --- a/.github/workflows/quick-docs.yaml +++ b/.github/workflows/quick-docs.yaml @@ -87,7 +87,7 @@ jobs: - run: git fetch - run: git log --graph --decorate --color --all --stat - - run: echo "Expect above log to proove local and origin empemeral-doc-updates branches are deleted" + - run: echo "Expect above log to prove local and origin empemeral-doc-updates branches are deleted" ### WORKFLOW No 2 - EMBEDDED ### diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 22a0daad..eb2f7610 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -44,7 +44,7 @@ env: # Override Docker Policy-dependent decision-making and # Accept any ALL (branch/build) to Publish to Dockerhub # if true, it will push image and ignore DOCKER_JOB_POLICY - 
ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false" + ALWAYS_BUILD_N_PUBLISH_DOCKER: "false" DOCKER_JOB_POLICY: "CDeployment" # - CDeployment : Builds and Publishes only if Tests ran and passed @@ -86,7 +86,7 @@ jobs: echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT ## Docker - Pipeline Settings ## - id: derive_docker_policy - run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLSIH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT + run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLISH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT ## Static Code Analysis - Pipeline Settings ## - id: derive_sqa_policy run: echo "POL=${{ (env.RUN_LINT_CHECKS != 'true' && '0') || (env.ALWAYS_LINT == 'true' && '1') || env.LINT_JOB_POLICY }}" >> $GITHUB_OUTPUT diff --git a/.pylintrc b/.pylintrc index 883eae7a..6a5edcb0 100644 --- a/.pylintrc +++ b/.pylintrc @@ -60,7 +60,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 56bc3363..4607ca52 100755 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -21,10 +21,10 @@ out-of-the-box produced by the `Generator` as part of the **CI/CD Pipeline** configuration YAML files, to be **Configurable by Policy**. *Design* a **High-Level** interface, for *configuring the CI/CD Behaviour*, allowing: -- seemless switching between **Policies**, on a per-Job level +- seamless switching between **Policies**, on a per-Job level - Easy **Override** to `"shutdown" Job"`, ie to *prevent upload*, by-passing `decision-making` - Easy **Override** to `"force Job"`, ie for *quick trial*, by-passing `decision-making` - governing desicion-making, on the Workflows/Jobs. + governing decision-making, on the Workflows/Jobs. A **Policy** governs how a Job behaves (ie if it should trigger), and each one yiels a *distinct* behaviour. 
@@ -65,7 +65,7 @@ feature """"""" - introduce `Ruff`, Fast Python Linter, in updated `Developer's Tool chain `_ - run `Ruff` against code, as part of the Tools included in the `Static Code Analysis` CI Workflow, produced by the **Generator's** Template -- run legacy `Pylint` agaist code, in dedicated Job, for easier potential retirement +- run legacy `Pylint` against code, in dedicated Job, for easier potential retirement - make Template Code pass Ruff Checks/Evaluations - add `Ruff`, `tox -e ruff`, as available `tox command`` for fast `Static Code Check` @@ -150,7 +150,7 @@ Changes ci "" - remove unused workflow -- fix workflow syntax due to merge confict resolution artifacts +- fix workflow syntax due to merge conflict resolution artifacts 1.8.1 (2023-12-15) @@ -191,7 +191,7 @@ test - verify `No Regression` of Generator, with exhaustive comparison of Runtime result to `Gold Standard` - test `Gold Standard` passes `tox -e lint` - ignore Tests inside Snapshots, during `Test Discovery` of Pytest -- verify `User Config` backwords compatibility, with regard to new `Docs` Generator Feature +- verify `User Config` backwards compatibility, with regard to new `Docs` Generator Feature - test default gen behaviour related to Docs, is same as before adding mkdocs option - yaml validation and required/expected workflow vars checks @@ -368,7 +368,7 @@ Changes test """" -- manually covert gitpython outputted string paths into Path instances +- manually convert gitpython outputted string paths into Path instances - use Path from pathlib instead of the os.path module refactor @@ -379,7 +379,7 @@ refactor ci "" -- excplicitly use bash as the shell for some job steps +- explicitly use bash as the shell for some job steps 1.6.0 (2022-06-28) @@ -407,7 +407,7 @@ Changes feature """"""" - add 'project_type' Variable allowing for 'module', 'module+cli' or 'pytest-plugin' Projects -- conditionaly populate 'test' dependencies, ie based on whether there is a cli entrypoint +- conditionally 
populate 'test' dependencies, ie based on whether there is a cli entrypoint 1.5.2 (2022-06-22) @@ -551,7 +551,7 @@ to support and be tested on. The generator then creates the Test Matrix in the CI config file, which factors in the Python Interpreter versions supplied by the user. -Consistent with the currect behaviour of the cli, passing the '--no-input' flag, +Consistent with the current behaviour of the cli, passing the '--no-input' flag, instructs the Generator to try find the selected interpreters in a config yaml file, if given, or else to use the information in the cookiecutter.json. @@ -565,7 +565,7 @@ Development ----------- All tox environments related to 'Linting' now all do by default a 'check'. -Doing a 'check' means returning a 0 as exit code in case the check is successfull +Doing a 'check' means returning a 0 as exit code in case the check is successful and it is suitable for local and remote running on a CI server. The aforementioned environments are 'lint', 'black', 'isort': @@ -606,7 +606,7 @@ build 1.2.1 (2022-05-27) ================== -Compeltely migrate away from *setup.cfg*. +Completely migrate away from *setup.cfg*. Add Issue Templates, as markdown files, to help create well documented Issues on github. Changes diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index cbd19acf..a4de196b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -19,7 +19,7 @@ You can contribute in many ways: Report bugs at [https://github.com/boromir674/cookiecutter-python-package/issues](https://github.com/boromir674/cookiecutter-python-package/issues). -Stmbling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following infromation by filling in [the template](https://github.com/boromir674/cookiecutter-python-package/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). +Stmbling upon a Bug means encountering different behaviour than the expected/advertised one. 
When you are reporting a bug, please include the following information by filling in [the template](https://github.com/boromir674/cookiecutter-python-package/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. @@ -90,7 +90,7 @@ git checkout -b name-of-your-bugfix-or-feature Now you can make your changes locally. -Probably, you should start by writting test case(s) and then the production code. +Probably, you should start by writing test case(s) and then the production code. 1. When you're done making changes, check that your changes pass the tests locally. diff --git a/Dockerfile b/Dockerfile index 00d7a161..c296c42e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ FROM python:3.9.16-slim-bullseye as builder # for inspiration COPY poetry.lock pyproject.toml ./ -# Envrironment Configuration +# Environment Configuration ## See https://github.com/alejandro-angulo/poetry/blob/master/docs/configuration.md # Determine where to install poetry ENV POETRY_HOME=/opt/poetry diff --git a/docs/conf.py b/docs/conf.py index 0afdae13..7bb3a297 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,7 +21,7 @@ sys.path.insert(0, os.path.abspath('../src/cookiecutter_python')) -# Please use the Sphinx format for writting docstrings (other fornats include Google and Numpy which require the 'napoleon' extension). +# Please use the Sphinx format for writing docstrings (other formats include Google and Numpy which require the 'napoleon' extension). 
# -- Project information ----------------------------------------------------- diff --git a/docs/contents/10_introduction.rst b/docs/contents/10_introduction.rst index 5aa289e4..9602d441 100644 --- a/docs/contents/10_introduction.rst +++ b/docs/contents/10_introduction.rst @@ -6,7 +6,7 @@ Introduction | The Template is implemented as a *cookiecutter* and it is available both as source code and as a Python Package in itself. | Goal of this project is to automate the process of creating a new Python Package, by providing the user with a "bootstrap" method, -| to quickly set up all the *support* files required to seemlessly build and publish the package on pypi.org (the official Python Pcakge Index public server). +| to quickly set up all the *support* files required to seamlessly build and publish the package on pypi.org (the official Python Package Index public server). | Additionally, it instruments a basic **Test Suite**, multiple **Commands**, as well as a **CI** pipeline, with parallel execution of the *build matrix*, running on *Github Actions*. | This documentation aims to help people understand what are the features of the library and how they can use diff --git a/docs/contents/30_usage/index.rst b/docs/contents/30_usage/index.rst index c2ed3307..83c11611 100644 --- a/docs/contents/30_usage/index.rst +++ b/docs/contents/30_usage/index.rst @@ -24,7 +24,7 @@ Generated Python Project Use Cases .. include:: use_cases.rst -.. Reminders on how to be consistent accross sectoining and their semantics +.. Reminders on how to be consistent across sectioning and their semantics ======== DocTitle ======== diff --git a/docs/contents/35_development/guide_quick_docs_release.rst b/docs/contents/35_development/guide_quick_docs_release.rst index fa0d78e9..1796a665 100644 --- a/docs/contents/35_development/guide_quick_docs_release.rst +++ b/docs/contents/35_development/guide_quick_docs_release.rst @@ -2,7 +2,7 @@ This should not either. 
Content of the 'How to do quick Docs Release Guide' As a guide it is a sequence of steps, that one must follow to achieve a goal. - As a guide, each step's expected resuilt, can be described (soft requirement, but hard on tutorials). + As a guide, each step's expected result, can be described (soft requirement, but hard on tutorials). Where applicable, the effect of each step is described, so user knows what to expect. can be used with .. include:: diff --git a/docs/index.rst b/docs/index.rst index 00c4954b..58faff6f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -35,7 +35,7 @@ Indices and tables * :ref:`modindex` * :ref:`search` -.. Reminders on how to be consistent accross sectoining and their semantics +.. Reminders on how to be consistent across sectioning and their semantics ======== DocTitle ======== diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index ae61b26a..7f65c4ee 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -32,7 +32,7 @@ deserialization discretization interpretable pre -accomodate +accommodate eg ie iterable diff --git a/pyproject.toml b/pyproject.toml index fe3b4e13..128cacb7 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -185,7 +185,7 @@ typing = [ minversion = "6.2" # hard-inject args, when invoking `pytest` CLI addopts = "--strict-markers --ignore=tests/data" -# for explict --run-integration -> pip install pytest-explicit +# for explicit --run-integration -> pip install pytest-explicit markers = [ "slow: Marks a slow test", "integration: Tests applicable to a newly Generated Project, running with tox", @@ -206,7 +206,7 @@ explicit-only = [ line-length = 95 include = '\.pyi?$' extend-exclude = ''' -# A regex preceeded with ^/ will apply only to files and directories +# A regex preceded with ^/ will apply only to files and directories # in the root of the project. 
# ^/foo.py # exclude a file named foo.py in the root of the project (in addition to the defaults) tests/smoke_test.py| diff --git a/scripts/update-snapshot-interactive.sh b/scripts/update-snapshot-interactive.sh index de9b40f4..8cf7a743 100644 --- a/scripts/update-snapshot-interactive.sh +++ b/scripts/update-snapshot-interactive.sh @@ -17,32 +17,32 @@ set -e echo .tox/dev/bin/generate-python --config-file .github/biskotaki.yaml -o /tmp/ -### UPDATE SHAPSHOT, by Copying all Generated files and folders recursively ### -INTERACTIVE_SHAPSHOT=${INTERACTIVE_SHAPSHOT:-tests/data/snapshots/biskotaki-interactive} +### UPDATE SNAPSHOT, by Copying all Generated files and folders recursively ### +INTERACTIVE_SNAPSHOT=${INTERACTIVE_SNAPSHOT:-tests/data/snapshots/biskotaki-interactive} set +e -rm -rf "${INTERACTIVE_SHAPSHOT}" +rm -rf "${INTERACTIVE_SNAPSHOT}" set -e # copy generated biskotaki to 'biskotaki-no-input' test Snapshot -cp -r /tmp/biskotaki/ "${INTERACTIVE_SHAPSHOT}" +cp -r /tmp/biskotaki/ "${INTERACTIVE_SNAPSHOT}" # show diff of biskotaki-interactive echo -git diff --stat "${INTERACTIVE_SHAPSHOT}" +git diff --stat "${INTERACTIVE_SNAPSHOT}" -# get only last part of path from NO_INPUT_SHAPSHOT -INTERACTIVE_SHAPSHOT_NAME=$(echo "${INTERACTIVE_SHAPSHOT}" | awk -F/ '{print $NF}') +# get only last part of path from NO_INPUT_SNAPSHOT +INTERACTIVE_SNAPSHOT_NAME=$(echo "${INTERACTIVE_SNAPSHOT}" | awk -F/ '{print $NF}') echo echo "Next steps:" echo -echo "git add ${INTERACTIVE_SHAPSHOT}" -echo "git commit -m \"tests(data): update ${INTERACTIVE_SHAPSHOT_NAME} Snapshot, used in Regression Testing\"" +echo "git add ${INTERACTIVE_SNAPSHOT}" +echo "git commit -m \"tests(data): update ${INTERACTIVE_SNAPSHOT_NAME} Snapshot, used in Regression Testing\"" echo ## GIT ADD ## -# git add "${INTERACTIVE_SHAPSHOT}" +# git add "${INTERACTIVE_SNAPSHOT}" # echo ## GIT COMMIT ## diff --git a/scripts/update-snapshot.sh b/scripts/update-snapshot.sh index 37a75b2b..97dab9c5 100644 --- 
a/scripts/update-snapshot.sh +++ b/scripts/update-snapshot.sh @@ -17,32 +17,32 @@ echo .tox/dev/bin/generate-python --no-input --config-file .github/biskotaki.yaml -o /tmp/ -### UPDATE SHAPSHOT, by Copying all Generated files and folders recursively ### -NO_INPUT_SHAPSHOT=${NO_INPUT_SHAPSHOT:-tests/data/snapshots/biskotaki-no-input} +### UPDATE SNAPSHOT, by Copying all Generated files and folders recursively ### +NO_INPUT_SNAPSHOT=${NO_INPUT_SNAPSHOT:-tests/data/snapshots/biskotaki-no-input} set +e -rm -rf "${NO_INPUT_SHAPSHOT}" +rm -rf "${NO_INPUT_SNAPSHOT}" set -e # copy generated biskotaki to 'biskotaki-no-input' test Snapshot -cp -r /tmp/biskotaki/ "${NO_INPUT_SHAPSHOT}" +cp -r /tmp/biskotaki/ "${NO_INPUT_SNAPSHOT}" # show diff of biskotaki-no-input echo -git diff --stat "${NO_INPUT_SHAPSHOT}" +git diff --stat "${NO_INPUT_SNAPSHOT}" -# get only last part of path from NO_INPUT_SHAPSHOT -NO_INPUT_SHAPSHOT_NAME=$(echo "${NO_INPUT_SHAPSHOT}" | awk -F/ '{print $NF}') +# get only last part of path from NO_INPUT_SNAPSHOT +NO_INPUT_SNAPSHOT_NAME=$(echo "${NO_INPUT_SNAPSHOT}" | awk -F/ '{print $NF}') echo echo "Next steps:" echo -echo "git add ${NO_INPUT_SHAPSHOT}" -echo "git commit -m \"tests(data): update ${NO_INPUT_SHAPSHOT_NAME} Snapshot, used in Regression Testing\"" +echo "git add ${NO_INPUT_SNAPSHOT}" +echo "git commit -m \"tests(data): update ${NO_INPUT_SNAPSHOT_NAME} Snapshot, used in Regression Testing\"" echo ## GIT ADD ## -# git add "${NO_INPUT_SHAPSHOT}" +# git add "${NO_INPUT_SNAPSHOT}" # echo ## GIT COMMIT ## diff --git a/src/cookiecutter_python/backend/hosting_services/check_engine.py b/src/cookiecutter_python/backend/hosting_services/check_engine.py index 3032763d..bd0dde4a 100644 --- a/src/cookiecutter_python/backend/hosting_services/check_engine.py +++ b/src/cookiecutter_python/backend/hosting_services/check_engine.py @@ -57,6 +57,6 @@ def create(config_file, default_config): return Engine( config_file, default_config, - # load implementations and 
automatically instatiate all + # load implementations and automatically instantiate all tuple((HostingServices.create(x) for x in ('pypi', 'readthedocs'))), ) diff --git a/src/cookiecutter_python/backend/hosting_services/check_service.py b/src/cookiecutter_python/backend/hosting_services/check_service.py index d2123a2d..7c8921bd 100644 --- a/src/cookiecutter_python/backend/hosting_services/check_service.py +++ b/src/cookiecutter_python/backend/hosting_services/check_service.py @@ -25,7 +25,7 @@ class ServiceChecker: config_file_path: str def __call__(self): - """Check the remote server for existing resource, if feture is enabled. + """Check the remote server for existing resource, if feature is enabled. Returns: Optional[CheckResult]: result of the check operation @@ -40,8 +40,8 @@ def __call__(self): # we assume that client deliberately had the activate flag on # only because they know that the way the Generator has been # parametrized (ie from CLI), - # accounting for User Config or Default Config precedance, is such - # that on Generator call the User Config will have precendence. + # accounting for User Config or Default Config precedence, is such + # that on Generator call the User Config will have precedence. # But this is design to be call in pre_main, so rendering has # not happened yet, so we can't rely on the User Config. 
diff --git a/src/cookiecutter_python/backend/hosting_services/extract_name.py b/src/cookiecutter_python/backend/hosting_services/extract_name.py index 0fb90230..9bf3284c 100644 --- a/src/cookiecutter_python/backend/hosting_services/extract_name.py +++ b/src/cookiecutter_python/backend/hosting_services/extract_name.py @@ -32,7 +32,7 @@ def __call__(self, config_file: str) -> str: except KeyError as error: raise ContextVariableDoesNotExist( "{msg}: {data}".format( - msg="Attempted to retrieve non-existant variable", + msg="Attempted to retrieve non-existent variable", data=json.dumps( { 'variable_name': str(self.name_extractor), diff --git a/src/cookiecutter_python/backend/load_config.py b/src/cookiecutter_python/backend/load_config.py index d8630cad..2d6f59d4 100644 --- a/src/cookiecutter_python/backend/load_config.py +++ b/src/cookiecutter_python/backend/load_config.py @@ -34,7 +34,7 @@ def get_interpreters_from_yaml(config_file: str) -> t.Optional[GivenInterpreters UserYamlDesignError: if yaml does not contain the 'default_context' key Returns: - GivenInterpreters: dictionary with intepreters as a sequence of strings, + GivenInterpreters: dictionary with interpreters as a sequence of strings, mapped to the 'supported-interpreters' key """ data = load_yaml(config_file) diff --git a/src/cookiecutter_python/backend/main.py b/src/cookiecutter_python/backend/main.py index a99dd5ad..9cc6e368 100644 --- a/src/cookiecutter_python/backend/main.py +++ b/src/cookiecutter_python/backend/main.py @@ -36,7 +36,7 @@ def generate( print('Start Python Generator !') # Initialize Generation Request: # - store the CI Test Matrix Python Interpreters versions list - # - prompt for user input in interactive or atempt to read from yaml otherwise + # - prompt for user input in interactive or attempt to read from yaml otherwise # - prepare Cookiecutter extra context: # - add interpreters versions list # - store 'docs' folder, per docs builder, that Generator supports @@ -82,7 +82,7 @@ def 
generate( ) ## POST GENERATION ## # Check if out-of-the-box Generated Project, coincidentally, requires slight modifications - # for automatic and seemless "PyPI Upload" and "ReadTheDocs Build" process to + # for automatic and seamless "PyPI Upload" and "ReadTheDocs Build" process to # work. This can happen if the project name is already taken by another project # on PyPI or ReadTheDocs. post_main(request) diff --git a/src/cookiecutter_python/handle/dialogs/lib/interpreters_dialog.py b/src/cookiecutter_python/handle/dialogs/lib/interpreters_dialog.py index fd566388..e1d09616 100644 --- a/src/cookiecutter_python/handle/dialogs/lib/interpreters_dialog.py +++ b/src/cookiecutter_python/handle/dialogs/lib/interpreters_dialog.py @@ -5,7 +5,7 @@ from ..dialog import InteractiveDialog QuestionaryQuestion = Mapping[str, Optional[Union[str, Mapping, Callable]]] -QuestionaryPromtQuestions = Union[QuestionaryQuestion, Sequence[QuestionaryQuestion]] +QuestionaryPromptQuestions = Union[QuestionaryQuestion, Sequence[QuestionaryQuestion]] QuestionaryAnswers = Mapping[str, Any] @@ -14,7 +14,7 @@ # except ImportError: # def prompt( -# questions: PyInquirerPromtQuestions, answers: PyInquirerAnswers = None, **kwargs: Any +# questions: PyInquirerPromptQuestions, answers: PyInquirerAnswers = None, **kwargs: Any # ) -> PyInquirerAnswers: # return {} diff --git a/src/cookiecutter_python/hooks/post_gen_project.py b/src/cookiecutter_python/hooks/post_gen_project.py index e77b2a57..f73ff854 100644 --- a/src/cookiecutter_python/hooks/post_gen_project.py +++ b/src/cookiecutter_python/hooks/post_gen_project.py @@ -27,7 +27,6 @@ # ie: if we scaffold new Project at /data/my-project/README.md, /data/my-project/src # then GEN_PROJ_LOC = /data/my-project GEN_PROJ_LOC = os.path.realpath(os.path.curdir) - # Doc Builders docs default location, after Generation DOCS: t.Dict[str, str] = get_docs_gen_internal_config() @@ -93,9 +92,9 @@ class PostFileRemovalError(Exception): ('tests', 'test_cli.py'), 
('tests', 'test_invoking_cli.py'), ] -# Pytest plugin must use the legacy setuptools backend (no poetry) +# Pytest plugins must use the legacy setuptools backend (no poetry) # thus the setup.cfg and MANIFEST.in files are required -# Pytest pluging usually declare their public API in fixtures.py +# Pytest plugin usually declare their public API in fixtures.py PYTEST_PLUGIN_ONLY = lambda x: [ ('src', x.module_name, 'fixtures.py'), ('tests', 'conftest.py'), @@ -159,7 +158,7 @@ def post_file_removal(request): # file is created inside the Generated Project Folder. # Note: at Generator runtime, the user should still expect Captured Logs to - # be written a File in their Shell's PWD, as designed and intented. + # be written a File in their Shell's PWD, as designed and intended. # remove the log file, if it exists and it is empty logs_file: Path = Path(request.project_dir) / FILE_TARGET_LOGS @@ -169,7 +168,7 @@ def post_file_removal(request): # safely remove the empty log file try: logs_file.unlink() - # windows erro reported on CI + # windows error reported on CI # PermissionError: [WinError 32] The process cannot access the file because it is being used by another process except PermissionError as e: print(f"[WARNING]: {e}") diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/test.yaml b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/test.yaml index 2d3824f1..71b325c7 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/test.yaml +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.github/workflows/test.yaml @@ -46,7 +46,7 @@ env: # Override Docker Policy-dependent decision-making and # Accept any ALL (branch/build) to Publish to Dockerhub # if true, it will push image and ignore DOCKER_JOB_POLICY - ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false" + ALWAYS_BUILD_N_PUBLISH_DOCKER: "false" DOCKER_JOB_POLICY: "CDeployment" # - CDeployment : Builds and Publishes only if Tests ran 
and passed @@ -93,7 +93,7 @@ jobs: echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT ## Docker - Pipeline Settings ## - id: derive_docker_policy - run: echo "POL=${{ "{{" }} (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLSIH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') {{ "}}" }}" >> $GITHUB_OUTPUT + run: echo "POL=${{ "{{" }} (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLISH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') {{ "}}" }}" >> $GITHUB_OUTPUT ## Static Code Analysis - Pipeline Settings ## - id: derive_sqa_policy run: echo "POL=${{ "{{" }} (env.RUN_LINT_CHECKS != 'true' && '0') || (env.ALWAYS_LINT == 'true' && '1') || env.LINT_JOB_POLICY {{ "}}" }}" >> $GITHUB_OUTPUT diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.pylintrc b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.pylintrc index b46c19e3..79e24834 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.pylintrc +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/.pylintrc @@ -60,7 +60,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/CONTRIBUTING.md b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/CONTRIBUTING.md index c2501017..3f4fbb0a 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/CONTRIBUTING.md +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/CONTRIBUTING.md @@ -19,7 +19,7 @@ You can contribute in many ways: Report bugs at [https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}/issues](https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}/issues). W -Stambling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following infromation by filling in [the template](https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). +Stumbling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following information by filling in [the template](https://github.com/{{ cookiecutter.github_username }}/{{ cookiecutter.repo_name }}/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/build-process_DAG.md b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/build-process_DAG.md index 30d2ae1a..88dfba2c 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/build-process_DAG.md +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/build-process_DAG.md @@ -2,16 +2,16 @@ `docker build` possible execution paths. 
-Flow Chart, of how exection navigates docker stages (see --target of docker build). +Flow Chart, of how execution navigates docker stages (see --target of docker build). If you run `docker build .` the `target` used by default is the `default_with_demo` Stage in the Graph. **Dockerfile: ./Dockerfile** -{# we have include 'dockerfile_mermaid.md' statment below #} +{# we have include 'dockerfile_mermaid.md' statement below #} {# intention is to leverage markdown imports, on docs build time #} {# it should not affect dynamically the Generator behaviour #} -{# so we must enusre that jinja does, treats below as literal, and not try to interpret #} +{# so we must ensure that jinja does, treats below as literal, and not try to interpret #} {% raw %}{% include 'dockerfile_mermaid.md' %}{% endraw %} diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/dev_guides/index.md b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/dev_guides/index.md index 7949cf41..c1f9ba5c 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/dev_guides/index.md +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-mkdocs/dev_guides/index.md @@ -7,11 +7,11 @@ a Docker Image and **publish it to Dockerhub**, how to do `Static Code Analysis` ## How to prevent any Image from being published to Dockerhub -1. Open your `.github/workflows/test.yaml`, and look for the **Worfklow Variables** +1. Open your `.github/workflows/test.yaml`, and look for the **Workflow Variables** - **Worfklow Variables** are defined in the `env` *section* + **Workflow Variables** are defined in the `env` *section* -2. Check the *value* of the `DOCKER_JOB_ON` **Worfklow Variable** +2. 
Check the *value* of the `DOCKER_JOB_ON` **Workflow Variable** [this is line is not rendered; markdown comment]: # diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/contents/10_introduction.rst b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/contents/10_introduction.rst index b3730a70..3a6eab57 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/contents/10_introduction.rst +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/contents/10_introduction.rst @@ -2,7 +2,7 @@ Introduction ============ -| This is **{{ cookiecutter.project_name }}**, a *Python Package* desinged to ... +| This is **{{ cookiecutter.project_name }}**, a *Python Package* designed to ... | Goal of this project is to TODO Document | Additionally, TODO Document diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/spelling_wordlist.txt b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/spelling_wordlist.txt index 57ddc574..c5d44903 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/spelling_wordlist.txt +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/docs-sphinx/spelling_wordlist.txt @@ -29,7 +29,7 @@ deserialization discretization interpretable pre -accomodate +accommodate eg ie iterable diff --git a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/tox.ini b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/tox.ini index 38c1d9b4..4affc91f 100644 --- a/src/cookiecutter_python/{{ cookiecutter.project_slug }}/tox.ini +++ b/src/cookiecutter_python/{{ cookiecutter.project_slug }}/tox.ini @@ -37,7 +37,7 @@ passenv = codecov: CI codecov: TRAVIS TRAVIS_* setenv = -# It will overide variables in passenv in case of collision +# It will override variables in passenv in case of collision PYTHONPATH = {toxinidir}{/}tests PYTHONBUFFERED = yes TEST_RESULTS_DIR = {toxinidir}{/}test-results @@ -103,7 +103,7 
@@ commands = --cov-report=xml:{toxworkdir}/coverage.{envname}.xml \ {posargs:-n auto} tests --run-integration --run-network_bound -# Designed for local developement +# Designed for local development [testenv:dev] description = Using `python3` in PATH: Install in 'edit' mode & Test basepython = {env:TOXPYTHON:python3} @@ -238,8 +238,8 @@ depends = build ## DEPLOYMENT [testenv:deploy] -# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy -# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy +# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy +# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy description = Deploy the python package to be hosted in a pypi server. Requires to authenticate with the pypi server, so please set the TWINE_PASSWORD and TWINE_PASSWORD environment variables. Also, requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution @@ -261,9 +261,9 @@ commands_pre = python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. 
Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");' - python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* + python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* commands = - python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose + python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose ## COVERAGE diff --git a/tests/biskotaki_ci/conftest.py b/tests/biskotaki_ci/conftest.py index 1ffe5b26..08f1c746 100644 --- a/tests/biskotaki_ci/conftest.py +++ b/tests/biskotaki_ci/conftest.py @@ -64,7 +64,7 @@ def biskotaki_ci_project( # this commit somehow makes CI on Linux to break. 
But not on dev machine - # issue a pytest warning whever the Log File is not created as it should + # issue a pytest warning whenever the Log File is not created as it should if not INTENTIONALLY_PLACED_LOG_FILE.exists(): pytest.warns( UserWarning, @@ -72,7 +72,7 @@ def biskotaki_ci_project( ) ###### Document kind of Bug ###### - # Expected but probably unintented behaviour: + # Expected but probably unintended behaviour: # - empty log file gets created inside the gen project dir # Log file is placed inside the generated project dir, after generation diff --git a/tests/biskotaki_ci/snapshot/test_matches_biskotaki_runtime_gen.py b/tests/biskotaki_ci/snapshot/test_matches_biskotaki_runtime_gen.py index 6871e77a..0ac86f03 100644 --- a/tests/biskotaki_ci/snapshot/test_matches_biskotaki_runtime_gen.py +++ b/tests/biskotaki_ci/snapshot/test_matches_biskotaki_runtime_gen.py @@ -112,7 +112,7 @@ def test_snapshot_matches_runtime(snapshot, biskotaki_ci_project, test_root): # so, we hard exclude the line starting with the '0.0.1' string, to avoid # comparing rolling date with the static one in the snapshot - # first compare CHANGLOG files, then all other files + # first compare CHANGELOG files, then all other files snapshot_changelog = snapshot_dir / 'CHANGELOG.rst' # the expectation runtime_changelog = runtime_biskotaki / 'CHANGELOG.rst' # the reality diff --git a/tests/conftest.py b/tests/conftest.py index ed70779c..5a0a4eb3 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -65,7 +65,7 @@ def generate_project() -> t.Callable[[ProjectGenerationRequestDataProtocol], str def _generate_project(generate_request: ProjectGenerationRequestDataProtocol) -> str: assert isinstance( generate_request.template, str - ), f"Expexted str for template, got {type(generate_request.template)}" + ), f"Expected str for template, got {type(generate_request.template)}" return cookiecutter( generate_request.template, no_input=True, @@ -119,7 +119,7 @@ def hook_request_new(distro_loc): MUST
be kept in SYNC with the 'pre' and 'post' hook scripts, and their interface. - Before and after the actual generation process (ie read the termplate files, + Before and after the actual generation process (ie read the template files, generate the output files, etc), there 2 scripts that run. The 'pre' script (implemented as src/cookiecutter/hooks/pre_gen_project.py) and the 'post' script (implemented as src/cookiecutter/hooks/post_gen_project.py) run @@ -326,7 +326,7 @@ def __new__(cls): cls._instance.future_session_mock_instance = future_session_mock( url_2_code ) - # when 'get' method is called, the insted of real futures behavior + # when 'get' method is called, the instead of real futures behavior # our mock will be called instead, which immediately returns # an object, which provides the 'result' attribute, which can # immediately be evaluated. @@ -394,7 +394,7 @@ def mock_futures_session(self): overrides={'FuturesSession': lambda: futures_session_class_mock}, ) # EFFECT: - # when client code invokes the __call__ method of a WebHostingServiceChecker isntance obj + # when client code invokes the __call__ method of a WebHostingServiceChecker instance obj # then 1. will create a session, not (as in prod) as an instance of FuturesSession class, # but as an instance of our mock class (singleton) # 2. will call the 'get' method of our mock session (not the prod instance method of FuturesSession class) @@ -419,7 +419,7 @@ def __call__(self, service_name: str, found: bool): "cannot determine 'name' (ie pypi, readthedocs) from config file", ) ) - # Emulate singal emitted by the WebHostingServiceChecker + # Emulate signal emitted by the WebHostingServiceChecker self.futures_session_instance_mock.url_2_code[url] = 200 if found else 404 return MockCheck() @@ -502,7 +502,7 @@ def parameters( Generate, positional and/or optional (ie flags) cli arguments. 
- Input kwargs can be used to overide the default values for the flags + Input kwargs can be used to override the default values for the flags specified in class Args (see above). Args: @@ -797,7 +797,7 @@ def _get_expected_generated_files(config): pkg_name: str = config.data['pkg_name'] assert ( 'docs_builder' in config.data - ), f"Missing 'docs_builder' in {config.data}. Probaly, user config Yaml supplied is missing templated values, required by cookiecutter.json." + ), f"Missing 'docs_builder' in {config.data}. Probably, user config Yaml supplied is missing templated values, required by cookiecutter.json." user_docs_builder_id: str = config.data['docs_builder'] expected_gen_files: t.Set[Path] = set() @@ -956,7 +956,7 @@ def _get_expected_generated_files(config): ] ) # Regression Test - # assert no .pyc files apear as has reported on sdist installation + # assert no .pyc files appear as has reported on sdist installation assert not any( [str(x).endswith('.pyc') for x in expected_gen_files] ), f"Sanity check fail: {expected_gen_files}" @@ -1072,7 +1072,7 @@ def is_nested_file_committed(rel_path, tree): blobs_set = {Path(blob.path) for blob in parent_tree} return rel_path in blobs_set - def _assert_files_commited(folder, config): + def _assert_files_committed(folder, config): print("\n HERE") try: repo = assert_initialized_git(folder) @@ -1081,7 +1081,7 @@ def _assert_files_commited(folder, config): assert head tree = repo.heads.master.commit.tree - def file_commited(relative_path: Path): + def file_committed(relative_path: Path): assert str(relative_path)[-1] != '/' splitted = path.split(relative_path) @@ -1106,10 +1106,10 @@ def file_commited(relative_path: Path): assert 0, "Print Runtime Generated Files: " + '\n'.join( [str(f) for f in runtime_generated_files] ) - # below we assert that all the expected files have been commited: - # 1st assert all generated runtime project files have been commited + # below we assert that all the expected files have been 
committed: + # 1st assert all generated runtime project files have been committed for f in sorted(runtime_generated_files): - assert file_commited(f) + assert file_committed(f) # 2nd assert the generated files exactly match the expected ones expected_generated_files = get_expected_generated_files(config) assert set(runtime_generated_files) == set(expected_generated_files) @@ -1129,4 +1129,4 @@ def file_commited(relative_path: Path): except InvalidGitRepositoryError: return - return _assert_files_commited + return _assert_files_committed diff --git a/tests/data/snapshots/biskotaki-gold-standard/.github/workflows/test.yaml b/tests/data/snapshots/biskotaki-gold-standard/.github/workflows/test.yaml index 067d047c..9511d223 100644 --- a/tests/data/snapshots/biskotaki-gold-standard/.github/workflows/test.yaml +++ b/tests/data/snapshots/biskotaki-gold-standard/.github/workflows/test.yaml @@ -46,7 +46,7 @@ env: # Override Docker Policy-dependent decision-making and # Accept any ALL (branch/build) to Publish to Dockerhub # if true, it will push image and ignore DOCKER_JOB_POLICY - ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false" + ALWAYS_BUILD_N_PUBLISH_DOCKER: "false" DOCKER_JOB_POLICY: "CDeployment" # - CDeployment : Builds and Publishes only if Tests ran and passed @@ -93,7 +93,7 @@ jobs: echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT ## Docker - Pipeline Settings ## - id: derive_docker_policy - run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLSIH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT + run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLISH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT ## Static Code Analysis - Pipeline Settings ## - id: derive_sqa_policy run: echo "POL=${{ (env.RUN_LINT_CHECKS != 
'true' && '0') || (env.ALWAYS_LINT == 'true' && '1') || env.LINT_JOB_POLICY }}" >> $GITHUB_OUTPUT diff --git a/tests/data/snapshots/biskotaki-gold-standard/.pylintrc b/tests/data/snapshots/biskotaki-gold-standard/.pylintrc index b46c19e3..79e24834 100644 --- a/tests/data/snapshots/biskotaki-gold-standard/.pylintrc +++ b/tests/data/snapshots/biskotaki-gold-standard/.pylintrc @@ -60,7 +60,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes diff --git a/tests/data/snapshots/biskotaki-gold-standard/CONTRIBUTING.md b/tests/data/snapshots/biskotaki-gold-standard/CONTRIBUTING.md index b0b6a218..eb170b61 100644 --- a/tests/data/snapshots/biskotaki-gold-standard/CONTRIBUTING.md +++ b/tests/data/snapshots/biskotaki-gold-standard/CONTRIBUTING.md @@ -19,7 +19,7 @@ You can contribute in many ways: Report bugs at [https://github.com/boromir674/biskotaki-gold/issues](https://github.com/boromir674/biskotaki-gold/issues). W -Stambling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following infromation by filling in [the template](https://github.com/boromir674/biskotaki-gold/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). +Stambling upon a Bug means encountering different behaviour than the expected/advertised one. 
When you are reporting a bug, please include the following information by filling in [the template](https://github.com/boromir674/biskotaki-gold/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. diff --git a/tests/data/snapshots/biskotaki-gold-standard/docs/build-process_DAG.md b/tests/data/snapshots/biskotaki-gold-standard/docs/build-process_DAG.md index a2c75529..011cd6eb 100644 --- a/tests/data/snapshots/biskotaki-gold-standard/docs/build-process_DAG.md +++ b/tests/data/snapshots/biskotaki-gold-standard/docs/build-process_DAG.md @@ -2,7 +2,7 @@ `docker build` possible execution paths. -Flow Chart, of how exection navigates docker stages (see --target of docker build). +Flow Chart, of how execution navigates docker stages (see --target of docker build). If you run `docker build .` the `target` used by default is the `default_with_demo` Stage in the Graph. diff --git a/tests/data/snapshots/biskotaki-gold-standard/tox.ini b/tests/data/snapshots/biskotaki-gold-standard/tox.ini index cf85e18b..d212308b 100644 --- a/tests/data/snapshots/biskotaki-gold-standard/tox.ini +++ b/tests/data/snapshots/biskotaki-gold-standard/tox.ini @@ -37,7 +37,7 @@ passenv = codecov: CI codecov: TRAVIS TRAVIS_* setenv = -# It will overide variables in passenv in case of collision +# It will override variables in passenv in case of collision PYTHONPATH = {toxinidir}{/}tests PYTHONBUFFERED = yes TEST_RESULTS_DIR = {toxinidir}{/}test-results @@ -103,7 +103,7 @@ commands = --cov-report=xml:{toxworkdir}/coverage.{envname}.xml \ {posargs:-n auto} tests --run-integration --run-network_bound -# Designed for local developement +# Designed for local development [testenv:dev] description = Using `python3` in PATH: Install in 'edit' mode & Test basepython = {env:TOXPYTHON:python3} @@ -238,8 +238,8 @@ depends = build ## DEPLOYMENT [testenv:deploy] -# Deploy to test.pypi.org 
: TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy -# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy +# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy +# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy description = Deploy the python package to be hosted in a pypi server. Requires to authenticate with the pypi server, so please set the TWINE_PASSWORD and TWINE_PASSWORD environment variables. Also, requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution @@ -261,9 +261,9 @@ commands_pre = python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. 
Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");' - python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* + python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* commands = - python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose + python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose ## COVERAGE diff --git a/tests/data/snapshots/biskotaki-interactive/.github/workflows/test.yaml b/tests/data/snapshots/biskotaki-interactive/.github/workflows/test.yaml index 46e929cb..98c9db44 100644 --- a/tests/data/snapshots/biskotaki-interactive/.github/workflows/test.yaml +++ b/tests/data/snapshots/biskotaki-interactive/.github/workflows/test.yaml @@ -46,7 +46,7 @@ env: # Override Docker Policy-dependent decision-making and # Accept any ALL (branch/build) to Publish to Dockerhub # if true, it will push image and ignore DOCKER_JOB_POLICY - ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false" + ALWAYS_BUILD_N_PUBLISH_DOCKER: "false" DOCKER_JOB_POLICY: "CDeployment" # - CDeployment : Builds and Publishes only if Tests ran and passed @@ -93,7 +93,7 @@ jobs: echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT ## Docker - Pipeline Settings ## - id: derive_docker_policy - run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLSIH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT + run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || 
(env.ALWAYS_BUILD_N_PUBLISH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT ## Static Code Analysis - Pipeline Settings ## - id: derive_sqa_policy run: echo "POL=${{ (env.RUN_LINT_CHECKS != 'true' && '0') || (env.ALWAYS_LINT == 'true' && '1') || env.LINT_JOB_POLICY }}" >> $GITHUB_OUTPUT diff --git a/tests/data/snapshots/biskotaki-interactive/.pylintrc b/tests/data/snapshots/biskotaki-interactive/.pylintrc index b46c19e3..79e24834 100644 --- a/tests/data/snapshots/biskotaki-interactive/.pylintrc +++ b/tests/data/snapshots/biskotaki-interactive/.pylintrc @@ -60,7 +60,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes diff --git a/tests/data/snapshots/biskotaki-interactive/CONTRIBUTING.md b/tests/data/snapshots/biskotaki-interactive/CONTRIBUTING.md index 0267d962..3022c999 100644 --- a/tests/data/snapshots/biskotaki-interactive/CONTRIBUTING.md +++ b/tests/data/snapshots/biskotaki-interactive/CONTRIBUTING.md @@ -19,7 +19,7 @@ You can contribute in many ways: Report bugs at [https://github.com/boromir674/biskotaki/issues](https://github.com/boromir674/biskotaki/issues). W -Stambling upon a Bug means encountering different behaviour than the expected/advertised one. 
When you are reporting a bug, please include the following infromation by filling in [the template](https://github.com/boromir674/biskotaki/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). +Stumbling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following information by filling in [the template](https://github.com/boromir674/biskotaki/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. diff --git a/tests/data/snapshots/biskotaki-interactive/docs/contents/10_introduction.rst b/tests/data/snapshots/biskotaki-interactive/docs/contents/10_introduction.rst index 288ca296..1171d2ea 100644 --- a/tests/data/snapshots/biskotaki-interactive/docs/contents/10_introduction.rst +++ b/tests/data/snapshots/biskotaki-interactive/docs/contents/10_introduction.rst @@ -2,7 +2,7 @@ Introduction ============ -| This is **Biskotaki**, a *Python Package* desinged to ...
| Goal of this project is to TODO Document | Additionally, TODO Document diff --git a/tests/data/snapshots/biskotaki-interactive/docs/spelling_wordlist.txt b/tests/data/snapshots/biskotaki-interactive/docs/spelling_wordlist.txt index ed8ee0d1..36807407 100644 --- a/tests/data/snapshots/biskotaki-interactive/docs/spelling_wordlist.txt +++ b/tests/data/snapshots/biskotaki-interactive/docs/spelling_wordlist.txt @@ -29,7 +29,7 @@ deserialization discretization interpretable pre -accomodate +accommodate eg ie iterable diff --git a/tests/data/snapshots/biskotaki-interactive/tox.ini b/tests/data/snapshots/biskotaki-interactive/tox.ini index fde9bee9..82fb5607 100644 --- a/tests/data/snapshots/biskotaki-interactive/tox.ini +++ b/tests/data/snapshots/biskotaki-interactive/tox.ini @@ -37,7 +37,7 @@ passenv = codecov: CI codecov: TRAVIS TRAVIS_* setenv = -# It will overide variables in passenv in case of collision +# It will override variables in passenv in case of collision PYTHONPATH = {toxinidir}{/}tests PYTHONBUFFERED = yes TEST_RESULTS_DIR = {toxinidir}{/}test-results @@ -103,7 +103,7 @@ commands = --cov-report=xml:{toxworkdir}/coverage.{envname}.xml \ {posargs:-n auto} tests --run-integration --run-network_bound -# Designed for local developement +# Designed for local development [testenv:dev] description = Using `python3` in PATH: Install in 'edit' mode & Test basepython = {env:TOXPYTHON:python3} @@ -238,8 +238,8 @@ depends = build ## DEPLOYMENT [testenv:deploy] -# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy -# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy +# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy +# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy description = Deploy the python package to be hosted 
in a pypi server. Requires to authenticate with the pypi server, so please set the TWINE_PASSWORD and TWINE_PASSWORD environment variables. Also, requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution @@ -261,9 +261,9 @@ commands_pre = python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");' - python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* + python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* commands = - python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose + python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose ## COVERAGE diff --git a/tests/data/snapshots/biskotaki-no-input/.github/workflows/test.yaml b/tests/data/snapshots/biskotaki-no-input/.github/workflows/test.yaml index 46e929cb..98c9db44 100644 --- a/tests/data/snapshots/biskotaki-no-input/.github/workflows/test.yaml +++ b/tests/data/snapshots/biskotaki-no-input/.github/workflows/test.yaml @@ -46,7 +46,7 @@ env: # Override Docker Policy-dependent 
decision-making and # Accept any ALL (branch/build) to Publish to Dockerhub # if true, it will push image and ignore DOCKER_JOB_POLICY - ALWAYS_BUILD_N_PUBLSIH_DOCKER: "false" + ALWAYS_BUILD_N_PUBLISH_DOCKER: "false" DOCKER_JOB_POLICY: "CDeployment" # - CDeployment : Builds and Publishes only if Tests ran and passed @@ -93,7 +93,7 @@ jobs: echo "PUBLISH_ON_PYPI=$PUBLISH_ON_PYPI" >> $GITHUB_OUTPUT ## Docker - Pipeline Settings ## - id: derive_docker_policy - run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLSIH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT + run: echo "POL=${{ (env.DOCKER_JOB_ON != 'true' && '0') || (env.ALWAYS_BUILD_N_PUBLISH_DOCKER == 'true' && '1') || (env.DOCKER_JOB_POLICY == 'CDeployment' && '2') || (env.DOCKER_JOB_POLICY == 'CDelivery' && '3') }}" >> $GITHUB_OUTPUT ## Static Code Analysis - Pipeline Settings ## - id: derive_sqa_policy run: echo "POL=${{ (env.RUN_LINT_CHECKS != 'true' && '0') || (env.ALWAYS_LINT == 'true' && '1') || env.LINT_JOB_POLICY }}" >> $GITHUB_OUTPUT diff --git a/tests/data/snapshots/biskotaki-no-input/.pylintrc b/tests/data/snapshots/biskotaki-no-input/.pylintrc index b46c19e3..79e24834 100644 --- a/tests/data/snapshots/biskotaki-no-input/.pylintrc +++ b/tests/data/snapshots/biskotaki-no-input/.pylintrc @@ -60,7 +60,7 @@ confidence= # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes diff --git a/tests/data/snapshots/biskotaki-no-input/CONTRIBUTING.md b/tests/data/snapshots/biskotaki-no-input/CONTRIBUTING.md index 0267d962..3022c999 100644 --- a/tests/data/snapshots/biskotaki-no-input/CONTRIBUTING.md +++ b/tests/data/snapshots/biskotaki-no-input/CONTRIBUTING.md @@ -19,7 +19,7 @@ You can contribute in many ways: Report bugs at [https://github.com/boromir674/biskotaki/issues](https://github.com/boromir674/biskotaki/issues). W -Stambling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following infromation by filling in [the template](https://github.com/boromir674/biskotaki/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). +Stambling upon a Bug means encountering different behaviour than the expected/advertised one. When you are reporting a bug, please include the following information by filling in [the template](https://github.com/boromir674/biskotaki/.github/blob/master/.github/ISSUE_TEMPLATE/bug_report.md). * Your operating system name and version. * Any details about your local setup that might be helpful in troubleshooting. diff --git a/tests/data/snapshots/biskotaki-no-input/docs/contents/10_introduction.rst b/tests/data/snapshots/biskotaki-no-input/docs/contents/10_introduction.rst index 288ca296..1171d2ea 100644 --- a/tests/data/snapshots/biskotaki-no-input/docs/contents/10_introduction.rst +++ b/tests/data/snapshots/biskotaki-no-input/docs/contents/10_introduction.rst @@ -2,7 +2,7 @@ Introduction ============ -| This is **Biskotaki**, a *Python Package* desinged to ... +| This is **Biskotaki**, a *Python Package* designed to ... 
| Goal of this project is to TODO Document | Additionally, TODO Document diff --git a/tests/data/snapshots/biskotaki-no-input/docs/spelling_wordlist.txt b/tests/data/snapshots/biskotaki-no-input/docs/spelling_wordlist.txt index ed8ee0d1..36807407 100644 --- a/tests/data/snapshots/biskotaki-no-input/docs/spelling_wordlist.txt +++ b/tests/data/snapshots/biskotaki-no-input/docs/spelling_wordlist.txt @@ -29,7 +29,7 @@ deserialization discretization interpretable pre -accomodate +accommodate eg ie iterable diff --git a/tests/data/snapshots/biskotaki-no-input/tox.ini b/tests/data/snapshots/biskotaki-no-input/tox.ini index fde9bee9..82fb5607 100644 --- a/tests/data/snapshots/biskotaki-no-input/tox.ini +++ b/tests/data/snapshots/biskotaki-no-input/tox.ini @@ -37,7 +37,7 @@ passenv = codecov: CI codecov: TRAVIS TRAVIS_* setenv = -# It will overide variables in passenv in case of collision +# It will override variables in passenv in case of collision PYTHONPATH = {toxinidir}{/}tests PYTHONBUFFERED = yes TEST_RESULTS_DIR = {toxinidir}{/}test-results @@ -103,7 +103,7 @@ commands = --cov-report=xml:{toxworkdir}/coverage.{envname}.xml \ {posargs:-n auto} tests --run-integration --run-network_bound -# Designed for local developement +# Designed for local development [testenv:dev] description = Using `python3` in PATH: Install in 'edit' mode & Test basepython = {env:TOXPYTHON:python3} @@ -238,8 +238,8 @@ depends = build ## DEPLOYMENT [testenv:deploy] -# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy -# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy +# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy +# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy description = Deploy the python package to be hosted in a pypi server. 
Requires to authenticate with the pypi server, so please set the TWINE_PASSWORD and TWINE_PASSWORD environment variables. Also, requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution @@ -261,9 +261,9 @@ commands_pre = python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");' - python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* + python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* commands = - python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose + python -m twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose ## COVERAGE diff --git a/tests/generator_defaults_shift/test_docs_settings.py b/tests/generator_defaults_shift/test_docs_settings.py index 1369dabd..960746bf 100644 --- a/tests/generator_defaults_shift/test_docs_settings.py +++ b/tests/generator_defaults_shift/test_docs_settings.py @@ -25,7 +25,7 @@ def distro_gen_docs_defaults( ): """The officially recognized defaults for the Docs Generator Feature. 
- Confidentlly, advertize that this is the default, which the generator will use, + Confidently, advertise that this is the default, which the generator will use, in case the user does not provide any input, for the Docs Generator Feature. """ # Read Gen Doc Defaults Settings from cookiecutter.json @@ -83,13 +83,13 @@ def test_gen_parametrized_only_from_user_config_defaults_to_sphinx_builder_n_py3 # have parsed to attempt gathering the required information for URL resolution config = user_config[user_config_yaml] - # the below allows URL resolutoin, same as in prod, (ie same bug should appear, if syntax error in user yaml) + # the below allows URL resolution, same as in prod, (ie same bug should appear, if syntax error in user yaml) # and also allow mocking the 'web checks' feature, which is enabled automatically and independently, per web hosting service, mock_check.config = config # Emulate Asynchronous (Future) Responses, in case 'web checks' feature is enabled # feature is enabled automatically and independently, per web hosting service, - # in case it finds all the required information, whn doing URL resolution + # in case it finds all the required information, when doing URL resolution # we make sure no network calls are made, independently of URL resolution!
diff --git a/tests/test_cli.py b/tests/test_cli.py index 23c47797..03bd8d30 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -73,7 +73,7 @@ def test_cli_offline( # GIVEN the CLI main entrypoint from cookiecutter_python.cli import main as cli_main - # GIVEN a way to compute expectatoins, either in case + # GIVEN a way to compute expectations, either in case # - user config yaml, was passed as input to the CLI # - or in case no yaml, but default config was passed as input to the CLI config = user_config[config_file] @@ -116,7 +116,7 @@ def test_cli_offline( project_dir: str = path.abspath(path.join(gen_proj_dir, config.project_slug)) - # our code introduced WARNING logs due to git commit from issued to GEnerator + # our code introduced WARNING logs due to git commit from issued to Generator # assert config.data['initialize_git_repo'] is True and path.exists( # path.join(project_dir, 'cookie-py.log') # ) @@ -207,7 +207,7 @@ def _check_web_server(cli_stdout: str, expected_messages) -> t.Optional[bool]: @pytest.fixture def check_web_server_expected_result(): - webserver_2_templaet_variable = { + webserver_2_template_variable = { 'pypi': 'pkg_name', 'readthedocs': 'readthedocs_project_slug', } @@ -216,7 +216,7 @@ def _build_get_check_web_server_expected_result(webserver: str): def _get_check_web_server_expected_result(config, mock_flag: bool): if ( config.config_file is not None - and webserver_2_templaet_variable[webserver] in config.data + and webserver_2_template_variable[webserver] in config.data ): return mock_flag return None diff --git a/tests/test_gold_standard.py b/tests/test_gold_standard.py index 77f3c5f0..04294788 100644 --- a/tests/test_gold_standard.py +++ b/tests/test_gold_standard.py @@ -65,7 +65,7 @@ def gen_gs_project( # assert INTENTIONALLY_PLACED_LOG_FILE.is_file() # this commit somehow makes CI on Linux to break. 
But not on dev machine - # issue a pytest warning whever the Log File is not created as it should + # issue a pytest warning whenever the Log File is not created as it should if not INTENTIONALLY_PLACED_LOG_FILE.exists(): pytest.warns( UserWarning, @@ -76,7 +76,7 @@ def gen_gs_project( # assert INTENTIONALLY_PLACED_LOG_FILE.stat().st_size > 0 ###### Document kind of Bug ###### - # Expected but probably unintented behaviour: + # Expected but probably unintended behaviour: # - empty log file gets created inside the gen project dir # Log file is placed inside the generated project dir, after generation @@ -190,7 +190,7 @@ def test_gs_matches_runtime(gen_gs_project, test_root): # so, we hard exclude the line starting with the '0.0.1' string, to avoid # comparing rolling date with the static one in the snapshot - # first compare CHANGLOG files, then all other files + # first compare CHANGELOG files, then all other files snapshot_changelog = snapshot_dir / 'CHANGELOG.rst' # the expectation runtime_changelog = runtime_gs / 'CHANGELOG.rst' # the reality diff --git a/tests/test_post_gen_hook_regression.py b/tests/test_post_gen_hook_regression.py index 571efa53..03c294ce 100644 --- a/tests/test_post_gen_hook_regression.py +++ b/tests/test_post_gen_hook_regression.py @@ -1,6 +1,6 @@ # verify that post_gen_hook knows the docs builder initial docs location # this will make sure the PostGenProject hook can the necessary file removals -# and replacements for Docs.
If Generation docs features get update, but ie we forget to # update the post_gen_hook, this test will fail and remind us to update the hook # Regressoin Test, if you will def test_post_gen_hook_docs_builder_initial_docs_location(): diff --git a/tests/test_post_hook.py b/tests/test_post_hook.py index 6a2ea3c3..2013d2ff 100644 --- a/tests/test_post_hook.py +++ b/tests/test_post_hook.py @@ -52,7 +52,7 @@ def _emulated_generated_project(project_dir: str, name: str = 'biskotaki', **kwa ) # Automatically, discover what files to create for an accurate emulated project - ## Project Type Dependend Files ## + ## Project Type Dependent Files ## # Types class RuntimeRequest(Protocol): module_name: str # runtime value for {{ cookiecutter.pkg_name }} @@ -104,12 +104,12 @@ def generate_all_extra_files( # Sanity check that no-one inputs the same file twice assert len(extra_files_declared) == expected_unique_files - ## Docs Builder Type Dependend Files ## + ## Docs Builder Type Dependent Files ## from cookiecutter_python.hooks.post_gen_project import ( builder_id_2_files as builder_id_2_extra_files_map, ) - # theoritically, it should suffice for us to create 'emulated' files, as: + # theoretically, it should suffice for us to create 'emulated' files, as: # Excluding the Docs Builder defined in the Request, create file for all # builders in the map requested_docs_builder_id: str = emulated_post_gen_request.docs_website['builder'] @@ -154,7 +154,7 @@ def mock_get_request(): # to avoid bugs we require empty project dir, before emulated generation absolute_proj_dir = Path(project_dir).absolute() assert len(list(absolute_proj_dir.iterdir())) == 0 - # EMULATE a GEN Project, by craeting minimal dummy files and folders + # EMULATE a GEN Project, by creating minimal dummy files and folders emulated_request = emulated_generated_project( project_dir, name=name, project_type='module+cli' if add_cli else 'module' ) diff --git a/tox.ini b/tox.ini index 8765f619..ff6fd069 100644 --- a/tox.ini 
+++ b/tox.ini @@ -34,7 +34,7 @@ passenv = codecov: CI codecov: TRAVIS TRAVIS_* setenv = -# It will overide variables in passenv in case of collision +# It will override variables in passenv in case of collision PYTHONPATH = {toxinidir}{/}tests PYTHONBUFFERED = yes TEST_RESULTS_DIR = {toxinidir}{/}test-results @@ -69,7 +69,7 @@ commands = description = Install in 'edit' mode & Test usedevelop = true -# Designed for local developement +# Designed for local development [testenv:dev] description = Using `python3` in PATH: Install in 'edit' mode & Test basepython = {env:TOXPYTHON:python3} @@ -162,7 +162,7 @@ usedevelop = true changedir = {toxinidir} commands_pre = # mypy does not like, by default, multiple conftest.py (ses pytest) files - # trick mypy into believing that tests is a package, beucase it wants to be + # trick mypy into believing that tests is a package, because it wants to be # able to distinguish our 2 conftest.py files # create empty __init__.py in tests, temporarily python -c 'open("tests/__init__.py", "a").close();' @@ -239,8 +239,8 @@ depends = build ## DEPLOYMENT [testenv:deploy] -# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy -# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWROD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy +# Deploy to test.pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 tox -e deploy +# Deploy to pypi.org : TWINE_USERNAME=user TWINE_PASSWORD=pass PACKAGE_DIST_VERSION=1.0.0 PYPI_SERVER=pypi tox -e deploy description = Deploy the python package to be hosted in a pypi server. Requires to authenticate with the pypi server, so please set the TWINE_PASSWORD and TWINE_PASSWORD environment variables. 
Also, requires the PACKAGE_DIST_VERSION variable to explicitly indicate which distribution @@ -262,9 +262,9 @@ commands_pre = python -c 'import os; n = "TWINE_PASSWORD"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PACKAGE_DIST_VERSION"; v = os.environ.get(n); exec("if not v:\n print(\"Please set the \" + str(n) + \" variable.\")\n exit(1)");' python -c 'import os; n = "PYPI_SERVER"; exec("if n in os.environ:\n v = os.environ[n]\n if v != \"pypi\":\n print(\"Environment variable PYPI_SERVER detected, but was not set to pypi. Please set to pypi or run tox -e deploy from an environment where the PYPI_SERVER variable is NOT present at all.\")\n exit(1)");' - python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* + python -m twine check {env:DIST_DIR}/{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* commands = - twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISSMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose + twine {posargs:upload --non-interactive} --repository {env:PYPI_SERVER:testpypi --skip-existing} {env:DIST_DIR}{/}{env:PY_PACKAGE}-{env:PACKAGE_DIST_VERSION:MISMATCHED_PACKAGE_DIST_VERSION_ERROR}* --verbose ## COVERAGE