diff --git a/.docker/compose.yaml b/.docker/compose.yaml index 73d8639..8df77cd 100644 --- a/.docker/compose.yaml +++ b/.docker/compose.yaml @@ -1,4 +1,3 @@ -version: "3" services: api: container_name: manwha-reader-api diff --git a/backend/Dockerfile b/backend/Dockerfile index ca1707c..063f0b0 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -10,10 +10,10 @@ RUN wget -q https://dl.google.com/linux/direct/google-chrome-stable_current_amd6 RUN apt-get install -y ./google-chrome-stable_current_amd64.deb # install chromedriver -ENV CHROMEDRIVER_VERSION=130.0.6723.58 +ENV CHROMEDRIVER_VERSION=133.0.6943.53 RUN wget https://storage.googleapis.com/chrome-for-testing-public/$CHROMEDRIVER_VERSION/linux64/chromedriver-linux64.zip \ && unzip chromedriver-linux64.zip \ - && rm -rf chromedriver_linux64.zip \ + && rm -rf chromedriver-linux64.zip \ && mv ./chromedriver-linux64/chromedriver $DEPLOY_PATH/chromedriver \ && rm -rf chromedriver-linux64/ \ && chmod +x $DEPLOY_PATH/chromedriver diff --git a/backend/poetry.lock b/backend/poetry.lock index e4f7c3b..1024219 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.0 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -67,46 +67,6 @@ six = "*" [package.extras] visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] -[[package]] -name = "black" -version = "23.11.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.8" -files = [ - {file = "black-23.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dbea0bb8575c6b6303cc65017b46351dc5953eea5c0a59d7b7e3a2d2f433a911"}, - {file = "black-23.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:412f56bab20ac85927f3a959230331de5614aecda1ede14b373083f62ec24e6f"}, - {file = "black-23.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d136ef5b418c81660ad847efe0e55c58c8208b77a57a28a503a5f345ccf01394"}, - {file = "black-23.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:6c1cac07e64433f646a9a838cdc00c9768b3c362805afc3fce341af0e6a9ae9f"}, - {file = "black-23.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cf57719e581cfd48c4efe28543fea3d139c6b6f1238b3f0102a9c73992cbb479"}, - {file = "black-23.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:698c1e0d5c43354ec5d6f4d914d0d553a9ada56c85415700b81dc90125aac244"}, - {file = "black-23.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760415ccc20f9e8747084169110ef75d545f3b0932ee21368f63ac0fee86b221"}, - {file = "black-23.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:58e5f4d08a205b11800332920e285bd25e1a75c54953e05502052738fe16b3b5"}, - {file = "black-23.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:45aa1d4675964946e53ab81aeec7a37613c1cb71647b5394779e6efb79d6d187"}, - {file = "black-23.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c44b7211a3a0570cc097e81135faa5f261264f4dfaa22bd5ee2875a4e773bd6"}, - {file = "black-23.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a9acad1451632021ee0d146c8765782a0c3846e0e0ea46659d7c4f89d9b212b"}, - {file = "black-23.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:fc7f6a44d52747e65a02558e1d807c82df1d66ffa80a601862040a43ec2e3142"}, - {file = "black-23.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7f622b6822f02bfaf2a5cd31fdb7cd86fcf33dab6ced5185c35f5db98260b055"}, - 
{file = "black-23.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:250d7e60f323fcfc8ea6c800d5eba12f7967400eb6c2d21ae85ad31c204fb1f4"}, - {file = "black-23.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5133f5507007ba08d8b7b263c7aa0f931af5ba88a29beacc4b2dc23fcefe9c06"}, - {file = "black-23.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:421f3e44aa67138ab1b9bfbc22ee3780b22fa5b291e4db8ab7eee95200726b07"}, - {file = "black-23.11.0-py3-none-any.whl", hash = "sha256:54caaa703227c6e0c87b76326d0862184729a69b73d3b7305b6288e1d830067e"}, - {file = "black-23.11.0.tar.gz", hash = "sha256:4c68855825ff432d197229846f971bc4d6666ce90492e5b02013bcaca4d9ab05"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "boto3" version = "1.33.4" @@ -447,22 +407,6 @@ docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1 testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] typing = ["typing-extensions (>=4.8)"] -[[package]] -name = "flake8" -version = "6.1.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" -pyflakes = ">=3.1.0,<3.2.0" - [[package]] name = "greenlet" version = "3.0.1" @@ -789,28 +733,6 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=0.29.35)"] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - [[package]] name = "outcome" version = "1.3.0.post0" @@ -854,17 +776,6 @@ lxml = "*" packaging = "*" w3lib = ">=1.19.0" -[[package]] -name = "pathspec" -version = "0.11.2" -description = "Utility library for gitignore style pattern matching of file paths." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, -] - [[package]] name = "pillow" version = "10.1.0" @@ -932,21 +843,6 @@ files = [ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -[[package]] -name = "platformdirs" -version = "4.0.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -optional = false -python-versions = ">=3.7" -files = [ - {file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"}, - {file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - [[package]] name = "pluggy" version = "1.4.0" @@ -1079,17 +975,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.4.6,<0.6.0" -[[package]] -name = "pycodestyle" -version = "2.11.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, -] - [[package]] name = "pycparser" version = "2.21" @@ -1262,17 +1147,6 @@ files = [ [package.extras] dev = ["tox"] -[[package]] -name = "pyflakes" -version = "3.1.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, - {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, -] - [[package]] name = "pyopenssl" version = "23.3.0" @@ -1386,6 +1260,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1467,6 +1342,33 @@ files = [ requests = ">=1.0.0" six = "*" +[[package]] +name = "ruff" +version = "0.9.5" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.9.5-py3-none-linux_armv6l.whl", hash = "sha256:d466d2abc05f39018d53f681fa1c0ffe9570e6d73cde1b65d23bb557c846f442"}, + {file = "ruff-0.9.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38840dbcef63948657fa7605ca363194d2fe8c26ce8f9ae12eee7f098c85ac8a"}, + {file = "ruff-0.9.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d56ba06da53536b575fbd2b56517f6f95774ff7be0f62c80b9e67430391eeb36"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7cb2a01da08244c50b20ccfaeb5972e4228c3c3a1989d3ece2bc4b1f996001"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:96d5c76358419bc63a671caac70c18732d4fd0341646ecd01641ddda5c39ca0b"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:deb8304636ed394211f3a6d46c0e7d9535b016f53adaa8340139859b2359a070"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df455000bf59e62b3e8c7ba5ed88a4a2bc64896f900f311dc23ff2dc38156440"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de92170dfa50c32a2b8206a647949590e752aca8100a0f6b8cefa02ae29dce80"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d28532d73b1f3f627ba88e1456f50748b37f3a345d2be76e4c653bec6c3e393"}, + {file = "ruff-0.9.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c746d7d1df64f31d90503ece5cc34d7007c06751a7a3bbeee10e5f2463d52d2"}, + {file = "ruff-0.9.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:11417521d6f2d121fda376f0d2169fb529976c544d653d1d6044f4c5562516ee"}, + {file = "ruff-0.9.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b9d71c3879eb32de700f2f6fac3d46566f644a91d3130119a6378f9312a38e1"}, + {file = "ruff-0.9.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2e36c61145e70febcb78483903c43444c6b9d40f6d2f800b5552fec6e4a7bb9a"}, + {file = "ruff-0.9.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2f71d09aeba026c922aa7aa19a08d7bd27c867aedb2f74285a2639644c1c12f5"}, + {file = "ruff-0.9.5-py3-none-win32.whl", hash = "sha256:134f958d52aa6fdec3b294b8ebe2320a950d10c041473c4316d2e7d7c2544723"}, + {file = "ruff-0.9.5-py3-none-win_amd64.whl", hash = "sha256:78cc6067f6d80b6745b67498fb84e87d32c6fc34992b52bffefbdae3442967d6"}, + {file = "ruff-0.9.5-py3-none-win_arm64.whl", hash = "sha256:18a29f1a005bddb229e580795627d297dfa99f16b30c7039e73278cf6b5f9fa9"}, + {file = "ruff-0.9.5.tar.gz", hash = "sha256:11aecd7a633932875ab3cb05a484c99970b9d52606ce9ea912b690b02653d56c"}, +] + [[package]] name = "s3transfer" version = "0.8.2" @@ -2195,4 +2097,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "384626ff7d011f363cc27ea33778af92f891c70cfb8b98ef120b144d8f35eb96" +content-hash = "8dfcc8d9b26341577d28e3edfd5946006ca7a647f4547ccbf82352fc735afaa9" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index dc9ca6a..40516b8 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -4,6 +4,7 @@ version = "0.1.0" description = "" authors = ["christopherfrige "] readme = 
"README.md" +package-mode = false [tool.poetry.dependencies] python = "^3.11" @@ -21,14 +22,26 @@ webdriver-manager = "^4.0.1" requests = "^2.31.0" [tool.poetry.group.dev.dependencies] -black = "^23.11.0" -flake8 = "^6.1.0" pytest = "^8.1.1" +ruff = "^0.9.5" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" -[tool.black] -line-length = 99 -target-version = ['py311'] \ No newline at end of file +[tool.ruff] +line-length = 100 + +[tool.ruff.lint.per-file-ignores] +"*.py" = ["E712"] +"__init__.py" = ["F403", "F401"] # https://www.flake8rules.com/rules/F403.html +"conftest.py" = ["F401"] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[tool.ruff.lint] +extend-select = ["I"] \ No newline at end of file diff --git a/backend/src/domain/entities/alternative_name.py b/backend/src/domain/entities/alternative_name.py index 6489090..a7219c4 100644 --- a/backend/src/domain/entities/alternative_name.py +++ b/backend/src/domain/entities/alternative_name.py @@ -1,7 +1,8 @@ -from sqlalchemy import Column, ForeignKey, Integer, TIMESTAMP, Text -from src.domain.entities import Base +from sqlalchemy import TIMESTAMP, Column, ForeignKey, Integer, Text from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class AlternativeNameSchema: __table_args__ = {"schema": "alternative_name"} diff --git a/backend/src/domain/entities/artist.py b/backend/src/domain/entities/artist.py index 0c36d8e..1d36c25 100644 --- a/backend/src/domain/entities/artist.py +++ b/backend/src/domain/entities/artist.py @@ -1,7 +1,8 @@ -from sqlalchemy import Column, Integer, TIMESTAMP, Text -from src.domain.entities import Base +from sqlalchemy import TIMESTAMP, Column, Integer, Text from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class ArtistSchema: __table_args__ = {"schema": "artist"} diff --git a/backend/src/domain/entities/author.py b/backend/src/domain/entities/author.py index d7abb78..9a31048 100644 --- a/backend/src/domain/entities/author.py +++ b/backend/src/domain/entities/author.py @@ -1,7 +1,8 @@ -from sqlalchemy import Column, Integer, TIMESTAMP, Text -from src.domain.entities import Base +from sqlalchemy import TIMESTAMP, Column, Integer, Text from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class AuthorSchema: __table_args__ = {"schema": "author"} diff --git a/backend/src/domain/entities/chapter.py b/backend/src/domain/entities/chapter.py index f766c64..b029a8e 100644 --- a/backend/src/domain/entities/chapter.py +++ b/backend/src/domain/entities/chapter.py @@ -1,7 +1,16 @@ -from sqlalchemy import Boolean, Column, Integer, ForeignKey, TIMESTAMP, Float, Text -from src.domain.entities import Base +from sqlalchemy import ( + TIMESTAMP, + Boolean, + Column, + Float, + ForeignKey, + Integer, + Text, +) from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class ChapterSchema: __table_args__ = {"schema": "chapter"} diff --git a/backend/src/domain/entities/genre.py b/backend/src/domain/entities/genre.py index 88fb461..8a58fd9 100644 --- a/backend/src/domain/entities/genre.py +++ b/backend/src/domain/entities/genre.py @@ -1,7 +1,8 @@ -from sqlalchemy import Column, Integer, Text, TIMESTAMP -from src.domain.entities import Base +from sqlalchemy import TIMESTAMP, Column, Integer, Text from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class GenreSchema: __table_args__ = {"schema": 
"genre"} diff --git a/backend/src/domain/entities/manwha.py b/backend/src/domain/entities/manwha.py index 784897b..6b50164 100644 --- a/backend/src/domain/entities/manwha.py +++ b/backend/src/domain/entities/manwha.py @@ -1,5 +1,6 @@ -from sqlalchemy import Column, Integer, Text, TIMESTAMP, ForeignKey +from sqlalchemy import TIMESTAMP, Column, ForeignKey, Integer, Text from sqlalchemy.sql.functions import now + from src.domain.entities import Base diff --git a/backend/src/domain/entities/scraper.py b/backend/src/domain/entities/scraper.py index 9f28309..bca99cf 100644 --- a/backend/src/domain/entities/scraper.py +++ b/backend/src/domain/entities/scraper.py @@ -1,7 +1,8 @@ -from sqlalchemy import Boolean, Column, Integer, Text, TIMESTAMP, ForeignKey -from src.domain.entities import Base +from sqlalchemy import TIMESTAMP, Boolean, Column, ForeignKey, Integer, Text from sqlalchemy.sql.functions import now +from src.domain.entities import Base + class ScraperSchema: __table_args__ = {"schema": "scraper"} diff --git a/backend/src/domain/exceptions/client.py b/backend/src/domain/exceptions/client.py index 601c125..645f456 100644 --- a/backend/src/domain/exceptions/client.py +++ b/backend/src/domain/exceptions/client.py @@ -1,17 +1,13 @@ from src.domain.exceptions import DefaultException -class BadRequestException(DefaultException): - ... +class BadRequestException(DefaultException): ... -class NotFoundException(DefaultException): - ... +class NotFoundException(DefaultException): ... -class NotAcceptableException(DefaultException): - ... +class NotAcceptableException(DefaultException): ... -class ConflictException(DefaultException): - ... +class ConflictException(DefaultException): ... diff --git a/backend/src/domain/exceptions/server.py b/backend/src/domain/exceptions/server.py index d1c33f2..ad9e98a 100644 --- a/backend/src/domain/exceptions/server.py +++ b/backend/src/domain/exceptions/server.py @@ -1,5 +1,4 @@ from src.domain.exceptions import DefaultException -class BadGatewayException(DefaultException): - ... +class BadGatewayException(DefaultException): ... 
diff --git a/backend/src/domain/repository/__init__.py b/backend/src/domain/repository/__init__.py index d6db806..6dd729c 100644 --- a/backend/src/domain/repository/__init__.py +++ b/backend/src/domain/repository/__init__.py @@ -1,7 +1,8 @@ from abc import ABC -from sqlalchemy.ext.declarative import DeclarativeMeta -from sqlalchemy.orm import Session, Query + from sqlalchemy import update +from sqlalchemy.ext.declarative import DeclarativeMeta +from sqlalchemy.orm import Query, Session class BaseRepository(ABC): @@ -10,7 +11,9 @@ def __init__(self, session: Session, model: DeclarativeMeta): self.model = model def get( - self, field: str | None = None, value: str | int | float | bool | list | None = None + self, + field: str | None = None, + value: str | int | float | bool | list | None = None, ) -> Query: if field and value: field = getattr(self.model, field) diff --git a/backend/src/domain/repository/alternative_name.py b/backend/src/domain/repository/alternative_name.py index ef8c04f..e3da227 100644 --- a/backend/src/domain/repository/alternative_name.py +++ b/backend/src/domain/repository/alternative_name.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.alternative_name import AlternativeName from sqlalchemy.orm import Session +from src.domain.entities.alternative_name import AlternativeName +from src.domain.repository import BaseRepository + class AlternativeNameRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/repository/artist.py b/backend/src/domain/repository/artist.py index 9c1e063..b6e6259 100644 --- a/backend/src/domain/repository/artist.py +++ b/backend/src/domain/repository/artist.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.artist import Artist from sqlalchemy.orm import Session +from src.domain.entities.artist import Artist +from src.domain.repository import BaseRepository + class ArtistRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/repository/author.py b/backend/src/domain/repository/author.py index fbfca8e..1ebebde 100644 --- a/backend/src/domain/repository/author.py +++ b/backend/src/domain/repository/author.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.author import Author from sqlalchemy.orm import Session +from src.domain.entities.author import Author +from src.domain.repository import BaseRepository + class AuthorRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/repository/chapter.py b/backend/src/domain/repository/chapter.py index 9d6ea92..598bd4a 100644 --- a/backend/src/domain/repository/chapter.py +++ b/backend/src/domain/repository/chapter.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.chapter import Chapter from sqlalchemy.orm import Session +from src.domain.entities.chapter import Chapter +from src.domain.repository import BaseRepository + class ChapterRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/repository/genre.py b/backend/src/domain/repository/genre.py index bcf07f9..622322d 100644 --- a/backend/src/domain/repository/genre.py +++ b/backend/src/domain/repository/genre.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.genre import Genre from sqlalchemy.orm import Session +from 
src.domain.entities.genre import Genre +from src.domain.repository import BaseRepository + class GenreRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/repository/manwha.py b/backend/src/domain/repository/manwha.py index 10c54fc..c986bfd 100644 --- a/backend/src/domain/repository/manwha.py +++ b/backend/src/domain/repository/manwha.py @@ -1,11 +1,12 @@ -from src.domain.repository import BaseRepository +from sqlalchemy.orm import Session + from src.domain.entities.manwha import ( Manwha, - ManwhaAuthor, ManwhaArtist, + ManwhaAuthor, ManwhaGenre, ) -from sqlalchemy.orm import Session +from src.domain.repository import BaseRepository class ManwhaRepository(BaseRepository): diff --git a/backend/src/domain/repository/scraper.py b/backend/src/domain/repository/scraper.py index 3e8d108..0aa8824 100644 --- a/backend/src/domain/repository/scraper.py +++ b/backend/src/domain/repository/scraper.py @@ -1,7 +1,8 @@ -from src.domain.repository import BaseRepository -from src.domain.entities.scraper import ScraperManwha, Reader from sqlalchemy.orm import Session +from src.domain.entities.scraper import Reader, ScraperManwha +from src.domain.repository import BaseRepository + class ScraperManwhaRepository(BaseRepository): def __init__(self, session: Session) -> None: diff --git a/backend/src/domain/schemas/manwha.py b/backend/src/domain/schemas/manwha.py index 1ca6f2e..7f3994e 100644 --- a/backend/src/domain/schemas/manwha.py +++ b/backend/src/domain/schemas/manwha.py @@ -1,14 +1,15 @@ +from datetime import datetime + from pydantic import BaseModel -from src.domain.enums.manwha import GetManwhasOrderEntity + from src.domain.enums.core import OrdenationOrder +from src.domain.enums.manwha import GetManwhasOrderEntity from src.domain.schemas import Pagination +from src.domain.schemas.alternative_name import AlternativeNameSchema from src.domain.schemas.artist import ArtistSchema from src.domain.schemas.author import AuthorSchema -from src.domain.schemas.genre import GenreSchema -from src.domain.schemas.alternative_name import AlternativeNameSchema from src.domain.schemas.chapter import ChapterSchema - -from datetime import datetime +from src.domain.schemas.genre import GenreSchema class ManwhaSchema(BaseModel): diff --git a/backend/src/domain/use_cases/chapter/check_new_chapters.py b/backend/src/domain/use_cases/chapter/check_new_chapters.py index aec58ae..2752813 100644 --- a/backend/src/domain/use_cases/chapter/check_new_chapters.py +++ b/backend/src/domain/use_cases/chapter/check_new_chapters.py @@ -1,8 +1,9 @@ +from sqlalchemy import select +from sqlalchemy.orm import Session + +from src.domain.entities.chapter import Chapter from src.domain.repository.chapter import ChapterRepository from src.domain.repository.scraper import ScraperManwhaRepository -from src.domain.entities.chapter import Chapter -from sqlalchemy.orm import Session -from sqlalchemy import select class CheckNewChaptersUseCase: diff --git a/backend/src/domain/use_cases/chapter/get_chapter_pages.py b/backend/src/domain/use_cases/chapter/get_chapter_pages.py index 71783fb..b40bdbc 100644 --- a/backend/src/domain/use_cases/chapter/get_chapter_pages.py +++ b/backend/src/domain/use_cases/chapter/get_chapter_pages.py @@ -1,6 +1,6 @@ -from src.domain.entities.manwha import Manwha from src.domain.entities.chapter import Chapter -from src.domain.schemas.chapter import GetChapterPagesResponse, ChapterPage +from src.domain.entities.manwha import Manwha +from src.domain.schemas.chapter import 
ChapterPage, GetChapterPagesResponse from src.infrastructure.persistence.unit_of_work import UnitOfWork from src.infrastructure.services.s3 import S3Service @@ -34,7 +34,5 @@ def execute(self, chapter_id: int) -> GetChapterPagesResponse: ) def prepare_chapter_pages(self, manwha_id: int, chapter_id: int) -> list[ChapterPage]: - chapter_pages = self.storage.list_objects( - path=f"manwha/{manwha_id}/chapters/{chapter_id}/" - ) + chapter_pages = self.storage.list_objects(path=f"manwha/{manwha_id}/chapters/{chapter_id}/") return [ChapterPage(url=f"{self.storage.bucket_url}/{page}") for page in chapter_pages] diff --git a/backend/src/domain/use_cases/chapter/upload_chapter_pages.py b/backend/src/domain/use_cases/chapter/upload_chapter_pages.py index db0bd6d..7a5734f 100644 --- a/backend/src/domain/use_cases/chapter/upload_chapter_pages.py +++ b/backend/src/domain/use_cases/chapter/upload_chapter_pages.py @@ -1,10 +1,11 @@ -from src.domain.utils import normalize_string -from src.domain.repository.chapter import ChapterRepository +import os + +from sqlalchemy.orm import Session + from src.domain.entities.chapter import Chapter +from src.domain.repository.chapter import ChapterRepository +from src.domain.utils import normalize_string from src.infrastructure.services.s3 import S3Service -from sqlalchemy.orm import Session -from src.infrastructure.config import SETTINGS -import os class UploadChapterPagesUseCase: diff --git a/backend/src/domain/use_cases/common/download_image.py b/backend/src/domain/use_cases/common/download_image.py index 1c790e8..a42d75b 100644 --- a/backend/src/domain/use_cases/common/download_image.py +++ b/backend/src/domain/use_cases/common/download_image.py @@ -1,6 +1,7 @@ -import requests import os +import requests + class DownloadImageUseCase: def __init__(self, referer: str | None): diff --git a/backend/src/domain/use_cases/manwha/delete_manwha_chapters.py b/backend/src/domain/use_cases/manwha/delete_manwha_chapters.py index 304e699..e4a9808 100644 --- a/backend/src/domain/use_cases/manwha/delete_manwha_chapters.py +++ b/backend/src/domain/use_cases/manwha/delete_manwha_chapters.py @@ -1,8 +1,8 @@ from src.domain.exceptions.client import BadRequestException -from src.infrastructure.services.s3 import S3Service -from src.infrastructure.persistence.unit_of_work import UnitOfWork from src.domain.repository.chapter import ChapterRepository from src.domain.repository.scraper import ScraperManwhaRepository +from src.infrastructure.persistence.unit_of_work import UnitOfWork +from src.infrastructure.services.s3 import S3Service class DeleteManwhaChaptersUseCase: diff --git a/backend/src/domain/use_cases/manwha/get_manwha.py b/backend/src/domain/use_cases/manwha/get_manwha.py index 1514592..f56dd25 100644 --- a/backend/src/domain/use_cases/manwha/get_manwha.py +++ b/backend/src/domain/use_cases/manwha/get_manwha.py @@ -1,24 +1,24 @@ +from sqlalchemy import String, cast + +from src.domain.entities.alternative_name import AlternativeName +from src.domain.entities.artist import Artist +from src.domain.entities.author import Author +from src.domain.entities.chapter import Chapter +from src.domain.entities.genre import Genre from src.domain.entities.manwha import ( Manwha, ManwhaArtist, ManwhaAuthor, ManwhaGenre, ) -from src.domain.entities.chapter import Chapter -from src.domain.entities.author import Author -from src.domain.entities.artist import Artist -from src.domain.entities.genre import Genre -from src.domain.entities.alternative_name import AlternativeName -from 
src.domain.schemas.manwha import GetManwhaResponse +from src.domain.schemas.alternative_name import AlternativeNameSchema from src.domain.schemas.artist import ArtistSchema from src.domain.schemas.author import AuthorSchema -from src.domain.schemas.genre import GenreSchema -from src.domain.schemas.alternative_name import AlternativeNameSchema from src.domain.schemas.chapter import ChapterSchema +from src.domain.schemas.genre import GenreSchema +from src.domain.schemas.manwha import GetManwhaResponse from src.infrastructure.persistence.unit_of_work import UnitOfWork -from sqlalchemy import cast, String - class GetManwhaUseCase: def execute(self, db: UnitOfWork, manwha_id: int) -> GetManwhaResponse: @@ -38,9 +38,7 @@ def execute(self, db: UnitOfWork, manwha_id: int) -> GetManwhaResponse: artists=self._additional_data( Artist, ManwhaArtist, ManwhaArtist.artist_id, ArtistSchema ), - genres=self._additional_data( - Genre, ManwhaGenre, ManwhaGenre.genre_id, GenreSchema - ), + genres=self._additional_data(Genre, ManwhaGenre, ManwhaGenre.genre_id, GenreSchema), alternative_names=self._alternative_names(), ) diff --git a/backend/src/domain/use_cases/manwha/get_manwhas.py b/backend/src/domain/use_cases/manwha/get_manwhas.py index 88d4f81..cbd6f4e 100644 --- a/backend/src/domain/use_cases/manwha/get_manwhas.py +++ b/backend/src/domain/use_cases/manwha/get_manwhas.py @@ -1,22 +1,23 @@ -from src.domain.enums.core import OrdenationOrder -from src.domain.enums.manwha import GetManwhasOrderEntity +from sqlalchemy import asc, case, desc, or_ +from sqlalchemy.sql import func + from src.domain.entities.alternative_name import AlternativeName -from src.domain.entities.manwha import Manwha from src.domain.entities.chapter import Chapter +from src.domain.entities.manwha import Manwha +from src.domain.enums.core import OrdenationOrder +from src.domain.enums.manwha import GetManwhasOrderEntity from src.domain.schemas.manwha import ( GetManwhasRequestQueryParams, - ManwhaPresentationData, GetManwhasResponse, -) -from src.infrastructure.persistence.unit_of_work import UnitOfWork -from src.domain.use_cases.pagination.prepare_pagination import ( - PreparePaginationUseCase, + ManwhaPresentationData, ) from src.domain.use_cases.pagination.get_limit_offset import ( GetLimitOffsetUseCase, ) -from sqlalchemy.sql import func -from sqlalchemy import case, or_, asc, desc +from src.domain.use_cases.pagination.prepare_pagination import ( + PreparePaginationUseCase, +) +from src.infrastructure.persistence.unit_of_work import UnitOfWork class GetManwhasUseCase: @@ -57,7 +58,11 @@ def execute(self, query_params: GetManwhasRequestQueryParams) -> GetManwhasRespo ), ) .join(Chapter, Chapter.manwha_id == Manwha.id, isouter=True) - .join(AlternativeName, AlternativeName.manwha_id == Manwha.id, isouter=True) + .join( + AlternativeName, + AlternativeName.manwha_id == Manwha.id, + isouter=True, + ) .filter(Chapter.id == subquery.scalar_subquery()) ) @@ -66,7 +71,9 @@ def execute(self, query_params: GetManwhasRequestQueryParams) -> GetManwhasRespo query = query.filter(or_(*search_conditions)) _order_by, _order = self._prepare_ordenation( - query_params.order_entity, query_params.order_by, query_params.order + query_params.order_entity, + query_params.order_by, + query_params.order, ) query = query.group_by(Manwha.id, Chapter.downloaded) query = query.order_by(_order(func.max(_order_by))) @@ -78,7 +85,10 @@ def execute(self, query_params: GetManwhasRequestQueryParams) -> GetManwhasRespo return GetManwhasResponse( records=manwhas, 
pagination=self.prepare_pagination( - "/v1/manwhas", query, query_params.page, query_params.per_page + "/v1/manwhas", + query, + query_params.page, + query_params.per_page, ), ) @@ -91,7 +101,10 @@ def _get_search_conditions(self, search_input: str): return conditions def _prepare_ordenation( - self, order_entity: GetManwhasOrderEntity, order_by: str, order: OrdenationOrder + self, + order_entity: GetManwhasOrderEntity, + order_by: str, + order: OrdenationOrder, ): match order_entity: case GetManwhasOrderEntity.MANWHA: diff --git a/backend/src/domain/use_cases/manwha/manage_manwha.py b/backend/src/domain/use_cases/manwha/manage_manwha.py index 2cd8681..36c0988 100644 --- a/backend/src/domain/use_cases/manwha/manage_manwha.py +++ b/backend/src/domain/use_cases/manwha/manage_manwha.py @@ -1,28 +1,29 @@ -from src.domain.repository.manwha import ( - ManwhaRepository, - ManwhaGenreRepository, - ManwhaArtistRepository, - ManwhaAuthorRepository, -) -from src.domain.repository.genre import GenreRepository -from src.domain.repository.artist import ArtistRepository -from src.domain.repository.author import AuthorRepository -from src.domain.repository.alternative_name import AlternativeNameRepository +import shutil + +from sqlalchemy.orm import Session + +from src.domain.entities.alternative_name import AlternativeName +from src.domain.entities.artist import Artist +from src.domain.entities.author import Author +from src.domain.entities.genre import Genre from src.domain.entities.manwha import ( Manwha, - ManwhaGenre, ManwhaArtist, ManwhaAuthor, + ManwhaGenre, +) +from src.domain.repository.alternative_name import AlternativeNameRepository +from src.domain.repository.artist import ArtistRepository +from src.domain.repository.author import AuthorRepository +from src.domain.repository.genre import GenreRepository +from src.domain.repository.manwha import ( + ManwhaArtistRepository, + ManwhaAuthorRepository, + ManwhaGenreRepository, + ManwhaRepository, ) -from src.domain.entities.genre import Genre -from src.domain.entities.artist import Artist -from src.domain.entities.author import Author -from src.domain.entities.alternative_name import AlternativeName from src.domain.use_cases.common.download_image import DownloadImageUseCase from src.infrastructure.services.s3 import S3Service -from src.infrastructure.config import SETTINGS -from sqlalchemy.orm import Session -import shutil class ManageManwhaUseCase: diff --git a/backend/src/domain/use_cases/pagination/prepare_pagination.py b/backend/src/domain/use_cases/pagination/prepare_pagination.py index e211f4c..f323822 100644 --- a/backend/src/domain/use_cases/pagination/prepare_pagination.py +++ b/backend/src/domain/use_cases/pagination/prepare_pagination.py @@ -1,8 +1,9 @@ from math import ceil -from src.domain.schemas import Pagination from sqlalchemy.orm.query import RowReturningQuery +from src.domain.schemas import Pagination + class PreparePaginationUseCase: @staticmethod diff --git a/backend/src/domain/use_cases/scraper/base_scraper.py b/backend/src/domain/use_cases/scraper/base_scraper.py index 49737b8..221adb3 100644 --- a/backend/src/domain/use_cases/scraper/base_scraper.py +++ b/backend/src/domain/use_cases/scraper/base_scraper.py @@ -1,25 +1,33 @@ -from abc import ABC, abstractmethod import shutil +from abc import ABC, abstractmethod from selenium import webdriver -from selenium.webdriver.chrome.service import Service from selenium.common.exceptions import TimeoutException +from selenium.webdriver.chrome.service import Service from sqlalchemy.orm 
import Session -from src.domain.repository.manwha import ManwhaRepository -from src.infrastructure.services.notifier import Notifier -from src.domain.utils import normalize_string -from src.domain.entities.scraper import ScraperManwha -from src.domain.exceptions.client import BadRequestException, NotAcceptableException + from src.domain.entities.chapter import Chapter -from src.domain.repository.chapter import ChapterRepository +from src.domain.entities.scraper import ScraperManwha +from src.domain.exceptions.client import ( + BadRequestException, + NotAcceptableException, +) from src.domain.exceptions.server import BadGatewayException -from src.domain.use_cases.manwha.manage_manwha import ManageManwhaUseCase -from src.domain.use_cases.chapter.check_new_chapters import CheckNewChaptersUseCase -from src.domain.use_cases.chapter.upload_chapter_pages import UploadChapterPagesUseCase -from src.domain.use_cases.common.download_image import DownloadImageUseCase +from src.domain.repository.chapter import ChapterRepository +from src.domain.repository.manwha import ManwhaRepository from src.domain.repository.scraper import ScraperManwhaRepository -from src.infrastructure.services.s3 import S3Service +from src.domain.use_cases.chapter.check_new_chapters import ( + CheckNewChaptersUseCase, +) +from src.domain.use_cases.chapter.upload_chapter_pages import ( + UploadChapterPagesUseCase, +) +from src.domain.use_cases.common.download_image import DownloadImageUseCase +from src.domain.use_cases.manwha.manage_manwha import ManageManwhaUseCase +from src.domain.utils import normalize_string from src.infrastructure.log import logger +from src.infrastructure.services.notifier import Notifier +from src.infrastructure.services.s3 import S3Service class BaseScraperUseCase(ABC): diff --git a/backend/src/domain/use_cases/scraper/create_manwha_to_scrape.py b/backend/src/domain/use_cases/scraper/create_manwha_to_scrape.py index 67ed580..e66c67f 100644 --- a/backend/src/domain/use_cases/scraper/create_manwha_to_scrape.py +++ b/backend/src/domain/use_cases/scraper/create_manwha_to_scrape.py @@ -1,8 +1,14 @@ -from src.infrastructure.persistence.unit_of_work import UnitOfWork -from src.domain.repository.scraper import ReaderRepository, ScraperManwhaRepository from src.domain.entities.scraper import ScraperManwha from src.domain.exceptions.client import BadRequestException, ConflictException -from src.domain.schemas.scraper import CreateManwhaToScrapeRequest, CreateManwhaToScrapeResponse +from src.domain.repository.scraper import ( + ReaderRepository, + ScraperManwhaRepository, +) +from src.domain.schemas.scraper import ( + CreateManwhaToScrapeRequest, + CreateManwhaToScrapeResponse, +) +from src.infrastructure.persistence.unit_of_work import UnitOfWork class CreateManwhaToScrapeUseCase: @@ -23,12 +29,15 @@ def execute(self, payload: CreateManwhaToScrapeRequest): scraper_manwha_id = self.scraper_manwha_repository.add( ScraperManwha( - reader_id=payload.reader_id, url=payload.url, chapter_start=payload.chapter_start + reader_id=payload.reader_id, + url=payload.url, + chapter_start=payload.chapter_start, ) ) self.session.commit() return CreateManwhaToScrapeResponse( - message="Manwha created and ready to be scraped", scraper_manwha_id=scraper_manwha_id + message="Manwha created and ready to be scraped", + scraper_manwha_id=scraper_manwha_id, ) diff --git a/backend/src/domain/use_cases/scraper/get_readers.py b/backend/src/domain/use_cases/scraper/get_readers.py index f886491..67b4de6 100644 --- 
a/backend/src/domain/use_cases/scraper/get_readers.py +++ b/backend/src/domain/use_cases/scraper/get_readers.py @@ -1,9 +1,12 @@ +from sqlalchemy import asc + from src.domain.entities.scraper import Reader -from src.domain.schemas.scraper import GetReadersResponse, ReaderData from src.domain.repository.scraper import ReaderRepository -from src.domain.use_cases.pagination.prepare_pagination import PreparePaginationUseCase +from src.domain.schemas.scraper import GetReadersResponse, ReaderData +from src.domain.use_cases.pagination.prepare_pagination import ( + PreparePaginationUseCase, +) from src.infrastructure.persistence.unit_of_work import UnitOfWork -from sqlalchemy import asc class GetReadersUseCase: diff --git a/backend/src/domain/use_cases/scraper/get_scraper_manwha.py b/backend/src/domain/use_cases/scraper/get_scraper_manwha.py index e4651ee..3f846ea 100644 --- a/backend/src/domain/use_cases/scraper/get_scraper_manwha.py +++ b/backend/src/domain/use_cases/scraper/get_scraper_manwha.py @@ -1,6 +1,6 @@ from src.domain.exceptions.client import NotFoundException -from src.domain.schemas.scraper import GetReadersResponse, ScraperManwhaSchema from src.domain.repository.scraper import ScraperManwhaRepository +from src.domain.schemas.scraper import GetReadersResponse, ScraperManwhaSchema from src.infrastructure.persistence.unit_of_work import UnitOfWork diff --git a/backend/src/domain/use_cases/scraper/scrape_flower_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_flower_manwhas.py index 919b950..6f2a8ca 100644 --- a/backend/src/domain/use_cases/scraper/scrape_flower_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_flower_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + +from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException -from src.domain.enums.scraper import ReaderEnum class ScrapeFlowerManwhasUseCase(BaseScraperUseCase): @@ -41,7 +42,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, "image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) return pages diff --git a/backend/src/domain/use_cases/scraper/scrape_hari_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_hari_manwhas.py index 4bb3146..2f322e4 100644 --- a/backend/src/domain/use_cases/scraper/scrape_hari_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_hari_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + +from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException -from src.domain.enums.scraper import ReaderEnum class ScrapeHariManwhasUseCase(BaseScraperUseCase): @@ -40,7 +41,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, 
"image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) return pages diff --git a/backend/src/domain/use_cases/scraper/scrape_inari_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_inari_manwhas.py index 67df389..580fdab 100644 --- a/backend/src/domain/use_cases/scraper/scrape_inari_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_inari_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + +from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException -from src.domain.enums.scraper import ReaderEnum class ScrapeInariManwhasUseCase(BaseScraperUseCase): @@ -44,7 +45,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, "image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) return pages diff --git a/backend/src/domain/use_cases/scraper/scrape_kingofshojo_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_kingofshojo_manwhas.py index 3d02192..a8a5df8 100644 --- a/backend/src/domain/use_cases/scraper/scrape_kingofshojo_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_kingofshojo_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException class ScrapeKingOfShojoManwhasUseCase(BaseScraperUseCase): @@ -46,7 +47,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, "image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) return pages diff --git a/backend/src/domain/use_cases/scraper/scrape_kun_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_kun_manwhas.py index 9d2bbe8..5b5c303 100644 --- a/backend/src/domain/use_cases/scraper/scrape_kun_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_kun_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + +from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException -from src.domain.enums.scraper import ReaderEnum class ScrapeKunManwhasUseCase(BaseScraperUseCase): @@ -40,7 +41,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, "image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) 
return pages diff --git a/backend/src/domain/use_cases/scraper/scrape_miau_manwhas.py b/backend/src/domain/use_cases/scraper/scrape_miau_manwhas.py index c7116f2..bf51200 100644 --- a/backend/src/domain/use_cases/scraper/scrape_miau_manwhas.py +++ b/backend/src/domain/use_cases/scraper/scrape_miau_manwhas.py @@ -1,9 +1,10 @@ +from selenium.common.exceptions import NoSuchElementException +from selenium.webdriver.common.by import By from sqlalchemy.orm import Session + +from src.domain.enums.scraper import ReaderEnum from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from selenium.webdriver.common.by import By from src.infrastructure.services.s3 import S3Service -from selenium.common.exceptions import NoSuchElementException -from src.domain.enums.scraper import ReaderEnum class ScrapeMiauManwhasUseCase(BaseScraperUseCase): @@ -40,7 +41,11 @@ def scrape_manwha_chapter_pages(self, chapter_url): image_type = image_url.split(".")[-1] pages.append( - {"image_url": image_url, "image_name": image_name, "image_type": image_type} + { + "image_url": image_url, + "image_name": image_name, + "image_type": image_type, + } ) return pages diff --git a/backend/src/domain/use_cases/scraper/update_scraper_manwha.py b/backend/src/domain/use_cases/scraper/update_scraper_manwha.py index 8b8c9e8..b0efadf 100644 --- a/backend/src/domain/use_cases/scraper/update_scraper_manwha.py +++ b/backend/src/domain/use_cases/scraper/update_scraper_manwha.py @@ -1,10 +1,10 @@ from src.domain.exceptions.client import BadRequestException, NotFoundException +from src.domain.repository.scraper import ScraperManwhaRepository from src.domain.schemas.scraper import ( GetReadersResponse, ScraperManwhaSchema, UpdateScraperManwhaRequest, ) -from src.domain.repository.scraper import ScraperManwhaRepository from src.infrastructure.persistence.unit_of_work import UnitOfWork diff --git a/backend/src/domain/utils/normalize.py b/backend/src/domain/utils/normalize.py index 4e6c53b..ce1fe25 100644 --- a/backend/src/domain/utils/normalize.py +++ b/backend/src/domain/utils/normalize.py @@ -1,5 +1,5 @@ -import unicodedata import re +import unicodedata def normalize_string(input_str: str) -> str: diff --git a/backend/src/infrastructure/log/__init__.py b/backend/src/infrastructure/log/__init__.py index 2328627..c6e5bbf 100644 --- a/backend/src/infrastructure/log/__init__.py +++ b/backend/src/infrastructure/log/__init__.py @@ -1,7 +1,8 @@ import logging logging.basicConfig( - format="%(asctime)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S" + format="%(asctime)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", ) logger = logging.getLogger() logger.setLevel(logging.INFO) diff --git a/backend/src/infrastructure/services/aws.py b/backend/src/infrastructure/services/aws.py index bcd2e87..19c366d 100644 --- a/backend/src/infrastructure/services/aws.py +++ b/backend/src/infrastructure/services/aws.py @@ -1,4 +1,5 @@ import boto3 + from src.infrastructure.config import SETTINGS diff --git a/backend/src/infrastructure/services/notifier.py b/backend/src/infrastructure/services/notifier.py index bb009aa..32342ed 100644 --- a/backend/src/infrastructure/services/notifier.py +++ b/backend/src/infrastructure/services/notifier.py @@ -1,7 +1,7 @@ import requests -from src.domain.utils import normalize_string from src.domain.entities.manwha import Manwha +from src.domain.utils import normalize_string from src.infrastructure.config import SETTINGS diff --git a/backend/src/infrastructure/services/s3.py 
b/backend/src/infrastructure/services/s3.py index 0c72652..6c45016 100644 --- a/backend/src/infrastructure/services/s3.py +++ b/backend/src/infrastructure/services/s3.py @@ -1,6 +1,6 @@ -from src.infrastructure.services.aws import AWS from src.infrastructure.config import SETTINGS from src.infrastructure.log import logger +from src.infrastructure.services.aws import AWS class S3Service(AWS): @@ -42,7 +42,8 @@ def delete_objects(self, path: str) -> bool: limit = offset + batch_size delete_keys_batch = delete_keys[offset:limit] self.s3.Bucket(self.bucket_name).delete_objects( - Delete={"Objects": delete_keys_batch}, Bucket=self.bucket_name + Delete={"Objects": delete_keys_batch}, + Bucket=self.bucket_name, ) offset += batch_size logger.info(f"Deleted objects from S3 with prefix: {path}") diff --git a/backend/src/presentation/api/exception_handlers.py b/backend/src/presentation/api/exception_handlers.py index 570480b..bfd242a 100644 --- a/backend/src/presentation/api/exception_handlers.py +++ b/backend/src/presentation/api/exception_handlers.py @@ -1,13 +1,13 @@ from fastapi import Request from fastapi.responses import JSONResponse -from src.domain.exceptions.server import BadGatewayException from src.domain.exceptions.client import ( BadRequestException, ConflictException, NotAcceptableException, NotFoundException, ) +from src.domain.exceptions.server import BadGatewayException from src.infrastructure.log import logger diff --git a/backend/src/presentation/api/main.py b/backend/src/presentation/api/main.py index 7e3dc8b..4230348 100644 --- a/backend/src/presentation/api/main.py +++ b/backend/src/presentation/api/main.py @@ -2,7 +2,14 @@ from fastapi import FastAPI from fastapi.middleware.cors import CORSMiddleware +from src.domain.exceptions.client import ( + BadRequestException, + ConflictException, + NotAcceptableException, + NotFoundException, +) from src.domain.exceptions.server import BadGatewayException +from src.infrastructure.config import SETTINGS from src.presentation.api.exception_handlers import ( bad_gateway_exception_handler, bad_request_exception_handler, @@ -11,17 +18,10 @@ not_acceptable_exception_handler, not_found_exception_handler, ) -from src.domain.exceptions.client import ( - BadRequestException, - ConflictException, - NotAcceptableException, - NotFoundException, -) -from src.presentation.api.v1.routers.manwhas import router as v1_manwhas from src.presentation.api.v1.routers.chapters import router as v1_chapters -from src.presentation.api.v1.routers.scrapers import router as v1_scrapers +from src.presentation.api.v1.routers.manwhas import router as v1_manwhas from src.presentation.api.v1.routers.readers import router as v1_readers -from src.infrastructure.config import SETTINGS +from src.presentation.api.v1.routers.scrapers import router as v1_scrapers app = FastAPI() diff --git a/backend/src/presentation/api/v1/routers/chapters.py b/backend/src/presentation/api/v1/routers/chapters.py index 65be77a..31abd5c 100644 --- a/backend/src/presentation/api/v1/routers/chapters.py +++ b/backend/src/presentation/api/v1/routers/chapters.py @@ -1,10 +1,10 @@ from fastapi import APIRouter, Depends -from src.infrastructure.persistence.unit_of_work import UnitOfWork from src.domain.schemas.chapter import GetChapterPagesResponse from src.domain.use_cases.chapter.get_chapter_pages import ( GetChapterPagesUseCase, ) +from src.infrastructure.persistence.unit_of_work import UnitOfWork router = APIRouter(prefix="/api/v1/chapters", tags=["v1"]) diff --git 
a/backend/src/presentation/api/v1/routers/manwhas.py b/backend/src/presentation/api/v1/routers/manwhas.py index 309a3cd..57ae063 100644 --- a/backend/src/presentation/api/v1/routers/manwhas.py +++ b/backend/src/presentation/api/v1/routers/manwhas.py @@ -1,15 +1,17 @@ from fastapi import APIRouter, Depends -from src.infrastructure.services.s3 import S3Service -from src.infrastructure.persistence.unit_of_work import UnitOfWork -from src.domain.use_cases.manwha.delete_manwha_chapters import DeleteManwhaChaptersUseCase -from src.domain.use_cases.manwha.get_manwhas import GetManwhasUseCase -from src.domain.use_cases.manwha.get_manwha import GetManwhaUseCase from src.domain.schemas.manwha import ( + GetManwhaResponse, GetManwhasRequestQueryParams, GetManwhasResponse, - GetManwhaResponse, ) +from src.domain.use_cases.manwha.delete_manwha_chapters import ( + DeleteManwhaChaptersUseCase, +) +from src.domain.use_cases.manwha.get_manwha import GetManwhaUseCase +from src.domain.use_cases.manwha.get_manwhas import GetManwhasUseCase +from src.infrastructure.persistence.unit_of_work import UnitOfWork +from src.infrastructure.services.s3 import S3Service router = APIRouter(prefix="/api/v1/manwhas", tags=["v1"]) diff --git a/backend/src/presentation/api/v1/routers/scrapers.py b/backend/src/presentation/api/v1/routers/scrapers.py index 03905bd..b065279 100644 --- a/backend/src/presentation/api/v1/routers/scrapers.py +++ b/backend/src/presentation/api/v1/routers/scrapers.py @@ -1,10 +1,7 @@ -from fastapi import APIRouter, Depends, BackgroundTasks +from fastapi import APIRouter, BackgroundTasks, Depends + from src.domain.enums.scraper import ReaderEnum from src.domain.exceptions.client import BadRequestException -from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase -from src.domain.use_cases.scraper.scrape_hari_manwhas import ScrapeHariManwhasUseCase -from src.domain.use_cases.scraper.update_scraper_manwha import UpdateScraperManwhaUseCase -from src.domain.use_cases.scraper.create_manwha_to_scrape import CreateManwhaToScrapeUseCase from src.domain.schemas.scraper import ( CreateManwhaToScrapeRequest, CreateManwhaToScrapeResponse, @@ -12,17 +9,36 @@ ScraperManwhaSchema, UpdateScraperManwhaRequest, ) -from src.infrastructure.persistence.unit_of_work import UnitOfWork -from src.infrastructure.services.s3 import S3Service -from src.domain.use_cases.scraper.get_scraper_manwha import GetScraperManwhaUseCase -from src.domain.use_cases.scraper.scrape_inari_manwhas import ScrapeInariManwhasUseCase +from src.domain.use_cases.scraper.base_scraper import BaseScraperUseCase +from src.domain.use_cases.scraper.create_manwha_to_scrape import ( + CreateManwhaToScrapeUseCase, +) +from src.domain.use_cases.scraper.get_scraper_manwha import ( + GetScraperManwhaUseCase, +) from src.domain.use_cases.scraper.scrape_flower_manwhas import ( ScrapeFlowerManwhasUseCase, ) -from src.domain.use_cases.scraper.scrape_kingofshojo_manwhas import ScrapeKingOfShojoManwhasUseCase -from src.domain.use_cases.scraper.scrape_kun_manwhas import ScrapeKunManwhasUseCase -from src.domain.use_cases.scraper.scrape_miau_manwhas import ScrapeMiauManwhasUseCase - +from src.domain.use_cases.scraper.scrape_hari_manwhas import ( + ScrapeHariManwhasUseCase, +) +from src.domain.use_cases.scraper.scrape_inari_manwhas import ( + ScrapeInariManwhasUseCase, +) +from src.domain.use_cases.scraper.scrape_kingofshojo_manwhas import ( + ScrapeKingOfShojoManwhasUseCase, +) +from src.domain.use_cases.scraper.scrape_kun_manwhas import ( + 
ScrapeKunManwhasUseCase, +) +from src.domain.use_cases.scraper.scrape_miau_manwhas import ( + ScrapeMiauManwhasUseCase, +) +from src.domain.use_cases.scraper.update_scraper_manwha import ( + UpdateScraperManwhaUseCase, +) +from src.infrastructure.persistence.unit_of_work import UnitOfWork +from src.infrastructure.services.s3 import S3Service router = APIRouter(prefix="/api/v1/scrapers", tags=["v1"]) diff --git a/backend/tests/unit/domain/pagination/test_get_limit_offset.py b/backend/tests/unit/domain/pagination/test_get_limit_offset.py index 7c1c2e7..d331382 100644 --- a/backend/tests/unit/domain/pagination/test_get_limit_offset.py +++ b/backend/tests/unit/domain/pagination/test_get_limit_offset.py @@ -1,4 +1,5 @@ import pytest + from src.domain.use_cases.pagination.get_limit_offset import ( GetLimitOffsetUseCase, )
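
Two closing notes on the non-lint parts of this change. With the dev group now providing ruff instead of black and flake8, the local workflow after `poetry install` would presumably be `poetry run ruff check --fix .`, `poetry run ruff format .`, and `poetry run pytest`. Separately, the Dockerfile bump of CHROMEDRIVER_VERSION from 130.0.6723.58 to 133.0.6943.53 exists because google-chrome-stable_current_amd64.deb always installs the current stable browser while chromedriver stays pinned; the two drift apart over time, and chromedriver refuses sessions whose browser major version differs from its own. A small sanity check along these lines, assuming it runs inside the built image where both the browser and the CHROMEDRIVER_VERSION env var exist, surfaces the drift before Selenium does:

# Hedged sketch: verify the pinned chromedriver major matches the installed Chrome major.
# Assumes the backend image's environment (google-chrome on PATH, CHROMEDRIVER_VERSION set).
import os
import subprocess

driver_version = os.environ.get("CHROMEDRIVER_VERSION", "133.0.6943.53")
chrome_output = subprocess.run(
    ["google-chrome", "--version"], capture_output=True, text=True, check=True
).stdout.strip()  # e.g. "Google Chrome 133.0.6943.53"

chrome_major = chrome_output.split()[-1].split(".")[0]
driver_major = driver_version.split(".")[0]
if chrome_major != driver_major:
    raise SystemExit(
        f"Chrome major {chrome_major} != chromedriver major {driver_major}; "
        "update CHROMEDRIVER_VERSION in backend/Dockerfile"
    )
print(f"Chrome {chrome_major} and chromedriver {driver_major} are aligned")
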