diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3d0fa1036b..97bdbdf06d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,6 @@ exclude: |
(?x)
# NOT INSTALLABLE ADDONS
^base_import_async/|
- ^queue_job/|
^queue_job_batch/|
^queue_job_cron/|
^queue_job_cron_jobrunner/|
diff --git a/queue_job/README.rst b/queue_job/README.rst
index 88b5a4d00b..cf341a21b7 100644
--- a/queue_job/README.rst
+++ b/queue_job/README.rst
@@ -21,13 +21,13 @@ Job Queue
:target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html
:alt: License: LGPL-3
.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fqueue-lightgray.png?logo=github
- :target: https://github.com/OCA/queue/tree/18.0/queue_job
+ :target: https://github.com/OCA/queue/tree/19.0/queue_job
:alt: OCA/queue
.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png
- :target: https://translation.odoo-community.org/projects/queue-18-0/queue-18-0-queue_job
+ :target: https://translation.odoo-community.org/projects/queue-19-0/queue-19-0-queue_job
:alt: Translate me on Weblate
.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png
- :target: https://runboat.odoo-community.org/builds?repo=OCA/queue&target_branch=18.0
+ :target: https://runboat.odoo-community.org/builds?repo=OCA/queue&target_branch=19.0
:alt: Try me on Runboat
|badge1| |badge2| |badge3| |badge4| |badge5|
@@ -661,7 +661,7 @@ Bug Tracker
Bugs are tracked on `GitHub Issues `_.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
-`feedback `_.
+`feedback `_.
Do not contact contributors directly about support or help with technical issues.
@@ -696,8 +696,7 @@ Contributors
Other credits
-------------
-The migration of this module from 17.0 to 18.0 was financially supported
-by Camptocamp.
+
Maintainers
-----------
@@ -720,6 +719,6 @@ Current `maintainer `__:
|maintainer-guewen|
-This module is part of the `OCA/queue `_ project on GitHub.
+This module is part of the `OCA/queue `_ project on GitHub.
You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.
diff --git a/queue_job/__manifest__.py b/queue_job/__manifest__.py
index 70b9774518..4b282af322 100644
--- a/queue_job/__manifest__.py
+++ b/queue_job/__manifest__.py
@@ -2,7 +2,7 @@
{
"name": "Job Queue",
- "version": "18.0.2.0.2",
+ "version": "19.0.1.0.0",
"author": "Camptocamp,ACSONE SA/NV,Odoo Community Association (OCA)",
"website": "https://github.com/OCA/queue",
"license": "LGPL-3",
@@ -27,7 +27,7 @@
"/queue_job/static/src/views/**/*",
],
},
- "installable": False,
+ "installable": True,
"development_status": "Mature",
"maintainers": ["guewen"],
"post_init_hook": "post_init_hook",
diff --git a/queue_job/controllers/main.py b/queue_job/controllers/main.py
index 6365e6efbc..11cbd6c09b 100644
--- a/queue_job/controllers/main.py
+++ b/queue_job/controllers/main.py
@@ -11,7 +11,8 @@
from psycopg2 import OperationalError, errorcodes
from werkzeug.exceptions import BadRequest, Forbidden
-from odoo import SUPERUSER_ID, _, api, http
+from odoo import _, api, http
+from odoo.api import SUPERUSER_ID
from odoo.modules.registry import Registry
from odoo.service.model import PG_CONCURRENCY_ERRORS_TO_RETRY
diff --git a/queue_job/job.py b/queue_job/job.py
index 6cfe12f232..78c3fd5ce4 100644
--- a/queue_job/job.py
+++ b/queue_job/job.py
@@ -12,6 +12,7 @@
from random import randint
import odoo
+from odoo.fields import Domain
from .exception import FailedJobError, NoSuchJobError, RetryableJobError
@@ -337,23 +338,19 @@ def _load_from_db_record(cls, job_db_record):
def job_record_with_same_identity_key(self):
"""Check if a job to be executed with the same key exists."""
- existing = (
- self.env["queue.job"]
- .sudo()
- .search(
- [
- ("identity_key", "=", self.identity_key),
- ("state", "in", [WAIT_DEPENDENCIES, PENDING, ENQUEUED]),
- ],
- limit=1,
- )
+ domain = Domain.AND(
+ [
+ Domain("identity_key", "=", self.identity_key),
+ Domain("state", "in", [WAIT_DEPENDENCIES, PENDING, ENQUEUED]),
+ ]
)
+ existing = self.env["queue.job"].sudo().search(domain, limit=1)
return existing
@staticmethod
def db_records_from_uuids(env, job_uuids):
model = env["queue.job"].sudo()
- record = model.search([("uuid", "in", tuple(job_uuids))])
+ record = model.search(Domain("uuid", "in", tuple(job_uuids)))
return record.with_env(env).sudo()
def __init__(
@@ -856,8 +853,7 @@ def related_action(self):
funcname = record._default_related_action
if not isinstance(funcname, str):
raise ValueError(
- "related_action must be the name of the "
- "method on queue.job as string"
+ "related_action must be the name of the method on queue.job as string"
)
action = getattr(record, funcname)
action_kwargs = self.job_config.related_action_kwargs
diff --git a/queue_job/jobrunner/channels.py b/queue_job/jobrunner/channels.py
index c895d9caf3..dd47434fd8 100644
--- a/queue_job/jobrunner/channels.py
+++ b/queue_job/jobrunner/channels.py
@@ -894,8 +894,7 @@ def parse_simple_config(cls, config_string):
)
if k in config:
raise ValueError(
- f"Invalid channel config {config_string}: "
- f"duplicate key {k}"
+ f"Invalid channel config {config_string}: duplicate key {k}"
)
config[k] = v
else:
diff --git a/queue_job/migrations/18.0.1.0.0/pre-migrate.py b/queue_job/migrations/18.0.1.0.0/pre-migrate.py
deleted file mode 100644
index ce624e14dd..0000000000
--- a/queue_job/migrations/18.0.1.0.0/pre-migrate.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from openupgradelib import openupgrade
-
-from odoo.tools import SQL
-
-
-def migrate(cr, version):
- if not version:
- return
-
- # List of tables and their corresponding columns
- table_column_map = {
- "queue.job.function": ["retry_pattern", "related_action"],
- "queue.job": ["records", "args", "kwargs"],
- }
-
- for table, columns in table_column_map.items():
- for column in columns:
- if openupgrade.column_exists(cr, table, column):
- cr.execute(
- SQL(
- """
- UPDATE %(table)s
- SET %(column)s = %(column)s::jsonb
- WHERE %(column)s IS NOT NULL
- """,
- table=SQL.identifier(table),
- column=SQL.identifier(column),
- )
- )
diff --git a/queue_job/migrations/18.0.1.7.0/pre-migration.py b/queue_job/migrations/18.0.1.7.0/pre-migration.py
deleted file mode 100644
index 931c336866..0000000000
--- a/queue_job/migrations/18.0.1.7.0/pre-migration.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
-from openupgradelib import openupgrade
-
-
-@openupgrade.migrate()
-def migrate(env, version):
- # Remove cron garbage collector
- openupgrade.delete_records_safely_by_xml_id(
- env,
- ["queue_job.ir_cron_queue_job_garbage_collector"],
- )
diff --git a/queue_job/models/queue_job.py b/queue_job/models/queue_job.py
index 411ae43af5..d66bfd2aba 100644
--- a/queue_job/models/queue_job.py
+++ b/queue_job/models/queue_job.py
@@ -6,6 +6,7 @@
from datetime import datetime, timedelta
from odoo import _, api, exceptions, fields, models
+from odoo.fields import Domain
from odoo.tools import config, html_escape, index_exists
from odoo.addons.base_sparse_field.models.fields import Serialized
@@ -145,20 +146,13 @@ def init(self):
@api.depends("dependencies")
def _compute_dependency_graph(self):
- jobs_groups = self.env["queue.job"].read_group(
- [
- (
- "graph_uuid",
- "in",
- [uuid for uuid in self.mapped("graph_uuid") if uuid],
- )
- ],
- ["graph_uuid", "ids:array_agg(id)"],
- ["graph_uuid"],
+ graph_uuids = [uuid for uuid in self.mapped("graph_uuid") if uuid]
+ jobs_groups = self.env["queue.job"]._read_group(
+ Domain("graph_uuid", "in", graph_uuids),
+ groupby=["graph_uuid"],
+ aggregates=["id:array_agg"],
)
- ids_per_graph_uuid = {
- group["graph_uuid"]: group["ids"] for group in jobs_groups
- }
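+        # _read_group returns one (graph_uuid, ids) tuple per group for the "id:array_agg" aggregate.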
+ ids_per_graph_uuid = {graph_uuid: ids for graph_uuid, ids in jobs_groups}
for record in self:
if not record.graph_uuid:
record.dependency_graph = {}
@@ -216,20 +210,13 @@ def _dependency_graph_vis_node(self):
}
def _compute_graph_jobs_count(self):
- jobs_groups = self.env["queue.job"].read_group(
- [
- (
- "graph_uuid",
- "in",
- [uuid for uuid in self.mapped("graph_uuid") if uuid],
- )
- ],
- ["graph_uuid"],
- ["graph_uuid"],
+ graph_uuids = [uuid for uuid in self.mapped("graph_uuid") if uuid]
+ jobs_groups = self.env["queue.job"]._read_group(
+ Domain("graph_uuid", "in", graph_uuids),
+ groupby=["graph_uuid"],
+ aggregates=["__count"],
)
- count_per_graph_uuid = {
- group["graph_uuid"]: group["graph_uuid_count"] for group in jobs_groups
- }
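+        # With the "__count" aggregate each row is a (graph_uuid, count) tuple.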
+ count_per_graph_uuid = {graph_uuid: count for graph_uuid, count in jobs_groups}
for record in self:
record.graph_jobs_count = count_per_graph_uuid.get(record.graph_uuid) or 0
@@ -285,7 +272,7 @@ def open_related_action(self):
def open_graph_jobs(self):
"""Return action that opens all jobs of the same graph"""
self.ensure_one()
- jobs = self.env["queue.job"].search([("graph_uuid", "=", self.graph_uuid)])
+ jobs = self.env["queue.job"].search(Domain("graph_uuid", "=", self.graph_uuid))
action = self.env["ir.actions.act_window"]._for_xml_id(
"queue_job.action_queue_job"
@@ -294,7 +281,7 @@ def open_graph_jobs(self):
{
"name": _("Jobs for graph %s") % (self.graph_uuid),
"context": {},
- "domain": [("id", "in", jobs.ids)],
+ "domain": Domain("id", "in", jobs.ids),
}
)
return action
@@ -354,11 +341,11 @@ def _subscribe_users_domain(self):
"""Subscribe all users having the 'Queue Job Manager' group"""
group = self.env.ref("queue_job.group_queue_job_manager")
if not group:
- return None
+ return Domain([])
companies = self.mapped("company_id")
- domain = [("groups_id", "=", group.id)]
+ domain = Domain("groups_id", "=", group.id)
if companies:
- domain.append(("company_id", "in", companies.ids))
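+            # Domain objects combine with &, replacing the old list-append style.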
+ domain &= Domain("company_id", "in", companies.ids)
return domain
def _message_failed_job(self):
@@ -380,7 +367,7 @@ def _needaction_domain_get(self):
:return: domain or False is no action
"""
- return [("state", "=", "failed")]
+ return Domain("state", "=", "failed")
def autovacuum(self):
"""Delete all jobs done based on the removal interval defined on the
@@ -388,16 +375,15 @@ def autovacuum(self):
Called from a cron.
"""
- for channel in self.env["queue.job.channel"].search([]):
+ for channel in self.env["queue.job.channel"].search(Domain([])):
deadline = datetime.now() - timedelta(days=int(channel.removal_interval))
while True:
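+            # Jobs done or cancelled before the deadline, limited to this channel.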
+            domain = Domain.OR(
+                [
+                    Domain("date_done", "<=", deadline),
+                    Domain("date_cancelled", "<=", deadline),
+                ]
+            ) & Domain("channel", "=", channel.complete_name)
jobs = self.search(
- [
- "|",
- ("date_done", "<=", deadline),
- ("date_cancelled", "<=", deadline),
- ("channel", "=", channel.complete_name),
- ],
+ domain,
order="date_done, date_created",
limit=1000,
)
@@ -437,7 +423,7 @@ def related_action_open_record(self):
{
"name": _("Related Records"),
"view_mode": "list,form",
- "domain": [("id", "in", records.ids)],
+ "domain": Domain("id", "in", records.ids),
}
)
return action
diff --git a/queue_job/models/queue_job_channel.py b/queue_job/models/queue_job_channel.py
index 4aabb0188c..43e2063ca2 100644
--- a/queue_job/models/queue_job_channel.py
+++ b/queue_job/models/queue_job_channel.py
@@ -3,6 +3,7 @@
from odoo import _, api, exceptions, fields, models
+from odoo.fields import Domain
class QueueJobChannel(models.Model):
@@ -60,7 +61,12 @@ def create(self, vals_list):
parent_id = vals.get("parent_id")
if name and parent_id:
existing = self.search(
- [("name", "=", name), ("parent_id", "=", parent_id)]
+ Domain.AND(
+ [
+ Domain("name", "=", name),
+ Domain("parent_id", "=", parent_id),
+ ]
+ )
)
if existing:
if not existing.get_metadata()[0].get("noupdate"):
diff --git a/queue_job/models/queue_job_function.py b/queue_job/models/queue_job_function.py
index 7cf73ea370..7e599c1e5a 100644
--- a/queue_job/models/queue_job_function.py
+++ b/queue_job/models/queue_job_function.py
@@ -7,6 +7,7 @@
from collections import namedtuple
from odoo import _, api, exceptions, fields, models, tools
+from odoo.fields import Domain
from ..fields import JobSerialized
@@ -95,7 +96,9 @@ def _inverse_name(self):
model_name = groups[1]
method = groups[2]
model = (
- self.env["ir.model"].sudo().search([("model", "=", model_name)], limit=1)
+ self.env["ir.model"]
+ .sudo()
+ .search(Domain("model", "=", model_name), limit=1)
)
if not model:
raise exceptions.UserError(_("Model {} not found").format(model_name))
@@ -173,7 +176,7 @@ def _parse_retry_pattern(self):
@tools.ormcache("name")
def job_config(self, name):
- config = self.search([("name", "=", name)], limit=1)
+ config = self.search(Domain("name", "=", name), limit=1)
if not config:
return self.job_default_config()
retry_pattern = config._parse_retry_pattern()
@@ -250,7 +253,7 @@ def create(self, vals_list):
for vals in vals_list:
name = vals.get("name")
if name:
- existing = self.search([("name", "=", name)], limit=1)
+ existing = self.search(Domain("name", "=", name), limit=1)
if existing:
if not existing.get_metadata()[0].get("noupdate"):
existing.write(vals)
diff --git a/queue_job/readme/CREDITS.md b/queue_job/readme/CREDITS.md
index 83b3ec91f7..e69de29bb2 100644
--- a/queue_job/readme/CREDITS.md
+++ b/queue_job/readme/CREDITS.md
@@ -1 +0,0 @@
-The migration of this module from 17.0 to 18.0 was financially supported by Camptocamp.
diff --git a/queue_job/static/description/index.html b/queue_job/static/description/index.html
index 6cc2121a4d..76bd8c1362 100644
--- a/queue_job/static/description/index.html
+++ b/queue_job/static/description/index.html
@@ -374,7 +374,7 @@ Job Queue
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!! source digest: sha256:58f9182440bb316576671959b69148ea5454958f9ae8db75bccd30c89012676d
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! -->
-

+

This addon adds an integrated Job Queue to Odoo.
It allows to postpone method calls executed asynchronously.
Jobs are executed in the background by a Jobrunner, in their own
@@ -962,7 +962,7 @@
Bugs are tracked on GitHub Issues.
In case of trouble, please check there if your issue has already been reported.
If you spotted it first, help us to smash it by providing a detailed and welcomed
-feedback.
+feedback.
Do not contact contributors directly about support or help with technical issues.
-The migration of this module from 17.0 to 18.0 was financially supported
-by Camptocamp.
@@ -1010,7 +1008,7 @@
promote its widespread use.
Current maintainer:

-This module is part of the OCA/queue project on GitHub.
+This module is part of the OCA/queue project on GitHub.
You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.
diff --git a/queue_job/tests/common.py b/queue_job/tests/common.py
index ec036bd639..0cc9e00168 100644
--- a/queue_job/tests/common.py
+++ b/queue_job/tests/common.py
@@ -8,6 +8,8 @@
from operator import attrgetter
from unittest import TestCase, mock
+from odoo.fields import Domain
+from odoo.tests import common as odoo_common
from odoo.tests.case import TestCase as _TestCase
from odoo.tests.common import MetaCase
@@ -17,6 +19,27 @@
from odoo.addons.queue_job.job import Job
+class DisableTrackingCaseMixin:
+ """Disable tracking for the whole test environment."""
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
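+        # tracking_disable prevents mail.thread tracking on records created during tests.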
+        # odoo.tests.common.BaseCase sets up no environment; only rebind it when one exists.
+        if getattr(cls, "env", None) is not None:
+            cls.env = cls.env(context=dict(cls.env.context, tracking_disable=True))
+
+
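+# Test case classes used by queue_job tests in place of odoo.tests.common,
+# so every test runs with tracking disabled.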
+class BaseCase(DisableTrackingCaseMixin, odoo_common.BaseCase):
+ pass
+
+
+class TransactionCase(DisableTrackingCaseMixin, odoo_common.TransactionCase):
+ pass
+
+
+class HttpCase(DisableTrackingCaseMixin, odoo_common.HttpCase):
+ pass
+
+
@contextmanager
def trap_jobs():
"""Context Manager used to test enqueuing of jobs
@@ -212,7 +235,7 @@ def assert_enqueued_job(self, method, args=None, kwargs=None, properties=None):
if expected_call not in actual_calls:
raise AssertionError(
- "Job {} was not enqueued.\n" "Actual enqueued jobs:\n{}".format(
+ "Job {} was not enqueued.\nActual enqueued jobs:\n{}".format(
self._format_job_call(expected_call),
"\n".join(
f" * {self._format_job_call(call)}" for call in actual_calls
@@ -335,7 +358,7 @@ def search_created(self):
return self.search_all() - self.existing
def search_all(self):
- return self.env["queue.job"].search([])
+ return self.env["queue.job"].search(Domain([]))
class JobMixin:
diff --git a/queue_job/tests/test_delayable.py b/queue_job/tests/test_delayable.py
index df4c3cf5c9..f3f430d569 100644
--- a/queue_job/tests/test_delayable.py
+++ b/queue_job/tests/test_delayable.py
@@ -5,12 +5,12 @@
import logging
from unittest import mock
-from odoo.tests import common
-
from odoo.addons.queue_job.delay import Delayable, DelayableGraph
+from .common import BaseCase
+
-class TestDelayable(common.BaseCase):
+class TestDelayable(BaseCase):
def setUp(self):
super().setUp()
self.recordset = mock.MagicMock(name="recordset")
diff --git a/queue_job/tests/test_delayable_split.py b/queue_job/tests/test_delayable_split.py
index 25fd9c3ebc..ac068869f6 100644
--- a/queue_job/tests/test_delayable_split.py
+++ b/queue_job/tests/test_delayable_split.py
@@ -2,12 +2,12 @@
# @author Florian Mounier
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
-from odoo.tests import common
-
from odoo.addons.queue_job.delay import Delayable
+from .common import BaseCase
+
-class TestDelayableSplit(common.BaseCase):
+class TestDelayableSplit(BaseCase):
def setUp(self):
super().setUp()
diff --git a/queue_job/tests/test_json_field.py b/queue_job/tests/test_json_field.py
index f5bf760ffe..c26f9c5d74 100644
--- a/queue_job/tests/test_json_field.py
+++ b/queue_job/tests/test_json_field.py
@@ -6,24 +6,50 @@
from lxml import etree
-from odoo.tests import common
-
# pylint: disable=odoo-addons-relative-import
# we are testing, we want to test as we were an external consumer of the API
from odoo.addons.queue_job.fields import JobDecoder, JobEncoder
+from .common import TransactionCase
+
+
+class TestJson(TransactionCase):
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass()
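+        # Create a dedicated partner and user rather than relying on demo data
+        # (base.user_demo / base.main_partner), which may not be installed.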
+ cls.partner = cls.env["res.partner"].create({"name": "Queue Job JSON Partner"})
+ user_partner = cls.env["res.partner"].create({"name": "Queue Job JSON User"})
+ cls.demo_user = (
+ cls.env["res.users"]
+ .with_context(no_reset_password=True)
+ .create(
+ {
+ "name": "Queue Job JSON User",
+ "login": "queue_job_json_user",
+ "email": "queue.job.json@example.com",
+ "partner_id": user_partner.id,
+ "company_id": cls.env.company.id,
+ "company_ids": [(6, 0, cls.env.companies.ids)],
+ "tz": cls.env.user.tz or "UTC",
+ "lang": cls.env.user.lang or "en_US",
+ }
+ )
+ )
+
+ def _partner_with_user_context(self, context):
+ return self.env(user=self.demo_user, context=context)["res.partner"].browse(
+ self.partner.id
+ )
-class TestJson(common.TransactionCase):
def test_encoder_recordset(self):
- demo_user = self.env.ref("base.user_demo")
- context = demo_user.context_get()
- partner = self.env(user=demo_user, context=context).ref("base.main_partner")
+ context = self.demo_user.context_get()
+ partner = self._partner_with_user_context(context)
value = partner
value_json = json.dumps(value, cls=JobEncoder)
expected_context = context.copy()
expected_context.pop("uid")
expected = {
- "uid": demo_user.id,
+ "uid": self.demo_user.id,
"_type": "odoo_recordset",
"model": "res.partner",
"ids": [partner.id],
@@ -33,9 +59,8 @@ def test_encoder_recordset(self):
self.assertEqual(json.loads(value_json), expected)
def test_encoder_recordset_list(self):
- demo_user = self.env.ref("base.user_demo")
- context = demo_user.context_get()
- partner = self.env(user=demo_user, context=context).ref("base.main_partner")
+ context = self.demo_user.context_get()
+ partner = self._partner_with_user_context(context)
value = ["a", 1, partner]
value_json = json.dumps(value, cls=JobEncoder)
expected_context = context.copy()
@@ -44,7 +69,7 @@ def test_encoder_recordset_list(self):
"a",
1,
{
- "uid": demo_user.id,
+ "uid": self.demo_user.id,
"_type": "odoo_recordset",
"model": "res.partner",
"ids": [partner.id],
@@ -55,9 +80,8 @@ def test_encoder_recordset_list(self):
self.assertEqual(json.loads(value_json), expected)
def test_decoder_recordset(self):
- demo_user = self.env.ref("base.user_demo")
- context = demo_user.context_get()
- partner = self.env(user=demo_user).ref("base.main_partner")
+ context = self.demo_user.context_get()
+ partner = self._partner_with_user_context(context)
value_json = json.dumps(
{
@@ -65,7 +89,7 @@ def test_decoder_recordset(self):
"model": "res.partner",
"su": False,
"ids": partner.ids,
- "uid": demo_user.id,
+ "uid": self.demo_user.id,
"context": {"tz": context["tz"], "lang": context["lang"]},
}
)
@@ -73,12 +97,11 @@ def test_decoder_recordset(self):
expected = partner
value = json.loads(value_json, cls=JobDecoder, env=self.env)
self.assertEqual(value, expected)
- self.assertEqual(demo_user, expected.env.user)
+ self.assertEqual(self.demo_user, expected.env.user)
def test_decoder_recordset_list(self):
- demo_user = self.env.ref("base.user_demo")
- context = demo_user.context_get()
- partner = self.env(user=demo_user).ref("base.main_partner")
+ context = self.demo_user.context_get()
+ partner = self._partner_with_user_context(context)
value_json = json.dumps(
[
"a",
@@ -88,7 +111,7 @@ def test_decoder_recordset_list(self):
"model": "res.partner",
"su": False,
"ids": partner.ids,
- "uid": demo_user.id,
+ "uid": self.demo_user.id,
"context": {"tz": context["tz"], "lang": context["lang"]},
},
]
@@ -96,11 +119,11 @@ def test_decoder_recordset_list(self):
expected = ["a", 1, partner]
value = json.loads(value_json, cls=JobDecoder, env=self.env)
self.assertEqual(value, expected)
- self.assertEqual(demo_user, expected[2].env.user)
+ self.assertEqual(self.demo_user, expected[2].env.user)
def test_decoder_recordset_list_without_user(self):
value_json = (
- '["a", 1, {"_type": "odoo_recordset",' '"model": "res.users", "ids": [1]}]'
+ '["a", 1, {"_type": "odoo_recordset","model": "res.users", "ids": [1]}]'
)
expected = ["a", 1, self.env.ref("base.user_root")]
value = json.loads(value_json, cls=JobDecoder, env=self.env)
@@ -132,7 +155,7 @@ def test_encoder_date(self):
self.assertEqual(json.loads(value_json), expected)
def test_decoder_date(self):
- value_json = '["a", 1, {"_type": "date_isoformat",' '"value": "2017-04-19"}]'
+ value_json = '["a", 1, {"_type": "date_isoformat","value": "2017-04-19"}]'
expected = ["a", 1, date(2017, 4, 19)]
value = json.loads(value_json, cls=JobDecoder, env=self.env)
self.assertEqual(value, expected)
diff --git a/queue_job/tests/test_model_job_channel.py b/queue_job/tests/test_model_job_channel.py
index 20ebbc0bfe..1d79312faa 100644
--- a/queue_job/tests/test_model_job_channel.py
+++ b/queue_job/tests/test_model_job_channel.py
@@ -4,14 +4,16 @@
from psycopg2 import IntegrityError
import odoo
-from odoo.tests import common
+from odoo.fields import Domain
+from .common import TransactionCase
-class TestJobChannel(common.TransactionCase):
+
+class TestJobChannel(TransactionCase):
def setUp(self):
super().setUp()
self.Channel = self.env["queue.job.channel"]
- self.root_channel = self.Channel.search([("name", "=", "root")])
+ self.root_channel = self.Channel.search(Domain("name", "=", "root"))
def test_channel_new(self):
channel = self.Channel.new()
diff --git a/queue_job/tests/test_model_job_function.py b/queue_job/tests/test_model_job_function.py
index 84676fdb65..0c7da9f055 100644
--- a/queue_job/tests/test_model_job_function.py
+++ b/queue_job/tests/test_model_job_function.py
@@ -2,10 +2,11 @@
# license lgpl-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
from odoo import exceptions
-from odoo.tests import common
+from .common import TransactionCase
-class TestJobFunction(common.TransactionCase):
+
+class TestJobFunction(TransactionCase):
def test_function_name_compute(self):
function = self.env["queue.job.function"].create(
{"model_id": self.env.ref("base.model_res_users").id, "method": "read"}
diff --git a/queue_job/tests/test_queue_job_protected_write.py b/queue_job/tests/test_queue_job_protected_write.py
index eadb16ab9c..c918d24ee5 100644
--- a/queue_job/tests/test_queue_job_protected_write.py
+++ b/queue_job/tests/test_queue_job_protected_write.py
@@ -2,14 +2,16 @@
# license lgpl-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
from odoo import exceptions
-from odoo.tests import common
+from odoo.tests.common import JsonRpcException
from odoo.tools import mute_logger
+from .common import HttpCase, TransactionCase
-class TestJobCreatePrivate(common.HttpCase):
+
+class TestJobCreatePrivate(HttpCase):
def test_create_error(self):
self.authenticate("admin", "admin")
- with self.assertRaises(common.JsonRpcException) as cm, mute_logger("odoo.http"):
+ with self.assertRaises(JsonRpcException) as cm, mute_logger("odoo.http"):
self.make_jsonrpc_request(
"/web/dataset/call_kw",
params={
@@ -29,7 +31,7 @@ def test_create_error(self):
self.assertEqual("odoo.exceptions.AccessError", str(cm.exception))
-class TestJobWriteProtected(common.TransactionCase):
+class TestJobWriteProtected(TransactionCase):
def test_write_protected_field_error(self):
job_ = self.env["res.partner"].with_delay().create({"name": "test"})
db_job = job_.db_record()
diff --git a/queue_job/tests/test_run_rob_controller.py b/queue_job/tests/test_run_rob_controller.py
index bb63bc82ec..c0a605d9d0 100644
--- a/queue_job/tests/test_run_rob_controller.py
+++ b/queue_job/tests/test_run_rob_controller.py
@@ -1,9 +1,8 @@
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
-from odoo.tests.common import TransactionCase
-
from ..controllers.main import RunJobController
from ..job import Job
+from .common import TransactionCase
class TestRunJobController(TransactionCase):
diff --git a/queue_job/tests/test_runner_runner.py b/queue_job/tests/test_runner_runner.py
index 131ce6322d..6298a7365d 100644
--- a/queue_job/tests/test_runner_runner.py
+++ b/queue_job/tests/test_runner_runner.py
@@ -5,11 +5,11 @@
# we are testing, we want to test as we were an external consumer of the API
import os
-from odoo.tests import BaseCase, tagged
+from odoo.tests import tagged
from odoo.addons.queue_job.jobrunner import runner
-from .common import load_doctests
+from .common import BaseCase, load_doctests
load_tests = load_doctests(runner)
diff --git a/queue_job/tests/test_wizards.py b/queue_job/tests/test_wizards.py
index 2ac162d313..7356346509 100644
--- a/queue_job/tests/test_wizards.py
+++ b/queue_job/tests/test_wizards.py
@@ -1,8 +1,8 @@
# license lgpl-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
-from odoo.tests import common
+from .common import TransactionCase
-class TestWizards(common.TransactionCase):
+class TestWizards(TransactionCase):
def setUp(self):
super().setUp()
self.job = (