diff --git a/.github/workflows/ci-full.yml b/.github/workflows/ci-full.yml
index 1945005d..f38195d0 100644
--- a/.github/workflows/ci-full.yml
+++ b/.github/workflows/ci-full.yml
@@ -30,7 +30,7 @@ concurrency:
cancel-in-progress: true
env:
- ODOO_ADDONS_PATH: /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
+ ODOO_ADDONS_PATH: /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/odoo-job-worker,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
jobs:
# ============================================================================
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e6dbe836..dc4a57ad 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -17,7 +17,7 @@ concurrency:
cancel-in-progress: true
env:
- ODOO_ADDONS_PATH: /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
+ ODOO_ADDONS_PATH: /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/odoo-job-worker,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
jobs:
# ============================================================================
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3584cb30..5bc24166 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -178,7 +178,7 @@ repos:
- PyYAML
types: [python]
# Exclude: scripts, tests, migrations, and third-party modules
- exclude: ^scripts/|/tests/|migrations/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|migrations/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
- id: openspp-check-xml-ids
name: "OpenSPP: XML ID naming conventions"
@@ -189,7 +189,7 @@ repos:
- PyYAML
types: [xml]
# Exclude: scripts, tests, data, demo, and third-party modules
- exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
# Phase 2: ACL check (warning only, runs on all files)
- id: openspp-check-acl
@@ -222,7 +222,7 @@ repos:
- PyYAML
types: [python]
# Exclude: scripts, tests, migrations, and third-party modules
- exclude: ^scripts/|/tests/|migrations/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|migrations/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
# Phase 3: Logger setup check (warning only)
- id: openspp-check-logger
@@ -234,7 +234,7 @@ repos:
- PyYAML
types: [python]
# Exclude: scripts, tests, manifests, inits, and third-party modules
- exclude: ^scripts/|/tests/|__manifest__|__init__|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|__manifest__|__init__|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
# Phase 3: UI patterns check (warning only)
- id: openspp-check-ui
@@ -248,7 +248,7 @@ repos:
- PyYAML
types: [xml]
# Exclude: scripts, tests, data, demo, and third-party modules
- exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
# Odoo 19 Compatibility Checks
- id: openspp-check-odoo19-python
@@ -260,7 +260,7 @@ repos:
- PyYAML
types: [python]
# Exclude: scripts, tests, migrations, and third-party modules
- exclude: ^scripts/|/tests/|migrations/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|migrations/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
- id: openspp-check-odoo19-xml
name: "OpenSPP: Odoo 19 compatibility (XML)"
@@ -272,7 +272,7 @@ repos:
- lxml
types: [xml]
# Exclude: scripts, tests, data, demo, and third-party modules
- exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|queue_job|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
+ exclude: ^scripts/|/tests/|/data/|/demo/|^(fastapi|job_worker|base_user_role|extendable|extendable_fastapi|endpoint_route_handler)/
pass_filenames: true
# API authentication enforcement
- id: openspp-check-api-auth
diff --git a/.semgrep/odoo-security.yml b/.semgrep/odoo-security.yml
index 865d45e9..82aa791f 100644
--- a/.semgrep/odoo-security.yml
+++ b/.semgrep/odoo-security.yml
@@ -355,7 +355,7 @@ rules:
...
message: |
cr.commit() inside a loop can cause partial data states on failure.
- Use queue_job for batch operations or handle transactions properly.
+ Use job_worker for batch operations or handle transactions properly.
severity: WARNING
languages: [python]
metadata:
diff --git a/docker-compose.yml b/docker-compose.yml
index 4b8b6746..316ee5fa 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -89,9 +89,6 @@ services:
ODOO_WORKERS: "0"
ODOO_CRON_THREADS: "0"
- # Job queue configuration (defaults to "root:2")
- # ODOO_QUEUE_JOB_CHANNELS: "root:4"
-
# Modules to initialize (configurable via env)
# For E2E: ODOO_INIT_MODULES=spp_mis_demo_v2
# For UI dev: ODOO_INIT_MODULES=spp_base (or your module)
diff --git a/docker/.env.production.example b/docker/.env.production.example
index 95d2dd07..3f3ff6f5 100644
--- a/docker/.env.production.example
+++ b/docker/.env.production.example
@@ -60,9 +60,6 @@ DB_SSLMODE=prefer
# Number of Odoo workers (rule: (CPU cores * 2) + 1; ~1 worker per 6 concurrent users)
ODOO_WORKERS=2
-# Queue job concurrent channels
-ODOO_QUEUE_JOB_CHANNELS=root:2
-
# Memory limits (Docker format: 512M, 1G, 2G, etc.)
ODOO_MEMORY_LIMIT=4G
ODOO_MEMORY_RESERVATION=2G
diff --git a/docker/Dockerfile b/docker/Dockerfile
index d4a51328..7e7c5f9c 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -112,6 +112,7 @@ RUN --mount=type=cache,target=/tmp/downloads,sharing=locked \
download_module "OCA/server-ux" "server-ux"; \
download_module "OCA/server-tools" "server-tools"; \
download_module "OCA/queue" "queue"; \
+ download_module "OpenSPP/odoo-job-worker" "odoo-job-worker"; \
download_module "OCA/server-backend" "server-backend"; \
download_module "OCA/rest-framework" "rest-framework"; \
download_module "muk-it/odoo-modules" "muk-it"
diff --git a/docker/README.md b/docker/README.md
index f128eebc..37e31294 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -190,15 +190,14 @@ docker compose -f docker/docker-compose.production.yml exec clamav clamscan --ve
### Performance
-| Variable | Default | Description |
-| ------------------------- | ---------- | ------------------------------------- |
-| `ODOO_WORKERS` | 2 | Number of worker processes |
-| `ODOO_CRON_THREADS` | 1 | Number of cron threads |
-| `ODOO_MEMORY_SOFT` | 2147483648 | Soft memory limit per worker (bytes) |
-| `ODOO_MEMORY_HARD` | 2684354560 | Hard memory limit per worker (bytes) |
-| `ODOO_TIME_CPU` | 600 | CPU time limit per request (seconds) |
-| `ODOO_TIME_REAL` | 1200 | Real time limit per request (seconds) |
-| `ODOO_QUEUE_JOB_CHANNELS` | root:2 | Concurrent background jobs |
+| Variable | Default | Description |
+| ------------------- | ---------- | ------------------------------------- |
+| `ODOO_WORKERS` | 2 | Number of worker processes |
+| `ODOO_CRON_THREADS` | 1 | Number of cron threads |
+| `ODOO_MEMORY_SOFT` | 2147483648 | Soft memory limit per worker (bytes) |
+| `ODOO_MEMORY_HARD` | 2684354560 | Hard memory limit per worker (bytes) |
+| `ODOO_TIME_CPU` | 600 | CPU time limit per request (seconds) |
+| `ODOO_TIME_REAL` | 1200 | Real time limit per request (seconds) |
### Logging
diff --git a/docker/docker-compose.production.yml b/docker/docker-compose.production.yml
index 5a96b952..189995f4 100644
--- a/docker/docker-compose.production.yml
+++ b/docker/docker-compose.production.yml
@@ -252,7 +252,7 @@ services:
memory: ${ODOO_MEMORY_RESERVATION:-2G}
# ==========================================================================
- # Queue Worker - Background job processing (OCA/queue_job)
+ # Queue Worker - Background job processing (job_worker)
# ==========================================================================
queue-worker:
image: ${OPENSPP_IMAGE:-ghcr.io/openspp/openspp:latest}
@@ -275,17 +275,10 @@ services:
DB_SSLMODE: ${DB_SSLMODE:-prefer}
ODOO_ADMIN_PASSWD: ${ODOO_ADMIN_PASSWD:?ODOO_ADMIN_PASSWD is required}
- # Queue worker mode - no HTTP, just process jobs
- ODOO_WORKERS: "0"
- ODOO_CRON_THREADS: "0"
-
- # Queue job channels (number of concurrent jobs)
- QUEUE_JOB_CHANNELS: ${QUEUE_JOB_CHANNELS:-2}
-
# Logging
LOG_LEVEL: ${LOG_LEVEL:-info}
- # Override command to run queue job worker
- command: ["odoo", "gevent", "--limit-time-real=0"]
+ # Run job_worker standalone runner process
+ command: ["python3", "-m", "odoo.addons.job_worker.cli"]
volumes:
- odoo_data:/var/lib/odoo
- odoo_addons:/mnt/extra-addons
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
index c51138e2..fce78ba8 100755
--- a/docker/entrypoint.sh
+++ b/docker/entrypoint.sh
@@ -175,10 +175,6 @@ export ODOO_TIME_REAL="${ODOO_TIME_REAL:-1200}"
export ODOO_LIMIT_REQUEST="${ODOO_LIMIT_REQUEST:-8192}"
[[ -z "$ODOO_LIMIT_REQUEST" || ! "$ODOO_LIMIT_REQUEST" =~ ^[0-9]+$ ]] && export ODOO_LIMIT_REQUEST="8192"
-# Queue Job configuration (OCA/queue)
-export ODOO_QUEUE_JOB_CHANNELS="${ODOO_QUEUE_JOB_CHANNELS:-root:2}"
-[[ -z "$ODOO_QUEUE_JOB_CHANNELS" || ! "$ODOO_QUEUE_JOB_CHANNELS" =~ ^[a-zA-Z_][a-zA-Z0-9_.]*:[0-9]+(,[a-zA-Z_][a-zA-Z0-9_.]*:[0-9]+)*$ ]] && export ODOO_QUEUE_JOB_CHANNELS="root:2"
-
# Logging
export LOG_LEVEL="${LOG_LEVEL:-info}"
export LOG_HANDLER="${LOG_HANDLER:-:INFO}"
diff --git a/docker/odoo.conf.template b/docker/odoo.conf.template
index ba646136..db31e847 100644
--- a/docker/odoo.conf.template
+++ b/docker/odoo.conf.template
@@ -18,7 +18,7 @@ list_db = ${LIST_DB}
data_dir = /var/lib/odoo
# Addons path - includes Odoo core, OpenSPP, and OCA modules
-addons_path = /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
+addons_path = /opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/odoo-job-worker,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it
# =============================================================================
# Security
@@ -62,8 +62,8 @@ http_enable = True
http_interface = ${HTTP_INTERFACE}
gevent_port = 8072
-# Server-wide modules (queue_job for background jobs)
-server_wide_modules = base,web,queue_job
+# Server-wide modules
+server_wide_modules = base,web
# =============================================================================
# Email configuration
@@ -100,11 +100,7 @@ unaccent = ${UNACCENT}
without_demo = True
# =============================================================================
-# Queue Job Configuration (OCA/queue)
+# Job Worker Configuration
# =============================================================================
-[queue_job]
-channels = ${ODOO_QUEUE_JOB_CHANNELS}
-jobrunner_db_host = ${DB_HOST}
-jobrunner_db_port = ${DB_PORT}
-jobrunner_db_user = ${DB_USER}
-jobrunner_db_password = ${DB_PASSWORD}
+# The job_worker runner runs as a separate process via:
+# python3 -m odoo.addons.job_worker.cli
diff --git a/scripts/lint/check_naming.py b/scripts/lint/check_naming.py
index c946c27c..c6197b0d 100755
--- a/scripts/lint/check_naming.py
+++ b/scripts/lint/check_naming.py
@@ -47,7 +47,7 @@
# Whitelist for modules that don't follow spp_* naming
MODULE_WHITELIST = {
"base_user_role",
- "queue_job",
+ "job_worker",
"endpoint_route_handler",
"extendable",
"extendable_fastapi",
@@ -75,13 +75,14 @@
# Third-party models that don't need to follow spp.* naming
# These are from external/third-party modules we depend on
THIRD_PARTY_MODEL_WHITELIST = {
- # queue_job module
+ # job_worker module
"queue.job",
"queue.job.channel",
"queue.job.function",
"queue.job.lock",
"queue.jobs.to.cancelled",
"queue.jobs.to.done",
+ "queue.limit",
"queue.requeue.job",
# fastapi module
"fastapi.endpoint",
diff --git a/scripts/lint/check_performance.py b/scripts/lint/check_performance.py
index 64c93822..b975a369 100755
--- a/scripts/lint/check_performance.py
+++ b/scripts/lint/check_performance.py
@@ -4,7 +4,7 @@
Detects common performance issues based on docs/principles/performance-scalability.md:
- Offset-based pagination (should use cursor-based)
-- cr.commit() in loops (should use queue_job)
+- cr.commit() in loops (should use job_worker)
- N+1 query patterns (attribute access in loops without prefetch)
Features:
@@ -159,10 +159,10 @@ def visit_Call(self, node):
Violation(
file_path=file_path,
line=line_num,
- message="cr.commit() inside loop - use queue_job for batch processing instead",
+ message="cr.commit() inside loop - use job_worker for batch processing instead",
rule_id="performance.commit_in_loop",
severity=severity,
- suggestion="Use queue_job to process records asynchronously",
+ suggestion="Use job_worker to process records asynchronously",
doc_link="docs/principles/performance-scalability.md#batch-processing-pattern",
)
)
@@ -457,7 +457,7 @@ def main():
Checks:
- Offset pagination: .search(..., offset=...) should use cursor-based
- - Commit in loops: cr.commit() in loops should use queue_job
+ - Commit in loops: cr.commit() in loops should use job_worker
- N+1 queries: Related field access in loops without prefetch
See docs/principles/performance-scalability.md for guidelines.
diff --git a/scripts/test_single_module.sh b/scripts/test_single_module.sh
index 269aea28..3e49ecbc 100755
--- a/scripts/test_single_module.sh
+++ b/scripts/test_single_module.sh
@@ -196,7 +196,7 @@ run_tests_docker() {
--entrypoint "" \
test \
/opt/odoo/odoo/odoo-bin \
- --addons-path=/opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it \
+ --addons-path=/opt/odoo/odoo/addons,/opt/odoo/odoo/odoo/addons,/mnt/extra-addons/openspp,/mnt/extra-addons/server-ux,/mnt/extra-addons/server-tools,/mnt/extra-addons/queue,/mnt/extra-addons/odoo-job-worker,/mnt/extra-addons/server-backend,/mnt/extra-addons/rest-framework,/mnt/extra-addons/muk-it \
-d "$DB_NAME" \
--db_host=db \
--db_port=5432 \
diff --git a/spp b/spp
index 3bea2709..c42b35ae 100755
--- a/spp
+++ b/spp
@@ -571,17 +571,17 @@ def cmd_start(args):
is_fresh_start = True
# Two-phase initialization for fresh starts with demo modules
- # Phase 1: Install queue_job first, then restart so it can listen to job events
+ # Phase 1: Install job_worker first, then restart so it can listen to job events
if is_fresh_start and demo_modules:
- print("\n" + _color("1;34", "Phase 1: Installing queue_job..."))
+ print("\n" + _color("1;34", "Phase 1: Installing job_worker..."))
phase1_env = env.copy()
- phase1_env["ODOO_INIT_MODULES"] = "queue_job"
+ phase1_env["ODOO_INIT_MODULES"] = "job_worker"
run(docker_compose("up", "-d", profile=profile), env=phase1_env)
if not _wait_for_odoo_ready(profile):
warn("Odoo did not become ready in time, continuing anyway...")
- print("Restarting Odoo so queue_job can listen to events...")
+ print("Restarting Odoo so job_worker can listen to events...")
service = "openspp-dev" if profile == "dev" else "openspp"
run(docker_compose("restart", service, profile=profile))
diff --git a/spp_area/__manifest__.py b/spp_area/__manifest__.py
index 1fd2e211..29ef826e 100644
--- a/spp_area/__manifest__.py
+++ b/spp_area/__manifest__.py
@@ -18,7 +18,7 @@
"spp_base_common",
"spp_user_roles",
"spp_registry",
- "queue_job",
+ "job_worker",
"spp_security",
],
"external_dependencies": {
@@ -29,7 +29,7 @@
"data": [
"data/area_type_data.xml",
"data/area_tag_data.xml",
- "data/queue_job_channel.xml",
+ "data/queue_limit_data.xml",
"security/privileges.xml",
"security/groups.xml",
"security/ir.model.access.csv",
diff --git a/spp_area/data/queue_job_channel.xml b/spp_area/data/queue_job_channel.xml
deleted file mode 100644
index 3ac763ab..00000000
--- a/spp_area/data/queue_job_channel.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
- area_import
-
-
-
diff --git a/spp_area/data/queue_limit_data.xml b/spp_area/data/queue_limit_data.xml
new file mode 100644
index 00000000..01cde9a6
--- /dev/null
+++ b/spp_area/data/queue_limit_data.xml
@@ -0,0 +1,7 @@
+
+
+ area_import
+ 1
+ 0
+
+
diff --git a/spp_area/models/area_import.py b/spp_area/models/area_import.py
index 5d876b7e..bbdf4b0c 100644
--- a/spp_area/models/area_import.py
+++ b/spp_area/models/area_import.py
@@ -13,12 +13,12 @@
from odoo import Command, _, api, fields, models
from odoo.exceptions import ValidationError
-from odoo.addons.queue_job.delay import group
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
_area_import_raw_model = "spp.area.import.raw"
_res_lang_model = "res.lang"
-_area_import_channel = "root.area_import"
+_area_import_channel = "area_import"
# Regex patterns for COD column name normalization
# Pattern 1: admin{N}Name_{lang} or admin{N}Pcode (Sri Lanka style)
diff --git a/spp_area/tests/common.py b/spp_area/tests/common.py
index 6486795d..b4f17ce0 100644
--- a/spp_area/tests/common.py
+++ b/spp_area/tests/common.py
@@ -51,7 +51,7 @@ def _create_import_from_file(cls, file_path):
def setUpClass(cls):
super().setUpClass()
# Set context to avoid job queue delay for faster tests
- # Note: queue_job module uses 'queue_job__no_delay' (double underscore)
+ # Note: job_worker module uses 'queue_job__no_delay' (double underscore)
cls.env = cls.env(
context=dict(
cls.env.context,
diff --git a/spp_area/tests/test_area.py b/spp_area/tests/test_area.py
index 38283e25..f24d04c3 100644
--- a/spp_area/tests/test_area.py
+++ b/spp_area/tests/test_area.py
@@ -12,7 +12,7 @@ class BaseAreaTest(TransactionCase):
def setUpClass(cls):
super().setUpClass()
# Set context to avoid job queue delay for faster tests
- # Note: queue_job module uses 'queue_job__no_delay' (double underscore)
+ # Note: job_worker module uses 'queue_job__no_delay' (double underscore)
cls.env = cls.env(
context=dict(
cls.env.context,
diff --git a/spp_area/tests/test_area_tags.py b/spp_area/tests/test_area_tags.py
index c0eca852..57dde3c0 100644
--- a/spp_area/tests/test_area_tags.py
+++ b/spp_area/tests/test_area_tags.py
@@ -12,7 +12,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_area/tests/test_res_partner.py b/spp_area/tests/test_res_partner.py
index 6dff3fc5..c746ab8e 100644
--- a/spp_area/tests/test_res_partner.py
+++ b/spp_area/tests/test_res_partner.py
@@ -11,7 +11,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
cls._create_test_data()
diff --git a/spp_area_hdx/tests/test_area_gps_lookup.py b/spp_area_hdx/tests/test_area_gps_lookup.py
index 15b0f252..d7c3f362 100644
--- a/spp_area_hdx/tests/test_area_gps_lookup.py
+++ b/spp_area_hdx/tests/test_area_gps_lookup.py
@@ -2,7 +2,10 @@
import json
+from psycopg2 import IntegrityError
+
from odoo.tests import common, tagged
+from odoo.tools import mute_logger
@tagged("post_install", "-at_install")
@@ -157,9 +160,7 @@ def test_find_by_pcode(self):
def test_hdx_pcode_unique_constraint(self):
"""Test that HDX P-codes must be unique."""
- from odoo.exceptions import ValidationError
-
- with self.assertRaises(ValidationError):
+ with self.assertRaises(IntegrityError), mute_logger("odoo.sql_db"):
self.env["spp.area"].create(
{
"draft_name": "Duplicate",
diff --git a/spp_area_hdx/tests/test_hdx_cod_resource.py b/spp_area_hdx/tests/test_hdx_cod_resource.py
index 8e40e8f6..a8212923 100644
--- a/spp_area_hdx/tests/test_hdx_cod_resource.py
+++ b/spp_area_hdx/tests/test_hdx_cod_resource.py
@@ -3,8 +3,11 @@
import json
from unittest.mock import patch
+from psycopg2 import IntegrityError
+
from odoo.exceptions import UserError
from odoo.tests import common, tagged
+from odoo.tools import mute_logger
@tagged("post_install", "-at_install")
@@ -38,10 +41,8 @@ def test_create_resource(self):
def test_required_fields(self):
"""Test that required fields are enforced."""
- from odoo.exceptions import ValidationError
-
- # source_id is required
- with self.assertRaises(ValidationError):
+ # source_id is required (NOT NULL constraint at DB level)
+ with self.assertRaises(IntegrityError), mute_logger("odoo.sql_db"):
self.env["spp.hdx.cod.resource"].create(
{
"name": "Test",
@@ -49,24 +50,6 @@ def test_required_fields(self):
}
)
- # name is required
- with self.assertRaises(ValidationError):
- self.env["spp.hdx.cod.resource"].create(
- {
- "source_id": self.source.id,
- "admin_level": 1,
- }
- )
-
- # admin_level is required
- with self.assertRaises(ValidationError):
- self.env["spp.hdx.cod.resource"].create(
- {
- "source_id": self.source.id,
- "name": "Test",
- }
- )
-
def test_default_format(self):
"""Test default format is geojson."""
resource = self.env["spp.hdx.cod.resource"].create(
diff --git a/spp_area_hdx/tests/test_hdx_cod_source.py b/spp_area_hdx/tests/test_hdx_cod_source.py
index d812c49b..a40bdbe2 100644
--- a/spp_area_hdx/tests/test_hdx_cod_source.py
+++ b/spp_area_hdx/tests/test_hdx_cod_source.py
@@ -2,7 +2,10 @@
from unittest.mock import patch
+from psycopg2 import IntegrityError
+
from odoo.tests import common, tagged
+from odoo.tools import mute_logger
@tagged("post_install", "-at_install")
@@ -23,9 +26,7 @@ def test_compute_country_iso3(self):
def test_unique_country_constraint(self):
"""Test that only one source per country is allowed."""
- from odoo.exceptions import ValidationError
-
- with self.assertRaises(ValidationError):
+ with self.assertRaises(IntegrityError), mute_logger("odoo.sql_db"):
self.env["spp.hdx.cod.source"].create(
{
"country_id": self.country_lk.id,
diff --git a/spp_area_hdx/wizards/hdx_cod_import_wizard.py b/spp_area_hdx/wizards/hdx_cod_import_wizard.py
index 54daa181..c4b8dadd 100644
--- a/spp_area_hdx/wizards/hdx_cod_import_wizard.py
+++ b/spp_area_hdx/wizards/hdx_cod_import_wizard.py
@@ -461,7 +461,7 @@ def _process_features(
stats["errors"] += len(to_create)
# Note: No manual commit here - let Odoo handle the transaction
- # atomically. For very large imports, consider using queue_job
+ # atomically. For very large imports, consider using job_worker
# to process in background with proper transaction management.
return stats
diff --git a/spp_base_common/models/ir_module_module.py b/spp_base_common/models/ir_module_module.py
index 651cb424..a9e90221 100644
--- a/spp_base_common/models/ir_module_module.py
+++ b/spp_base_common/models/ir_module_module.py
@@ -18,8 +18,8 @@ class IrModuleModule(models.Model):
"menu_xml_id": "mail.menu_root_discuss",
"icon": "spp_base_common,static/description/icon-Discuss-White-line.png",
},
- "queue_job": {
- "menu_xml_id": "queue_job.menu_queue_job_root",
+ "job_worker": {
+ "menu_xml_id": "job_worker.menu_queue_job_root",
"icon": "spp_base_common,static/description/icon-Job-Queue-White-line.png",
},
"spreadsheet_dashboard": {
diff --git a/spp_cr_types_advanced/security/ir.model.access.csv b/spp_cr_types_advanced/security/ir.model.access.csv
index 513c253e..bb39868e 100644
--- a/spp_cr_types_advanced/security/ir.model.access.csv
+++ b/spp_cr_types_advanced/security/ir.model.access.csv
@@ -1,17 +1,17 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
-access_spp_cr_detail_add_member_user,spp.cr.detail.add_member.user,model_spp_cr_detail_add_member,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_add_member_manager,spp.cr.detail.add_member.manager,model_spp_cr_detail_add_member,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_remove_member_user,spp.cr.detail.remove_member.user,model_spp_cr_detail_remove_member,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_remove_member_manager,spp.cr.detail.remove_member.manager,model_spp_cr_detail_remove_member,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_change_hoh_user,spp.cr.detail.change_hoh.user,model_spp_cr_detail_change_hoh,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_change_hoh_manager,spp.cr.detail.change_hoh.manager,model_spp_cr_detail_change_hoh,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_transfer_member_user,spp.cr.detail.transfer_member.user,model_spp_cr_detail_transfer_member,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_transfer_member_manager,spp.cr.detail.transfer_member.manager,model_spp_cr_detail_transfer_member,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_exit_registrant_user,spp.cr.detail.exit_registrant.user,model_spp_cr_detail_exit_registrant,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_exit_registrant_manager,spp.cr.detail.exit_registrant.manager,model_spp_cr_detail_exit_registrant,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_create_group_user,spp.cr.detail.create_group.user,model_spp_cr_detail_create_group,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_create_group_manager,spp.cr.detail.create_group.manager,model_spp_cr_detail_create_group,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_split_household_user,spp.cr.detail.split_household.user,model_spp_cr_detail_split_household,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_split_household_manager,spp.cr.detail.split_household.manager,model_spp_cr_detail_split_household,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_merge_registrants_user,spp.cr.detail.merge_registrants.user,model_spp_cr_detail_merge_registrants,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_merge_registrants_manager,spp.cr.detail.merge_registrants.manager,model_spp_cr_detail_merge_registrants,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_add_member_user,spp.cr.detail.add_member.user,spp_change_request_v2.model_spp_cr_detail_add_member,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_add_member_manager,spp.cr.detail.add_member.manager,spp_change_request_v2.model_spp_cr_detail_add_member,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_remove_member_user,spp.cr.detail.remove_member.user,spp_change_request_v2.model_spp_cr_detail_remove_member,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_remove_member_manager,spp.cr.detail.remove_member.manager,spp_change_request_v2.model_spp_cr_detail_remove_member,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_change_hoh_user,spp.cr.detail.change_hoh.user,spp_change_request_v2.model_spp_cr_detail_change_hoh,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_change_hoh_manager,spp.cr.detail.change_hoh.manager,spp_change_request_v2.model_spp_cr_detail_change_hoh,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_transfer_member_user,spp.cr.detail.transfer_member.user,spp_change_request_v2.model_spp_cr_detail_transfer_member,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_transfer_member_manager,spp.cr.detail.transfer_member.manager,spp_change_request_v2.model_spp_cr_detail_transfer_member,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_exit_registrant_user,spp.cr.detail.exit_registrant.user,spp_change_request_v2.model_spp_cr_detail_exit_registrant,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_exit_registrant_manager,spp.cr.detail.exit_registrant.manager,spp_change_request_v2.model_spp_cr_detail_exit_registrant,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_create_group_user,spp.cr.detail.create_group.user,spp_change_request_v2.model_spp_cr_detail_create_group,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_create_group_manager,spp.cr.detail.create_group.manager,spp_change_request_v2.model_spp_cr_detail_create_group,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_split_household_user,spp.cr.detail.split_household.user,spp_change_request_v2.model_spp_cr_detail_split_household,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_split_household_manager,spp.cr.detail.split_household.manager,spp_change_request_v2.model_spp_cr_detail_split_household,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_merge_registrants_user,spp.cr.detail.merge_registrants.user,spp_change_request_v2.model_spp_cr_detail_merge_registrants,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_merge_registrants_manager,spp.cr.detail.merge_registrants.manager,spp_change_request_v2.model_spp_cr_detail_merge_registrants,spp_change_request_v2.group_cr_manager,1,1,1,1
diff --git a/spp_cr_types_base/security/ir.model.access.csv b/spp_cr_types_base/security/ir.model.access.csv
index 1f7c7569..3cc2ed2c 100644
--- a/spp_cr_types_base/security/ir.model.access.csv
+++ b/spp_cr_types_base/security/ir.model.access.csv
@@ -1,7 +1,7 @@
id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
-access_spp_cr_detail_edit_individual_user,spp.cr.detail.edit_individual.user,model_spp_cr_detail_edit_individual,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_edit_individual_manager,spp.cr.detail.edit_individual.manager,model_spp_cr_detail_edit_individual,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_edit_group_user,spp.cr.detail.edit_group.user,model_spp_cr_detail_edit_group,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_edit_group_manager,spp.cr.detail.edit_group.manager,model_spp_cr_detail_edit_group,spp_change_request_v2.group_cr_manager,1,1,1,1
-access_spp_cr_detail_update_id_user,spp.cr.detail.update_id.user,model_spp_cr_detail_update_id,spp_change_request_v2.group_cr_user,1,1,1,0
-access_spp_cr_detail_update_id_manager,spp.cr.detail.update_id.manager,model_spp_cr_detail_update_id,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_edit_individual_user,spp.cr.detail.edit_individual.user,spp_change_request_v2.model_spp_cr_detail_edit_individual,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_edit_individual_manager,spp.cr.detail.edit_individual.manager,spp_change_request_v2.model_spp_cr_detail_edit_individual,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_edit_group_user,spp.cr.detail.edit_group.user,spp_change_request_v2.model_spp_cr_detail_edit_group,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_edit_group_manager,spp.cr.detail.edit_group.manager,spp_change_request_v2.model_spp_cr_detail_edit_group,spp_change_request_v2.group_cr_manager,1,1,1,1
+access_spp_cr_detail_update_id_user,spp.cr.detail.update_id.user,spp_change_request_v2.model_spp_cr_detail_update_id,spp_change_request_v2.group_cr_user,1,1,1,0
+access_spp_cr_detail_update_id_manager,spp.cr.detail.update_id.manager,spp_change_request_v2.model_spp_cr_detail_update_id,spp_change_request_v2.group_cr_manager,1,1,1,1
diff --git a/spp_dci_server/__manifest__.py b/spp_dci_server/__manifest__.py
index 613358c0..e76e8305 100644
--- a/spp_dci_server/__manifest__.py
+++ b/spp_dci_server/__manifest__.py
@@ -10,7 +10,7 @@
"depends": [
"base",
"fastapi",
- "queue_job",
+ "job_worker",
"spp_dci",
"spp_dci_client",
"spp_api_v2",
@@ -18,7 +18,7 @@
"data": [
"security/ir.model.access.csv",
"data/fastapi_endpoint_data.xml",
- "data/queue_job_channel.xml",
+ "data/queue_limit_data.xml",
"data/cron_data.xml",
"data/server_key_default.xml",
"views/sender_registry_views.xml",
diff --git a/spp_dci_server/data/queue_job_channel.xml b/spp_dci_server/data/queue_limit_data.xml
similarity index 80%
rename from spp_dci_server/data/queue_job_channel.xml
rename to spp_dci_server/data/queue_limit_data.xml
index 3cd006ba..54be17ea 100644
--- a/spp_dci_server/data/queue_job_channel.xml
+++ b/spp_dci_server/data/queue_limit_data.xml
@@ -1,9 +1,9 @@
-
-
+
dci
-
+ 2
+ 10
diff --git a/spp_dci_server/models/subscription.py b/spp_dci_server/models/subscription.py
index aa72fd4f..57f128c5 100644
--- a/spp_dci_server/models/subscription.py
+++ b/spp_dci_server/models/subscription.py
@@ -1,5 +1,5 @@
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
-"""DCI Subscription model for event notifications with queue_job."""
+"""DCI Subscription model for event notifications with job_worker."""
import logging
import uuid
@@ -19,7 +19,7 @@
class DCISubscription(models.Model):
- """Manage DCI event subscriptions with queue_job notifications.
+ """Manage DCI event subscriptions with job_worker notifications.
External systems can subscribe to events (registration, update, delete)
for specific registry types. When events occur, notifications are
@@ -227,12 +227,13 @@ def notify_event(self, event_type: str, records: list, reg_type: str):
for sub in subscriptions:
# Queue notification job for each subscription
sub.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI Notify {sub.subscription_code}",
)._send_notification(event_type, records)
def _send_notification(self, event_type: str, records: list):
- """Send notification to subscriber. Called by queue_job.
+ """Send notification to subscriber. Called by job_worker.
Args:
event_type: Event type
diff --git a/spp_dci_server/models/transaction.py b/spp_dci_server/models/transaction.py
index c7405365..95c22a4c 100644
--- a/spp_dci_server/models/transaction.py
+++ b/spp_dci_server/models/transaction.py
@@ -1,5 +1,5 @@
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
-"""DCI Transaction model for async operation tracking with queue_job."""
+"""DCI Transaction model for async operation tracking with job_worker."""
import json
import logging
@@ -20,10 +20,10 @@
class DCITransaction(models.Model):
- """Track async DCI transactions with queue_job integration.
+ """Track async DCI transactions with job_worker integration.
This model stores async DCI requests (search, subscribe, etc.) and
- manages background processing via queue_job. It tracks:
+ manages background processing via job_worker. It tracks:
- Original request payload
- Processing state transitions
- Response/callback delivery status
@@ -154,16 +154,16 @@ class DCITransaction(models.Model):
string="Max Retries",
)
- # queue_job Integration
+ # job_worker Integration
job_uuid = fields.Char(
string="Job UUID",
- help="queue_job UUID for tracking",
+ help="job_worker UUID for tracking",
index=True,
)
job_state = fields.Char(
string="Job State",
compute="_compute_job_state",
- help="Current queue_job state",
+ help="Current job_worker state",
)
# Timing
@@ -194,7 +194,7 @@ def _compute_dci_status(self):
record.dci_status = state_to_dci.get(record.state, "pdng")
def _compute_job_state(self):
- """Get queue_job state if available."""
+ """Get job_worker state if available."""
for record in self:
if record.job_uuid and "queue.job" in self.env:
job = self.env["queue.job"].search([("uuid", "=", record.job_uuid)], limit=1)
@@ -203,9 +203,9 @@ def _compute_job_state(self):
record.job_state = "n/a"
def process_async_search(self):
- """Execute search and send callback. Called by queue_job.
+ """Execute search and send callback. Called by job_worker.
- This method is decorated with @job when queue_job is available.
+ This method is decorated with @job when job_worker is available.
"""
self.ensure_one()
self.state = "processing"
@@ -318,8 +318,9 @@ def _send_callback(self, response):
self.max_retries,
)
self.with_delay(
- channel="root.dci",
+ channel="dci",
eta=delay,
+ timeout=60,
description=f"DCI Retry Callback {self.transaction_id}",
)._retry_callback()
else:
@@ -331,7 +332,7 @@ def _send_callback(self, response):
)
def _retry_callback(self):
- """Retry sending callback. Called by queue_job.
+ """Retry sending callback. Called by job_worker.
Rebuilds the full DCI envelope from stored response_payload
to ensure consistent callback format on retries.
@@ -391,8 +392,9 @@ def _retry_callback(self):
self.retry_count += 1
delay = 60 * (2 ** (self.retry_count - 1))
self.with_delay(
- channel="root.dci",
+ channel="dci",
eta=delay,
+ timeout=60,
description=f"DCI Retry Callback {self.transaction_id}",
)._retry_callback()
else:
@@ -524,7 +526,7 @@ def _build_callback_envelope(self, response) -> dict:
}
def process_async_subscribe(self):
- """Process subscribe request and send callback. Called by queue_job.
+ """Process subscribe request and send callback. Called by job_worker.
For subscribe operations, the subscription is already created during
the initial request. This method builds the response and sends callback.
@@ -588,7 +590,7 @@ def process_async_subscribe(self):
_logger.exception("DCI async subscribe failed: %s", self.transaction_id)
def process_async_unsubscribe(self):
- """Process unsubscribe request and send callback. Called by queue_job.
+ """Process unsubscribe request and send callback. Called by job_worker.
For unsubscribe operations, subscriptions are already cancelled during
the initial request. This method builds the response and sends callback.
@@ -636,7 +638,7 @@ def process_async_unsubscribe(self):
_logger.exception("DCI async unsubscribe failed: %s", self.transaction_id)
def process_async_txn_status(self):
- """Process transaction status request and send callback. Called by queue_job.
+ """Process transaction status request and send callback. Called by job_worker.
Looks up the status of the referenced transaction and sends callback.
"""
@@ -819,8 +821,9 @@ def _send_callback_dict(self, response_dict: dict):
self.max_retries,
)
self.with_delay(
- channel="root.dci",
+ channel="dci",
eta=delay,
+ timeout=60,
description=f"DCI Retry Callback {self.transaction_id}",
)._retry_callback()
else:
@@ -839,7 +842,7 @@ def action_retry_callback(self):
self._retry_callback()
def action_view_job(self):
- """View the queue_job record."""
+ """View the job_worker record."""
self.ensure_one()
if not self.job_uuid:
raise UserError(_("No job associated with this transaction"))
diff --git a/spp_dci_server/routers/async_router.py b/spp_dci_server/routers/async_router.py
index bd1a18c5..4de334a6 100644
--- a/spp_dci_server/routers/async_router.py
+++ b/spp_dci_server/routers/async_router.py
@@ -1,5 +1,5 @@
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
-"""DCI Async API endpoints with queue_job integration.
+"""DCI Async API endpoints with job_worker integration.
Implements asynchronous DCI operations (mounted under /social/registry):
- POST /search - Async search (202 Accepted, callback delivery)
@@ -127,7 +127,7 @@ async def async_search(
"""
Async search - queue processing and return 202 Accepted.
- The search is executed in the background via queue_job.
+ The search is executed in the background via job_worker.
Results are delivered to the sender's callback_uri.
**Response**: 202 Accepted with transaction tracking info
@@ -186,9 +186,10 @@ async def async_search(
sender.id if sender else "unknown",
)
- # Queue the search job with queue_job
+ # Queue the search job with job_worker
job = transaction.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI Search {transaction.transaction_id}",
).process_async_search()
@@ -352,7 +353,8 @@ async def subscribe(
# Queue the callback job
if callback_uri:
job = transaction.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI Subscribe Callback {transaction.transaction_id}",
).process_async_subscribe()
transaction.job_uuid = job.uuid
@@ -473,7 +475,8 @@ async def unsubscribe(
# Queue the callback job
if callback_uri:
job = transaction.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI Unsubscribe Callback {transaction.transaction_id}",
).process_async_unsubscribe()
transaction.job_uuid = job.uuid
diff --git a/spp_dci_server/routers/bulk_upload.py b/spp_dci_server/routers/bulk_upload.py
index a8c8ce7f..ff52c29c 100644
--- a/spp_dci_server/routers/bulk_upload.py
+++ b/spp_dci_server/routers/bulk_upload.py
@@ -364,7 +364,8 @@ async def bulk_search_upload(
# Queue the async search job
job = transaction.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI Bulk Search {transaction_id} ({len(search_items)} items)",
).process_async_search()
diff --git a/spp_dci_server/routers/callbacks.py b/spp_dci_server/routers/callbacks.py
index bbe3676e..8b0e7db2 100644
--- a/spp_dci_server/routers/callbacks.py
+++ b/spp_dci_server/routers/callbacks.py
@@ -310,7 +310,8 @@ async def async_txn_status(
# Queue the callback job
if callback_uri:
job = transaction.with_delay(
- channel="root.dci",
+ channel="dci",
+ timeout=60,
description=f"DCI TxnStatus Callback {transaction.transaction_id}",
).process_async_txn_status()
transaction.job_uuid = job.uuid
diff --git a/spp_dci_server/tests/test_transaction.py b/spp_dci_server/tests/test_transaction.py
index 8551c33e..26d79c02 100644
--- a/spp_dci_server/tests/test_transaction.py
+++ b/spp_dci_server/tests/test_transaction.py
@@ -294,7 +294,7 @@ class MockResponse:
def model_dump(self, mode="python", by_alias=False, exclude_none=False):
return {"result": "ok"}
- # Mock with_delay at class level to avoid queue_job dependency in tests
+ # Mock with_delay at class level to avoid job_worker dependency in tests
with patch.object(type(transaction), "with_delay") as mock_delay:
mock_delay.return_value._retry_callback = MagicMock()
transaction._send_callback(MockResponse())
diff --git a/spp_demo/__manifest__.py b/spp_demo/__manifest__.py
index 84025f81..b7789ceb 100644
--- a/spp_demo/__manifest__.py
+++ b/spp_demo/__manifest__.py
@@ -16,7 +16,7 @@
"spp_base_common",
"spp_registry",
"spp_vocabulary",
- "queue_job",
+ "job_worker",
"spp_security",
],
"external_dependencies": {
diff --git a/spp_demo/models/demo_data_generator.py b/spp_demo/models/demo_data_generator.py
index 1a65438c..78c7b487 100644
--- a/spp_demo/models/demo_data_generator.py
+++ b/spp_demo/models/demo_data_generator.py
@@ -9,7 +9,7 @@
from odoo import api, fields, models
from odoo.exceptions import ValidationError
-from odoo.addons.queue_job.delay import group
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
diff --git a/spp_demo/tests/test_apps_wizard.py b/spp_demo/tests/test_apps_wizard.py
index dec10451..664d98ed 100644
--- a/spp_demo/tests/test_apps_wizard.py
+++ b/spp_demo/tests/test_apps_wizard.py
@@ -14,7 +14,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_demo/tests/test_demo_data_generator.py b/spp_demo/tests/test_demo_data_generator.py
index 9e5dc8c2..1fbbd032 100644
--- a/spp_demo/tests/test_demo_data_generator.py
+++ b/spp_demo/tests/test_demo_data_generator.py
@@ -19,7 +19,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_demo/tests/test_demo_stories.py b/spp_demo/tests/test_demo_stories.py
index 0d8e41d9..58b31025 100644
--- a/spp_demo/tests/test_demo_stories.py
+++ b/spp_demo/tests/test_demo_stories.py
@@ -16,7 +16,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_demo/tests/test_res_config_settings.py b/spp_demo/tests/test_res_config_settings.py
index 755fa080..4d823440 100644
--- a/spp_demo/tests/test_res_config_settings.py
+++ b/spp_demo/tests/test_res_config_settings.py
@@ -13,7 +13,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_demo/tests/test_res_country.py b/spp_demo/tests/test_res_country.py
index 292f4eae..9951dfc3 100644
--- a/spp_demo/tests/test_res_country.py
+++ b/spp_demo/tests/test_res_country.py
@@ -13,7 +13,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_demo/tests/test_res_partner.py b/spp_demo/tests/test_res_partner.py
index e59978ed..2a8ca5a4 100644
--- a/spp_demo/tests/test_res_partner.py
+++ b/spp_demo/tests/test_res_partner.py
@@ -13,7 +13,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_drims/__manifest__.py b/spp_drims/__manifest__.py
index a66a1136..403cad6c 100644
--- a/spp_drims/__manifest__.py
+++ b/spp_drims/__manifest__.py
@@ -27,7 +27,7 @@
"spp_approval",
"spp_cel_domain",
"spp_audit",
- "queue_job",
+ "job_worker",
],
"data": [
# Security (must be first)
diff --git a/spp_drims_sl_demo/wizard/drims_demo_generator.py b/spp_drims_sl_demo/wizard/drims_demo_generator.py
index 5a720c62..deda0fc0 100644
--- a/spp_drims_sl_demo/wizard/drims_demo_generator.py
+++ b/spp_drims_sl_demo/wizard/drims_demo_generator.py
@@ -367,7 +367,7 @@ def _import_sl_areas(self):
Returns:
int: Number of areas imported/updated
"""
- # Use context to bypass queue_job delays for synchronous processing
+ # Use context to bypass job_worker delays for synchronous processing
AreaImport = self.env["spp.area.import"].with_context(queue_job__no_delay=True)
Area = self.env["spp.area"]
diff --git a/spp_gis/tests/test_area_import_raw.py b/spp_gis/tests/test_area_import_raw.py
index 59928099..74f06b26 100644
--- a/spp_gis/tests/test_area_import_raw.py
+++ b/spp_gis/tests/test_area_import_raw.py
@@ -17,7 +17,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis/tests/test_geo_fields.py b/spp_gis/tests/test_geo_fields.py
index ca8c97a3..241506ce 100644
--- a/spp_gis/tests/test_geo_fields.py
+++ b/spp_gis/tests/test_geo_fields.py
@@ -17,7 +17,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis_report/__manifest__.py b/spp_gis_report/__manifest__.py
index f26de524..3fcf7318 100644
--- a/spp_gis_report/__manifest__.py
+++ b/spp_gis_report/__manifest__.py
@@ -13,7 +13,7 @@
"spp_registry",
"spp_vocabulary",
"spp_cel_domain",
- "queue_job",
+ "job_worker",
],
"external_dependencies": {
"python": ["numpy>=1.22.2", "shapely"],
diff --git a/spp_gis_report/controllers/geojson.py b/spp_gis_report/controllers/geojson.py
index 903ec940..20dde19e 100644
--- a/spp_gis_report/controllers/geojson.py
+++ b/spp_gis_report/controllers/geojson.py
@@ -315,7 +315,7 @@ def trigger_refresh(self, report_code):
try:
report = self._get_report_by_code(report_code)
- # Trigger refresh (uses queue_job in implementation)
+ # Trigger refresh (uses job_worker in implementation)
report.action_refresh()
return self._json_response(
diff --git a/spp_gis_report/models/gis_report.py b/spp_gis_report/models/gis_report.py
index 8ccc068c..bc89d650 100644
--- a/spp_gis_report/models/gis_report.py
+++ b/spp_gis_report/models/gis_report.py
@@ -1272,7 +1272,7 @@ def _refresh_data(self):
def action_refresh(self):
"""Refresh report data.
- Uses queue_job for background processing if available,
+ Uses job_worker for background processing if available,
otherwise runs synchronously.
Returns:
@@ -1281,7 +1281,7 @@ def action_refresh(self):
self.ensure_one()
_logger.info("Scheduling refresh for report ID %s", self.id)
- # Use queue_job if available, otherwise run synchronously
+ # Use job_worker if available, otherwise run synchronously
if hasattr(self, "with_delay"):
self.is_syncing = True
self.with_delay(priority=10, description=f"Re-sync GIS Report: {self.name}")._refresh_data()
diff --git a/spp_gis_report/tests/common.py b/spp_gis_report/tests/common.py
index 21bd9e0a..e52fe35e 100644
--- a/spp_gis_report/tests/common.py
+++ b/spp_gis_report/tests/common.py
@@ -18,7 +18,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis_report/tests/test_area_ext.py b/spp_gis_report/tests/test_area_ext.py
index 34c050e6..65f72ff5 100644
--- a/spp_gis_report/tests/test_area_ext.py
+++ b/spp_gis_report/tests/test_area_ext.py
@@ -18,7 +18,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
@@ -519,7 +519,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis_report/tests/test_gis_report.py b/spp_gis_report/tests/test_gis_report.py
index f45bce4d..320b818d 100644
--- a/spp_gis_report/tests/test_gis_report.py
+++ b/spp_gis_report/tests/test_gis_report.py
@@ -21,7 +21,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis_report/tests/test_gis_report_data.py b/spp_gis_report/tests/test_gis_report_data.py
index fc188044..950b6ba6 100644
--- a/spp_gis_report/tests/test_gis_report_data.py
+++ b/spp_gis_report/tests/test_gis_report_data.py
@@ -23,7 +23,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_gis_report/tests/test_gis_report_wizard.py b/spp_gis_report/tests/test_gis_report_wizard.py
index a394d090..c66f0415 100644
--- a/spp_gis_report/tests/test_gis_report_wizard.py
+++ b/spp_gis_report/tests/test_gis_report_wizard.py
@@ -31,7 +31,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_hazard/tests/common.py b/spp_hazard/tests/common.py
index dc88e9d1..16ac4342 100644
--- a/spp_hazard/tests/common.py
+++ b/spp_hazard/tests/common.py
@@ -13,7 +13,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_hide_menus_base/models/ir_module_module.py b/spp_hide_menus_base/models/ir_module_module.py
index 499b0f5a..e760c072 100644
--- a/spp_hide_menus_base/models/ir_module_module.py
+++ b/spp_hide_menus_base/models/ir_module_module.py
@@ -52,8 +52,8 @@ class IrModuleModule(models.Model):
"fastapi": {
"menu_xml_id": "fastapi.menu_fastapi_root",
},
- "queue_job": {
- "menu_xml_id": "queue_job.menu_queue_job_root",
+ "job_worker": {
+ "menu_xml_id": "job_worker.menu_queue_job_root",
},
}
diff --git a/spp_programs/__manifest__.py b/spp_programs/__manifest__.py
index 92655b5b..b8bf4c1d 100644
--- a/spp_programs/__manifest__.py
+++ b/spp_programs/__manifest__.py
@@ -30,6 +30,7 @@
# CEL core libraries for expression-based managers
"spp_cel_domain",
"spp_cel_widget",
+ "job_worker",
],
"data": [
# Security files (merged from both modules)
@@ -40,7 +41,7 @@
"security/registrant_rule.xml",
# Data files
"data/sequences.xml",
- # "data/queue_data.xml", # Disabled: requires queue_job module
+ "data/queue_data.xml",
"data/user_roles.xml",
"data/approval_definitions.xml",
"data/cel_examples.xml",
@@ -124,9 +125,6 @@
"spp_programs/static/src/xml/field_domain.xml",
],
},
- "oca_data_manual": [
- "data/queue_data.xml",
- ],
"demo": [],
"images": [],
"external_dependencies": {
diff --git a/spp_programs/data/queue_data.xml b/spp_programs/data/queue_data.xml
index c347bb2f..9dd6f2df 100644
--- a/spp_programs/data/queue_data.xml
+++ b/spp_programs/data/queue_data.xml
@@ -1,18 +1,17 @@
-
-
- root_program
-
-
+
cycle
-
+ 1
+ 0
-
+
eligibility_manager
-
+ 1
+ 0
-
+
program_manager
-
+ 1
+ 0
diff --git a/spp_programs/models/__init__.py b/spp_programs/models/__init__.py
index 0014c6c0..58d39bfd 100644
--- a/spp_programs/models/__init__.py
+++ b/spp_programs/models/__init__.py
@@ -18,7 +18,6 @@
from . import spp_entitlement
from . import stock
-# from . import queue_job_channel # Disabled: requires queue_job module
from . import res_user
from . import program_config
from . import program_manager_ui
diff --git a/spp_programs/models/job_relate_mixin.py b/spp_programs/models/job_relate_mixin.py
index 3fa33ece..17edef38 100644
--- a/spp_programs/models/job_relate_mixin.py
+++ b/spp_programs/models/job_relate_mixin.py
@@ -24,7 +24,7 @@ def _get_related_job_domain(self):
return [("id", "in", [1, 2, 3])]
"""
self.ensure_one()
- action = self.env.ref("queue_job.action_queue_job").read()[0]
+ action = self.env.ref("job_worker.action_queue_job").read()[0]
action["domain"] = self._get_related_job_domain()
return action
diff --git a/spp_programs/models/managers/cycle_manager_base.py b/spp_programs/models/managers/cycle_manager_base.py
index eed3354e..1a09533b 100644
--- a/spp_programs/models/managers/cycle_manager_base.py
+++ b/spp_programs/models/managers/cycle_manager_base.py
@@ -8,10 +8,7 @@
from odoo import _, api, fields, models
from odoo.exceptions import UserError, ValidationError
-try:
- from odoo.addons.queue_job.delay import group
-except ImportError:
- group = None
+from odoo.addons.job_worker.delay import group
from .. import constants
@@ -521,12 +518,10 @@ def _check_eligibility_async(self, cycle, beneficiaries_count):
jobs = []
for i in range(0, beneficiaries_count, self.MAX_ROW_JOB_QUEUE):
jobs.append(
- self.delayable(channel="root_program.cycle")._check_eligibility(
- cycle, offset=i, limit=self.MAX_ROW_JOB_QUEUE
- )
+ self.delayable(channel="cycle")._check_eligibility(cycle, offset=i, limit=self.MAX_ROW_JOB_QUEUE)
)
main_job = group(*jobs)
- main_job.on_done(self.delayable(channel="root_program.cycle").mark_check_eligibility_as_done(cycle))
+ main_job.on_done(self.delayable(channel="cycle").mark_check_eligibility_as_done(cycle))
main_job.delay()
def _check_eligibility(self, cycle, beneficiaries=None, offset=0, limit=None, do_count=False):
@@ -592,14 +587,10 @@ def _prepare_entitlements_async(self, cycle, beneficiaries_count):
jobs = []
for i in range(0, beneficiaries_count, self.MAX_ROW_JOB_QUEUE):
- jobs.append(
- self.delayable(channel="root_program.cycle")._prepare_entitlements(cycle, i, self.MAX_ROW_JOB_QUEUE)
- )
+ jobs.append(self.delayable(channel="cycle")._prepare_entitlements(cycle, i, self.MAX_ROW_JOB_QUEUE))
main_job = group(*jobs)
main_job.on_done(
- self.delayable(channel="root_program.cycle").mark_prepare_entitlement_as_done(
- cycle, _("Entitlement Ready.")
- )
+ self.delayable(channel="cycle").mark_prepare_entitlement_as_done(cycle, _("Entitlement Ready."))
)
main_job.delay()
@@ -829,7 +820,7 @@ def _add_beneficiaries_async(self, cycle, beneficiaries, state):
jobs = []
for i in range(0, beneficiaries_count, self.MAX_ROW_JOB_QUEUE):
jobs.append(
- self.delayable(channel="root_program.cycle")._add_beneficiaries(
+ self.delayable(channel="cycle")._add_beneficiaries(
cycle,
beneficiaries[i : i + self.MAX_ROW_JOB_QUEUE],
state,
@@ -837,9 +828,7 @@ def _add_beneficiaries_async(self, cycle, beneficiaries, state):
)
main_job = group(*jobs)
- main_job.on_done(
- self.delayable(channel="root_program.cycle").mark_import_as_done(cycle, _("Beneficiary import finished."))
- )
+ main_job.on_done(self.delayable(channel="cycle").mark_import_as_done(cycle, _("Beneficiary import finished.")))
main_job.delay()
def _add_beneficiaries(self, cycle, beneficiaries, state="draft", do_count=False):
diff --git a/spp_programs/models/managers/eligibility_manager.py b/spp_programs/models/managers/eligibility_manager.py
index 425b532e..d54f7945 100644
--- a/spp_programs/models/managers/eligibility_manager.py
+++ b/spp_programs/models/managers/eligibility_manager.py
@@ -3,10 +3,7 @@
from odoo import Command, _, api, fields, models
-try:
- from odoo.addons.queue_job.delay import group
-except ImportError:
- group = None
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
@@ -158,12 +155,12 @@ def _import_registrants_async(self, new_beneficiaries, state="draft"):
jobs = []
for i in range(0, len(new_beneficiaries), 10000):
jobs.append(
- self.delayable(channel="root_program.eligibility_manager")._import_registrants(
+ self.delayable(channel="eligibility_manager")._import_registrants(
new_beneficiaries[i : i + 10000], state
)
)
main_job = group(*jobs)
- main_job.on_done(self.delayable(channel="root_program.eligibility_manager").mark_import_as_done())
+ main_job.on_done(self.delayable(channel="eligibility_manager").mark_import_as_done())
main_job.delay()
def mark_import_as_done(self):
diff --git a/spp_programs/models/managers/entitlement_manager_base.py b/spp_programs/models/managers/entitlement_manager_base.py
index 1b454f06..92c8c0a4 100644
--- a/spp_programs/models/managers/entitlement_manager_base.py
+++ b/spp_programs/models/managers/entitlement_manager_base.py
@@ -4,10 +4,7 @@
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
-try:
- from odoo.addons.queue_job.delay import group
-except ImportError:
- group = None
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
diff --git a/spp_programs/models/managers/entitlement_manager_cash.py b/spp_programs/models/managers/entitlement_manager_cash.py
index df3c8fa4..790ae342 100644
--- a/spp_programs/models/managers/entitlement_manager_cash.py
+++ b/spp_programs/models/managers/entitlement_manager_cash.py
@@ -4,7 +4,7 @@
from odoo import Command, _, api, fields, models
from odoo.exceptions import UserError, ValidationError
-from odoo.addons.queue_job.delay import group
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
diff --git a/spp_programs/models/managers/entitlement_manager_inkind.py b/spp_programs/models/managers/entitlement_manager_inkind.py
index ac320e0d..f01e2592 100644
--- a/spp_programs/models/managers/entitlement_manager_inkind.py
+++ b/spp_programs/models/managers/entitlement_manager_inkind.py
@@ -4,7 +4,7 @@
from odoo import _, api, fields, models
from odoo.exceptions import UserError, ValidationError
-from odoo.addons.queue_job.delay import group
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
diff --git a/spp_programs/models/managers/payment_manager.py b/spp_programs/models/managers/payment_manager.py
index aad7e5d9..5903a508 100644
--- a/spp_programs/models/managers/payment_manager.py
+++ b/spp_programs/models/managers/payment_manager.py
@@ -8,10 +8,7 @@
from odoo import _, api, fields, models
from odoo.exceptions import ValidationError
-try:
- from odoo.addons.queue_job.delay import group
-except ImportError:
- group = None
+from odoo.addons.job_worker.delay import group
_logger = logging.getLogger(__name__)
diff --git a/spp_programs/models/managers/program_manager.py b/spp_programs/models/managers/program_manager.py
index e21c3fcd..125331c2 100644
--- a/spp_programs/models/managers/program_manager.py
+++ b/spp_programs/models/managers/program_manager.py
@@ -5,10 +5,7 @@
from odoo import _, api, fields, models
from odoo.exceptions import UserError
-try:
- from odoo.addons.queue_job.delay import group
-except ImportError:
- group = None
+from odoo.addons.job_worker.delay import group
from ..programs import SPPProgram
@@ -190,12 +187,12 @@ def _enroll_eligible_registrants_async(self, states, members_count):
jobs = []
for i in range(0, members_count, self.MAX_ROW_JOB_QUEUE):
jobs.append(
- self.delayable(channel="root_program.program_manager")._enroll_eligible_registrants(
+ self.delayable(channel="program_manager")._enroll_eligible_registrants(
states, i, self.MAX_ROW_JOB_QUEUE
)
)
main_job = group(*jobs)
- main_job.on_done(self.delayable(channel="root_program.program_manager").mark_enroll_eligible_as_done())
+ main_job.on_done(self.delayable(channel="program_manager").mark_enroll_eligible_as_done())
main_job.delay()
def _enroll_eligible_registrants(self, states, offset=0, limit=None, do_count=False):
diff --git a/spp_programs/models/queue_job_channel.py b/spp_programs/models/queue_job_channel.py
deleted file mode 100644
index 9852d074..00000000
--- a/spp_programs/models/queue_job_channel.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from odoo import _, api, exceptions, models
-
-
-class CustomQueueJobChannel(models.Model):
- _inherit = "queue.job.channel"
-
- @api.constrains("parent_id", "name")
- def parent_required(self):
- for record in self:
- if record.name and not record.name.startswith("root") and not record.parent_id:
- raise exceptions.ValidationError(_("Parent channel required."))
- return
diff --git a/spp_programs/tests/test_cycle_auto_approve_fund_check.py b/spp_programs/tests/test_cycle_auto_approve_fund_check.py
index d693d443..fa26233f 100644
--- a/spp_programs/tests/test_cycle_auto_approve_fund_check.py
+++ b/spp_programs/tests/test_cycle_auto_approve_fund_check.py
@@ -20,7 +20,7 @@ def setUpClass(cls):
cls.env = cls.env(
context=dict(
cls.env.context,
- test_queue_job_no_delay=True,
+ queue_job__no_delay=True,
)
)
diff --git a/spp_starter_social_registry/__manifest__.py b/spp_starter_social_registry/__manifest__.py
index 1987d296..60a1d6d4 100644
--- a/spp_starter_social_registry/__manifest__.py
+++ b/spp_starter_social_registry/__manifest__.py
@@ -24,7 +24,7 @@
"spp_consent",
"spp_source_tracking",
# Async Processing
- "queue_job",
+ "job_worker",
# Change Request System
"spp_change_request_v2",
"spp_cr_types_base",