diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dc4a57ad..afde9b5b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -78,7 +78,7 @@ jobs: echo "$changed_files" if [ "${{ github.event_name }}" = "pull_request" ]; then - modules=$(echo "$changed_files" | python scripts/detect_modules.py --include-critical --max-modules=15) + modules=$(echo "$changed_files" | python scripts/detect_modules.py --include-critical --max-modules=20) else modules=$(echo "$changed_files" | python scripts/detect_modules.py --max-modules=10) fi diff --git a/codecov.yml b/codecov.yml index d2b5b9a6..fb1cdba1 100644 --- a/codecov.yml +++ b/codecov.yml @@ -5,9 +5,10 @@ coverage: status: project: default: - # Only check coverage on files changed in this PR, not the entire project. - # CI only tests changed modules, so project-wide coverage would always drop - # on PRs that don't re-test every module. + # Informational only - don't block PRs on project-wide coverage. + # CI only tests changed modules (max 20 on PRs), so project coverage fluctuates + # depending on which modules are in the test matrix. 
+ informational: true only_pulls: true patch: default: diff --git a/requirements.txt b/requirements.txt index 8545c5d0..f0f0fcb2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,6 +15,6 @@ openpyxl parse-accept-language pydantic -pyjwt +pyjwt>=2.4.0 pyproj python-dateutil python-magic diff --git a/spp_api_v2_change_request/services/change_request_service.py b/spp_api_v2_change_request/services/change_request_service.py index e09babc1..1b9220d6 100644 --- a/spp_api_v2_change_request/services/change_request_service.py +++ b/spp_api_v2_change_request/services/change_request_service.py @@ -4,6 +4,7 @@ import logging from typing import Any +from odoo import _ from odoo.api import Environment from odoo.exceptions import UserError, ValidationError @@ -392,7 +393,7 @@ def _deserialize_detail(self, detail, data: dict[str, Any]) -> dict[str, Any]: if unresolved: raise ValidationError( - "Failed to resolve the following fields:\n" + "\n".join(f"- {msg}" for msg in unresolved) + _("Failed to resolve the following fields:\n") + "\n".join(f"- {msg}" for msg in unresolved) ) return vals diff --git a/spp_dci_client/services/client.py b/spp_dci_client/services/client.py index 526905d1..522f65f4 100644 --- a/spp_dci_client/services/client.py +++ b/spp_dci_client/services/client.py @@ -920,7 +920,7 @@ def _sign_request(self, header: dict, message: dict) -> dict: except Exception as e: _logger.error("Failed to sign DCI message: %s", str(e)) - raise UserError(f"Failed to sign DCI message: {str(e)}") from e + raise UserError(_("Failed to sign DCI message: %s") % str(e)) from e else: _logger.debug( "No signing key configured for data source '%s' - sending unsigned request", diff --git a/spp_dci_client_crvs/services/crvs_service.py b/spp_dci_client_crvs/services/crvs_service.py index cfeee2fe..99263be2 100644 --- a/spp_dci_client_crvs/services/crvs_service.py +++ b/spp_dci_client_crvs/services/crvs_service.py @@ -4,6 +4,7 @@ import json import logging +from odoo import _ from 
odoo.exceptions import UserError, ValidationError from odoo.addons.spp_dci.schemas.constants import RegistryType @@ -115,7 +116,7 @@ def verify_birth(self, identifier_type: str, identifier_value: str) -> dict | No except Exception as e: _logger.error("Failed to verify birth record: %s", str(e), exc_info=True) - raise UserError(f"Failed to verify birth record: {str(e)}") from e + raise UserError(_("Failed to verify birth record: %s") % str(e)) from e def check_death(self, identifier_type: str, identifier_value: str) -> bool: """Check if person is deceased in CRVS system. @@ -177,7 +178,7 @@ def check_death(self, identifier_type: str, identifier_value: str) -> bool: except Exception as e: _logger.error("Failed to check death status: %s", str(e), exc_info=True) - raise UserError(f"Failed to check death status: {str(e)}") from e + raise UserError(_("Failed to check death status: %s") % str(e)) from e def subscribe_events(self, event_types: list | None = None) -> list[str]: """Subscribe to CRVS vital events. @@ -241,7 +242,7 @@ def subscribe_events(self, event_types: list | None = None) -> list[str]: ) if not subscription_ids: - raise UserError("Failed to subscribe to any CRVS events") + raise UserError(_("Failed to subscribe to any CRVS events")) return subscription_ids @@ -249,7 +250,7 @@ def subscribe_events(self, event_types: list | None = None) -> list[str]: raise except Exception as e: _logger.error("Failed to subscribe to CRVS events: %s", str(e), exc_info=True) - raise UserError(f"Failed to subscribe to CRVS events: {str(e)}") from e + raise UserError(_("Failed to subscribe to CRVS events: %s") % str(e)) from e def unsubscribe_events(self, subscription_codes: list[str]) -> dict: """Unsubscribe from CRVS vital events. 
@@ -280,7 +281,7 @@ def unsubscribe_events(self, subscription_codes: list[str]) -> dict: except Exception as e: _logger.error("Failed to unsubscribe from CRVS events: %s", str(e), exc_info=True) - raise UserError(f"Failed to unsubscribe from CRVS events: {str(e)}") from e + raise UserError(_("Failed to unsubscribe from CRVS events: %s") % str(e)) from e def process_notification(self, notification_data: dict) -> int: """Process CRVS notification and create event record. @@ -361,7 +362,7 @@ def process_notification(self, notification_data: dict) -> int: except Exception as e: _logger.error("Failed to process CRVS notification: %s", str(e), exc_info=True) - raise ValidationError(f"Failed to process CRVS notification: {str(e)}") from e + raise ValidationError(_("Failed to process CRVS notification: %s") % str(e)) from e def _extract_birth_data(self, record_data: dict) -> dict: """Extract birth information from DCI record data. diff --git a/spp_dci_client_crvs/tests/test_crvs_service.py b/spp_dci_client_crvs/tests/test_crvs_service.py index 08142baf..01d379a9 100644 --- a/spp_dci_client_crvs/tests/test_crvs_service.py +++ b/spp_dci_client_crvs/tests/test_crvs_service.py @@ -6,6 +6,8 @@ from odoo.exceptions import UserError, ValidationError from odoo.tests import tagged +from odoo.addons.spp_dci.schemas.constants import RegistryType + from .common import CRVSClientCommon @@ -25,7 +27,7 @@ def setUp(self): "auth_type": "none", "our_sender_id": "openspp.example.org", "our_callback_uri": "https://openspp.example.org/callback", - "registry_type": "CRVS", + "registry_type": RegistryType.CRVS.value, } ) diff --git a/spp_dci_client_dr/services/dr_service.py b/spp_dci_client_dr/services/dr_service.py index f8de002f..fdf09d19 100644 --- a/spp_dci_client_dr/services/dr_service.py +++ b/spp_dci_client_dr/services/dr_service.py @@ -4,7 +4,7 @@ import json import logging -from odoo import fields +from odoo import _, fields from odoo.exceptions import UserError, ValidationError from 
odoo.addons.spp_dci_client.services import DCIClient @@ -145,7 +145,7 @@ def get_disability_status(self, partner) -> dict | None: str(e), exc_info=True, ) - raise UserError(f"Failed to get disability status: {str(e)}") from e + raise UserError(_("Failed to get disability status: %s") % str(e)) from e def get_functional_assessment(self, identifier_type: str, identifier_value: str) -> dict | None: """Get functional assessment scores for a person. @@ -230,7 +230,7 @@ def get_functional_assessment(self, identifier_type: str, identifier_value: str) str(e), exc_info=True, ) - raise UserError(f"Failed to get functional assessment: {str(e)}") from e + raise UserError(_("Failed to get functional assessment: %s") % str(e)) from e def is_pwd(self, partner) -> bool: """Quick check if person is registered as PWD (Person with Disability). @@ -354,7 +354,7 @@ def sync_disability_data(self, partner) -> bool: str(e), exc_info=True, ) - raise UserError(f"Failed to sync disability data: {str(e)}") from e + raise UserError(_("Failed to sync disability data: %s") % str(e)) from e def _get_partner_identifier(self, partner): """Get suitable identifier for querying DR. diff --git a/spp_hazard_programs/README.rst b/spp_hazard_programs/README.rst new file mode 100644 index 00000000..b098c5c0 --- /dev/null +++ b/spp_hazard_programs/README.rst @@ -0,0 +1,138 @@ +=================================== +OpenSPP Hazard Programs Integration +=================================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:e4ca3f5b5e8c998b833ccd6526f575c07f53a38ac49875511c31201f13122916 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. 
|badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2FOpenSPP2-lightgray.png?logo=github + :target: https://github.com/OpenSPP/OpenSPP2/tree/19.0/spp_hazard_programs + :alt: OpenSPP/OpenSPP2 + +|badge1| |badge2| |badge3| + +Links hazard incidents to emergency response programs. Enables programs +to target affected populations using verified impact data, filter +registrants by damage severity, and automatically enable emergency mode +when responding to active incidents. + +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Link programs to one or more hazard incidents via many-to-many + relation +- Automatically flag programs as emergency when linked to incidents in + alert/active/recovery status +- Filter eligible registrants by damage level threshold (any, moderate+, + severe+, critical only) +- Count affected registrants based on verified impacts matching damage + criteria +- Track which programs are responding to each incident (bidirectional + navigation) + +Key Models +~~~~~~~~~~ + ++----------------------------------+-----------------------------------+ +| Model | Description | ++==================================+===================================+ +| ``spp.program`` (extend) | Adds target incidents, emergency | +| | mode, damage filter | ++----------------------------------+-----------------------------------+ +| ``spp.hazard.incident`` (extend) | Adds reverse relation to response | +| | programs | ++----------------------------------+-----------------------------------+ + +UI Location +~~~~~~~~~~~ + +- **Programs**: Programs > Programs > "Emergency Response" tab +- **Incidents**: Hazard & Emergency > Incidents > All Incidents > + "Response Programs" tab +- **Stat buttons**: Programs show incident count and affected registrant + count; incidents show response program count +- **Filters**: "Emergency 
Programs" and "Has Target Incidents" filters + in program search view + +Security +~~~~~~~~ + +No new ACL entries. Access inherited from base models: + +- ``spp.program``: Controlled by ``spp_programs`` security groups +- ``spp.hazard.incident``: Controlled by ``spp_hazard`` security groups + +Extension Points +~~~~~~~~~~~~~~~~ + +- Override ``get_emergency_eligible_registrants()`` to customize + eligibility logic beyond damage levels +- Override ``_get_damage_level_domain()`` to add custom damage filtering + rules +- Inherit ``spp.program`` to add fields used in emergency calculations +- Use ``is_emergency_program`` and ``is_emergency_mode`` flags in + downstream program logic + +Dependencies +~~~~~~~~~~~~ + +``spp_hazard``, ``spp_programs`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. |maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 +.. 
|maintainer-reichie020212| image:: https://github.com/reichie020212.png?size=40px + :target: https://github.com/reichie020212 + :alt: reichie020212 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-reichie020212| + +This module is part of the `OpenSPP/OpenSPP2 `_ project on GitHub. + +You are welcome to contribute. \ No newline at end of file diff --git a/spp_hazard_programs/__init__.py b/spp_hazard_programs/__init__.py new file mode 100644 index 00000000..c4ccea79 --- /dev/null +++ b/spp_hazard_programs/__init__.py @@ -0,0 +1,3 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import models diff --git a/spp_hazard_programs/__manifest__.py b/spp_hazard_programs/__manifest__.py new file mode 100644 index 00000000..d81d7b17 --- /dev/null +++ b/spp_hazard_programs/__manifest__.py @@ -0,0 +1,31 @@ +# pylint: disable=pointless-statement +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + + +{ + "name": "OpenSPP Hazard Programs Integration", + "summary": "Links hazard impacts to program eligibility and entitlements. 
" + "Enables emergency programs to use hazard data for targeting and benefit calculation.", + "category": "OpenSPP/Targeting", + "version": "19.0.1.0.0", + "sequence": 1, + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Alpha", + "maintainers": ["jeremi", "gonzalesedwin1123", "reichie020212"], + "depends": [ + "spp_hazard", + "spp_programs", + ], + "data": [ + "security/ir.model.access.csv", + "views/program_views.xml", + "views/incident_views.xml", + ], + "demo": [], + "assets": {}, + "application": False, + "installable": True, + "auto_install": True, +} diff --git a/spp_hazard_programs/models/__init__.py b/spp_hazard_programs/models/__init__.py new file mode 100644 index 00000000..919018c0 --- /dev/null +++ b/spp_hazard_programs/models/__init__.py @@ -0,0 +1,4 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import program +from . import incident diff --git a/spp_hazard_programs/models/incident.py b/spp_hazard_programs/models/incident.py new file mode 100644 index 00000000..b617a14e --- /dev/null +++ b/spp_hazard_programs/models/incident.py @@ -0,0 +1,48 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import logging + +from odoo import _, fields, models + +_logger = logging.getLogger(__name__) + + +class HazardIncident(models.Model): + """ + Extends spp.hazard.incident to add program integration. + + This extension links incidents to programs, enabling tracking of + which programs are responding to which incidents. 
+ """ + + _inherit = "spp.hazard.incident" + + program_ids = fields.Many2many( + "spp.program", + "spp_program_hazard_incident_rel", + "incident_id", + "program_id", + string="Response Programs", + help="Programs responding to this incident", + ) + program_count = fields.Integer( + compute="_compute_program_count", + string="Number of Programs", + ) + + def _compute_program_count(self): + """Compute the number of programs responding to this incident.""" + for rec in self: + rec.program_count = len(rec.program_ids) + + def action_view_programs(self): + """Open a list view of programs responding to this incident.""" + self.ensure_one() + return { + "name": _("Response Programs - %s", self.name), + "type": "ir.actions.act_window", + "res_model": "spp.program", + "view_mode": "list,form", + "domain": [("id", "in", self.program_ids.ids)], + "context": {}, + } diff --git a/spp_hazard_programs/models/program.py b/spp_hazard_programs/models/program.py new file mode 100644 index 00000000..4d48edc0 --- /dev/null +++ b/spp_hazard_programs/models/program.py @@ -0,0 +1,160 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +import logging + +from odoo import _, api, fields, models + +_logger = logging.getLogger(__name__) + + +class SppProgram(models.Model): + """ + Extends spp.program to add hazard/emergency response capabilities. + + This extension links programs to hazard incidents, enabling: + - Emergency program eligibility based on hazard impacts + - Targeting of affected populations + - Emergency mode with relaxed compliance rules + """ + + _inherit = "spp.program" + + target_incident_ids = fields.Many2many( + "spp.hazard.incident", + "spp_program_hazard_incident_rel", + "program_id", + "incident_id", + string="Target Incidents", + help="Incidents this program responds to. 
Registrants with verified " + "impacts from these incidents may be eligible for this program.", + ) + is_emergency_program = fields.Boolean( + compute="_compute_is_emergency_program", + store=True, + help="Whether this program is responding to any active emergency incidents", + ) + qualifying_damage_levels = fields.Selection( + [ + ("any", "Any Damage Level"), + ("moderate_up", "Moderate and Above"), + ("severe_up", "Severe and Above"), + ("critical_only", "Critical/Totally Damaged Only"), + ], + default="any", + help="Minimum damage level required for emergency eligibility", + ) + is_emergency_mode = fields.Boolean( + string="Emergency Mode", + default=False, + help="When enabled, relaxed compliance rules apply for this program. " + "Automatically enabled when linked to active incidents.", + ) + target_incident_count = fields.Integer( + compute="_compute_target_incident_count", + string="Number of Target Incidents", + store=True, + ) + affected_registrant_count = fields.Integer( + compute="_compute_affected_registrant_count", + string="Potentially Affected Registrants", + ) + + @api.depends("target_incident_ids") + def _compute_target_incident_count(self): + """Compute the number of target incidents.""" + for rec in self: + rec.target_incident_count = len(rec.target_incident_ids) + + @api.depends("target_incident_ids.status") + def _compute_is_emergency_program(self): + """Compute whether this is an emergency program based on linked incidents.""" + for rec in self: + rec.is_emergency_program = bool( + rec.target_incident_ids.filtered(lambda i: i.status in ("alert", "active", "recovery")) + ) + + @api.depends("target_incident_ids", "qualifying_damage_levels") + def _compute_affected_registrant_count(self): + """Compute the number of potentially affected registrants.""" + for rec in self: + if not rec.target_incident_ids: + rec.affected_registrant_count = 0 + continue + + # Build domain for qualifying damage levels + damage_domain = rec._get_damage_level_domain() + 
+ # Count unique registrants with qualifying impacts + impacts = self.env["spp.hazard.impact"].search( + [ + ("incident_id", "in", rec.target_incident_ids.ids), + ("verification_status", "=", "verified"), + ] + + damage_domain + ) + rec.affected_registrant_count = len(impacts.mapped("registrant_id")) + + def _get_damage_level_domain(self): + """Get the domain filter for qualifying damage levels.""" + self.ensure_one() + if self.qualifying_damage_levels == "any": + return [] + elif self.qualifying_damage_levels == "moderate_up": + return [("damage_level", "in", ("moderate", "severe", "critical", "partially_damaged", "totally_damaged"))] + elif self.qualifying_damage_levels == "severe_up": + return [("damage_level", "in", ("severe", "critical", "totally_damaged"))] + elif self.qualifying_damage_levels == "critical_only": + return [("damage_level", "in", ("critical", "totally_damaged"))] + return [] + + def get_emergency_eligible_registrants(self): + """ + Get registrants eligible for this emergency program based on hazard impacts. 
+ + Returns registrants who: + - Have verified impact from one of the target incidents + - Meet the qualifying damage level threshold + + :return: recordset of res.partner + """ + self.ensure_one() + if not self.target_incident_ids: + return self.env["res.partner"].browse() + + damage_domain = self._get_damage_level_domain() + + # Find qualifying impacts + impacts = self.env["spp.hazard.impact"].search( + [ + ("incident_id", "in", self.target_incident_ids.ids), + ("verification_status", "=", "verified"), + ] + + damage_domain + ) + + return impacts.mapped("registrant_id") + + def action_view_target_incidents(self): + """Open a list view of target incidents.""" + self.ensure_one() + return { + "name": _("Target Incidents - %s", self.name), + "type": "ir.actions.act_window", + "res_model": "spp.hazard.incident", + "view_mode": "list,form", + "domain": [("id", "in", self.target_incident_ids.ids)], + "context": {}, + } + + def action_view_affected_registrants(self): + """Open a list view of potentially affected registrants.""" + self.ensure_one() + registrants = self.get_emergency_eligible_registrants() + return { + "name": _("Affected Registrants - %s", self.name), + "type": "ir.actions.act_window", + "res_model": "res.partner", + "view_mode": "list,form", + "domain": [("id", "in", registrants.ids)], + "context": {}, + } diff --git a/spp_hazard_programs/pyproject.toml b/spp_hazard_programs/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- /dev/null +++ b/spp_hazard_programs/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/spp_hazard_programs/readme/DESCRIPTION.md b/spp_hazard_programs/readme/DESCRIPTION.md new file mode 100644 index 00000000..60c4c967 --- /dev/null +++ b/spp_hazard_programs/readme/DESCRIPTION.md @@ -0,0 +1,41 @@ +Links hazard incidents to emergency response programs. 
Enables programs to target affected populations using verified impact data, filter registrants by damage severity, and automatically enable emergency mode when responding to active incidents. + +### Key Capabilities + +- Link programs to one or more hazard incidents via many-to-many relation +- Automatically flag programs as emergency when linked to incidents in alert/active/recovery status +- Filter eligible registrants by damage level threshold (any, moderate+, severe+, critical only) +- Count affected registrants based on verified impacts matching damage criteria +- Track which programs are responding to each incident (bidirectional navigation) + +### Key Models + +| Model | Description | +| --------------------------- | ---------------------------------------------------- | +| `spp.program` (extend) | Adds target incidents, emergency mode, damage filter | +| `spp.hazard.incident` (extend) | Adds reverse relation to response programs | + +### UI Location + +- **Programs**: Programs > Programs > "Emergency Response" tab +- **Incidents**: Hazard & Emergency > Incidents > All Incidents > "Response Programs" tab +- **Stat buttons**: Programs show incident count and affected registrant count; incidents show response program count +- **Filters**: "Emergency Programs" and "Has Target Incidents" filters in program search view + +### Security + +No new ACL entries. 
Access inherited from base models: + +- `spp.program`: Controlled by `spp_programs` security groups +- `spp.hazard.incident`: Controlled by `spp_hazard` security groups + +### Extension Points + +- Override `get_emergency_eligible_registrants()` to customize eligibility logic beyond damage levels +- Override `_get_damage_level_domain()` to add custom damage filtering rules +- Inherit `spp.program` to add fields used in emergency calculations +- Use `is_emergency_program` and `is_emergency_mode` flags in downstream program logic + +### Dependencies + +`spp_hazard`, `spp_programs` diff --git a/spp_hazard_programs/security/ir.model.access.csv b/spp_hazard_programs/security/ir.model.access.csv new file mode 100644 index 00000000..97dd8b91 --- /dev/null +++ b/spp_hazard_programs/security/ir.model.access.csv @@ -0,0 +1 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink diff --git a/spp_hazard_programs/static/description/icon.png b/spp_hazard_programs/static/description/icon.png new file mode 100644 index 00000000..c7dbdaaf Binary files /dev/null and b/spp_hazard_programs/static/description/icon.png differ diff --git a/spp_hazard_programs/static/description/index.html b/spp_hazard_programs/static/description/index.html new file mode 100644 index 00000000..d86761dd --- /dev/null +++ b/spp_hazard_programs/static/description/index.html @@ -0,0 +1,494 @@ + + + + + +OpenSPP Hazard Programs Integration + + + +
+

OpenSPP Hazard Programs Integration

+ + +

Alpha License: LGPL-3 OpenSPP/OpenSPP2

+

Links hazard incidents to emergency response programs. Enables programs +to target affected populations using verified impact data, filter +registrants by damage severity, and automatically enable emergency mode +when responding to active incidents.

+
+

Key Capabilities

+
    +
  • Link programs to one or more hazard incidents via many-to-many +relation
  • +
  • Automatically flag programs as emergency when linked to incidents in +alert/active/recovery status
  • +
  • Filter eligible registrants by damage level threshold (any, moderate+, +severe+, critical only)
  • +
  • Count affected registrants based on verified impacts matching damage +criteria
  • +
  • Track which programs are responding to each incident (bidirectional +navigation)
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + + + + +
ModelDescription
spp.program (extend)Adds target incidents, emergency +mode, damage filter
spp.hazard.incident (extend)Adds reverse relation to response +programs
+
+
+

UI Location

+
    +
  • Programs: Programs > Programs > “Emergency Response” tab
  • +
  • Incidents: Hazard & Emergency > Incidents > All Incidents > +“Response Programs” tab
  • +
  • Stat buttons: Programs show incident count and affected registrant +count; incidents show response program count
  • +
  • Filters: “Emergency Programs” and “Has Target Incidents” filters +in program search view
  • +
+
+
+

Security

+

No new ACL entries. Access inherited from base models:

+
    +
  • spp.program: Controlled by spp_programs security groups
  • +
  • spp.hazard.incident: Controlled by spp_hazard security groups
  • +
+
+
+

Extension Points

+
    +
  • Override get_emergency_eligible_registrants() to customize +eligibility logic beyond damage levels
  • +
  • Override _get_damage_level_domain() to add custom damage filtering +rules
  • +
  • Inherit spp.program to add fields used in emergency calculations
  • +
  • Use is_emergency_program and is_emergency_mode flags in +downstream program logic
  • +
+
+
+

Dependencies

+

spp_hazard, spp_programs

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purpose, do not use in production.

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

Current maintainers:

+

jeremi gonzalesedwin1123 reichie020212

+

This module is part of the OpenSPP/OpenSPP2 project on GitHub.

+

You are welcome to contribute.

+
+
+
+
+ + diff --git a/spp_hazard_programs/tests/__init__.py b/spp_hazard_programs/tests/__init__.py new file mode 100644 index 00000000..66fbca0e --- /dev/null +++ b/spp_hazard_programs/tests/__init__.py @@ -0,0 +1,3 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import test_hazard_programs diff --git a/spp_hazard_programs/tests/common.py b/spp_hazard_programs/tests/common.py new file mode 100644 index 00000000..9409c0ec --- /dev/null +++ b/spp_hazard_programs/tests/common.py @@ -0,0 +1,129 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from odoo.tests.common import TransactionCase + + +class HazardProgramsTestCase(TransactionCase): + """Base test case for hazard programs integration tests.""" + + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.env = cls.env( + context=dict( + cls.env.context, + queue_job__no_delay=True, + tracking_disable=True, + ) + ) + + # Create test area + cls.area = cls.env["spp.area"].create( + { + "draft_name": "Test Area", + "code": "HP-TEST-AREA-001", + } + ) + + # Create hazard category + cls.category = cls.env["spp.hazard.category"].create( + { + "name": "Typhoon", + "code": "HP-TYPHOON-TEST", + } + ) + + # Create impact type + cls.impact_type = cls.env["spp.hazard.impact.type"].create( + { + "name": "Property Damage", + "code": "HP-PROPERTY-TEST", + "category": "physical", + } + ) + + # Create test registrants + cls.registrant_1 = cls.env["res.partner"].create( + { + "name": "Test Registrant 1", + "is_registrant": True, + "is_group": False, + "area_id": cls.area.id, + } + ) + cls.registrant_2 = cls.env["res.partner"].create( + { + "name": "Test Registrant 2", + "is_registrant": True, + "is_group": False, + "area_id": cls.area.id, + } + ) + cls.registrant_3 = cls.env["res.partner"].create( + { + "name": "Test Registrant 3", + "is_registrant": True, + "is_group": False, + "area_id": cls.area.id, + } + ) + + # Create test incidents + 
cls.incident_active = cls.env["spp.hazard.incident"].create( + { + "name": "Active Typhoon", + "code": "HP-INC-ACTIVE", + "category_id": cls.category.id, + "start_date": "2024-01-01", + "status": "active", + } + ) + cls.incident_closed = cls.env["spp.hazard.incident"].create( + { + "name": "Closed Typhoon", + "code": "HP-INC-CLOSED", + "category_id": cls.category.id, + "start_date": "2024-01-01", + "end_date": "2024-02-01", + "status": "closed", + } + ) + + # Create test program + cls.program = cls.env["spp.program"].create( + { + "name": "Emergency Relief Program", + } + ) + + # Create impacts with varying damage levels and verification statuses + cls.impact_verified_critical = cls.env["spp.hazard.impact"].create( + { + "incident_id": cls.incident_active.id, + "registrant_id": cls.registrant_1.id, + "impact_type_id": cls.impact_type.id, + "impact_date": "2024-01-02", + "damage_level": "critical", + "verification_status": "verified", + } + ) + cls.impact_verified_moderate = cls.env["spp.hazard.impact"].create( + { + "incident_id": cls.incident_active.id, + "registrant_id": cls.registrant_2.id, + "impact_type_id": cls.impact_type.id, + "impact_date": "2024-01-02", + "damage_level": "moderate", + "verification_status": "verified", + } + ) + cls.impact_unverified = cls.env["spp.hazard.impact"].create( + { + "incident_id": cls.incident_active.id, + "registrant_id": cls.registrant_3.id, + "impact_type_id": cls.impact_type.id, + "impact_date": "2024-01-02", + "damage_level": "severe", + "verification_status": "reported", + } + ) diff --git a/spp_hazard_programs/tests/test_hazard_programs.py b/spp_hazard_programs/tests/test_hazard_programs.py new file mode 100644 index 00000000..42ccefcb --- /dev/null +++ b/spp_hazard_programs/tests/test_hazard_programs.py @@ -0,0 +1,220 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
"""Tests for the spp_hazard_programs bridge module.

Covers both directions of the program <-> hazard-incident many2many link:
program-side computed fields/actions (``TestProgramHazardIntegration``) and
incident-side counters/actions (``TestIncidentProgramIntegration``).
Fixture records (``incident_active``, ``incident_closed``, ``program``,
registrants and their impacts) are created in ``HazardProgramsTestCase``.
"""

from odoo import Command
from odoo.tests import tagged

from .common import HazardProgramsTestCase


@tagged("post_install", "-at_install")
class TestProgramHazardIntegration(HazardProgramsTestCase):
    """Test program extensions for hazard integration."""

    def test_target_incident_count(self):
        """Test that target_incident_count computes correctly."""
        # Fresh program from common.py has no incidents linked yet.
        self.assertEqual(self.program.target_incident_count, 0)

        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.assertEqual(self.program.target_incident_count, 1)

        self.program.write({"target_incident_ids": [Command.link(self.incident_closed.id)]})
        self.assertEqual(self.program.target_incident_count, 2)

    def test_is_emergency_program_with_active_incident(self):
        """Test is_emergency_program is True when linked to active incidents."""
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.assertTrue(self.program.is_emergency_program)

    def test_is_emergency_program_with_closed_incident(self):
        """Test is_emergency_program is False when only linked to closed incidents."""
        self.program.write({"target_incident_ids": [Command.link(self.incident_closed.id)]})
        self.assertFalse(self.program.is_emergency_program)

    def test_is_emergency_program_no_incidents(self):
        """Test is_emergency_program is False with no linked incidents."""
        self.assertFalse(self.program.is_emergency_program)

    def test_is_emergency_program_alert_status(self):
        """Test is_emergency_program is True for alert status."""
        self.incident_active.status = "alert"
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.assertTrue(self.program.is_emergency_program)

    def test_is_emergency_program_recovery_status(self):
        """Test is_emergency_program is True for recovery status."""
        self.incident_active.status = "recovery"
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.assertTrue(self.program.is_emergency_program)

    def test_affected_registrant_count_any_damage(self):
        """Test affected count with 'any' damage level (only verified impacts)."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "any",
            }
        )
        # 2 verified impacts (registrant_1=critical, registrant_2=moderate)
        # registrant_3 is unverified so excluded
        self.assertEqual(self.program.affected_registrant_count, 2)

    def test_affected_registrant_count_critical_only(self):
        """Test affected count with 'critical_only' damage level."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "critical_only",
            }
        )
        # Only registrant_1 has critical damage and is verified
        self.assertEqual(self.program.affected_registrant_count, 1)

    def test_affected_registrant_count_severe_up(self):
        """Test affected count with 'severe_up' damage level."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "severe_up",
            }
        )
        # Only registrant_1 (critical, verified) qualifies
        # registrant_2 is moderate (below threshold), registrant_3 is unverified
        self.assertEqual(self.program.affected_registrant_count, 1)

    def test_affected_registrant_count_moderate_up(self):
        """Test affected count with 'moderate_up' damage level."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "moderate_up",
            }
        )
        # registrant_1 (critical, verified) and registrant_2 (moderate, verified)
        self.assertEqual(self.program.affected_registrant_count, 2)

    def test_affected_registrant_count_no_incidents(self):
        """Test affected count is 0 with no linked incidents."""
        self.assertEqual(self.program.affected_registrant_count, 0)

    def test_get_damage_level_domain_any(self):
        """Test damage level domain for 'any'."""
        # "any" means no damage-level filter at all, hence an empty domain.
        self.program.qualifying_damage_levels = "any"
        domain = self.program._get_damage_level_domain()
        self.assertEqual(domain, [])

    def test_get_damage_level_domain_moderate_up(self):
        """Test damage level domain for 'moderate_up'."""
        # NOTE(review): these assertions assume the domain is a single
        # ("damage_level", "in", [...]) leaf — confirmed by indexing domain[0][2].
        self.program.qualifying_damage_levels = "moderate_up"
        domain = self.program._get_damage_level_domain()
        self.assertEqual(len(domain), 1)
        self.assertIn("moderate", domain[0][2])
        self.assertIn("severe", domain[0][2])
        self.assertIn("critical", domain[0][2])

    def test_get_damage_level_domain_severe_up(self):
        """Test damage level domain for 'severe_up'."""
        self.program.qualifying_damage_levels = "severe_up"
        domain = self.program._get_damage_level_domain()
        self.assertEqual(len(domain), 1)
        self.assertNotIn("moderate", domain[0][2])
        self.assertIn("severe", domain[0][2])
        self.assertIn("critical", domain[0][2])

    def test_get_damage_level_domain_critical_only(self):
        """Test damage level domain for 'critical_only'."""
        self.program.qualifying_damage_levels = "critical_only"
        domain = self.program._get_damage_level_domain()
        self.assertEqual(len(domain), 1)
        self.assertIn("critical", domain[0][2])
        self.assertNotIn("moderate", domain[0][2])
        self.assertNotIn("severe", domain[0][2])

    def test_get_emergency_eligible_registrants(self):
        """Test getting eligible registrants for emergency program."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "any",
            }
        )
        registrants = self.program.get_emergency_eligible_registrants()
        self.assertEqual(len(registrants), 2)
        self.assertIn(self.registrant_1, registrants)
        self.assertIn(self.registrant_2, registrants)
        # Unverified registrant should not be included
        self.assertNotIn(self.registrant_3, registrants)

    def test_get_emergency_eligible_registrants_no_incidents(self):
        """Test eligible registrants returns empty when no incidents linked."""
        registrants = self.program.get_emergency_eligible_registrants()
        self.assertEqual(len(registrants), 0)

    def test_get_emergency_eligible_registrants_with_filter(self):
        """Test eligible registrants respects damage level filter."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "critical_only",
            }
        )
        registrants = self.program.get_emergency_eligible_registrants()
        self.assertEqual(len(registrants), 1)
        self.assertIn(self.registrant_1, registrants)

    def test_action_view_target_incidents(self):
        """Test action returns correct window action for target incidents."""
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        action = self.program.action_view_target_incidents()
        self.assertEqual(action["type"], "ir.actions.act_window")
        self.assertEqual(action["res_model"], "spp.hazard.incident")
        self.assertEqual(action["domain"], [("id", "in", self.program.target_incident_ids.ids)])

    def test_action_view_affected_registrants(self):
        """Test action returns correct window action for affected registrants."""
        self.program.write(
            {
                "target_incident_ids": [Command.link(self.incident_active.id)],
                "qualifying_damage_levels": "any",
            }
        )
        action = self.program.action_view_affected_registrants()
        self.assertEqual(action["type"], "ir.actions.act_window")
        self.assertEqual(action["res_model"], "res.partner")
        # Should contain the 2 verified registrants
        # (domain is expected to be [("id", "in", [ids...])] — see indexing below)
        self.assertEqual(len(action["domain"][0][2]), 2)

    def test_bidirectional_many2many(self):
        """Test that the many2many relationship is bidirectional."""
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.assertIn(self.program, self.incident_active.program_ids)
        self.assertIn(self.incident_active, self.program.target_incident_ids)


@tagged("post_install", "-at_install")
class TestIncidentProgramIntegration(HazardProgramsTestCase):
    """Test incident extensions for program integration."""

    def test_program_count(self):
        """Test that program_count computes correctly."""
        self.assertEqual(self.incident_active.program_count, 0)

        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        # Force recompute of the inverse side after writing from the program side.
        self.incident_active.invalidate_recordset(["program_ids", "program_count"])
        self.assertEqual(self.incident_active.program_count, 1)

    def test_program_count_multiple_programs(self):
        """Test program_count with multiple programs linked."""
        program_2 = self.env["spp.program"].create({"name": "Second Emergency Program"})
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        program_2.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.incident_active.invalidate_recordset(["program_ids", "program_count"])
        self.assertEqual(self.incident_active.program_count, 2)

    def test_action_view_programs(self):
        """Test action returns correct window action for programs."""
        self.program.write({"target_incident_ids": [Command.link(self.incident_active.id)]})
        self.incident_active.invalidate_recordset(["program_ids", "program_count"])
        action = self.incident_active.action_view_programs()
        self.assertEqual(action["type"], "ir.actions.act_window")
        self.assertEqual(action["res_model"], "spp.program")
        self.assertEqual(action["domain"], [("id", "in", self.incident_active.program_ids.ids)])
a/spp_hazard_programs/views/program_views.xml b/spp_hazard_programs/views/program_views.xml new file mode 100644 index 00000000..56ece548 --- /dev/null +++ b/spp_hazard_programs/views/program_views.xml @@ -0,0 +1,114 @@ + + + + + view.program.form.hazard + spp.program + + 50 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + view.program.list.hazard + spp.program + + 50 + + + + + + + + + + + view.program.search.hazard + spp.program + + 50 + + + + + + + + + diff --git a/spp_import_match/README.rst b/spp_import_match/README.rst new file mode 100644 index 00000000..c35f1ede --- /dev/null +++ b/spp_import_match/README.rst @@ -0,0 +1,152 @@ +==================== +OpenSPP Import Match +==================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:b57dea1315fd1f9af8f15720c5332b182c0a01fddd827a3daa73a8e950d41faa + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2FOpenSPP2-lightgray.png?logo=github + :target: https://github.com/OpenSPP/OpenSPP2/tree/19.0/spp_import_match + :alt: OpenSPP/OpenSPP2 + +|badge1| |badge2| |badge3| + +Extends Odoo's base import functionality to match incoming records +against existing data during bulk imports. Prevents duplicate creation +by comparing imported rows to database records using configurable field +combinations. Supports overwriting matched records and asynchronous +processing for large datasets. 
+ +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Define matching rules per model using field combinations to identify + existing records +- Match on sub-fields within related records (e.g., household ID within + individual) +- Apply conditional matching rules only when specific imported values + are present +- Skip duplicate creation or update existing records when matches are + found +- Process imports with more than 100 records asynchronously using + ``queue_job`` +- Clear one2many/many2many associations before update to prevent + duplicate entries + +Key Models +~~~~~~~~~~ + ++-----------------------------+----------------------------------------+ +| Model | Description | ++=============================+========================================+ +| ``spp.import.match`` | Matching rule configuration for a | +| | specific model | ++-----------------------------+----------------------------------------+ +| ``spp.import.match.fields`` | Individual fields used in a rule, | +| | supports sub-fields | ++-----------------------------+----------------------------------------+ + +Configuration +~~~~~~~~~~~~~ + +After installing: + +1. Navigate to **Registry > Configuration > Import Match** +2. Create a new matching rule and select the target model (e.g., + ``res.partner``) +3. Add one or more fields to match on (e.g., national ID, or first name + + date of birth) +4. Enable **Overwrite Match** to update existing records when matches + are found +5. 
For conditional matching, enable **Is Conditional** on a field and + specify the expected imported value + +UI Location +~~~~~~~~~~~ + +- **Menu**: Registry > Configuration > Import Match +- **Import Dialog**: Matching applies automatically during CSV import + via the standard Odoo import interface +- **Queue Jobs**: Registry > Queue Jobs > Jobs (to monitor asynchronous + imports) + +Security +~~~~~~~~ + +================================ ========= +Group Access +================================ ========= +``spp_security.group_spp_admin`` Full CRUD +================================ ========= + +Extension Points +~~~~~~~~~~~~~~~~ + +- Override ``spp.import.match._match_find()`` to customize matching + logic for specific use cases +- Override ``spp.import.match._usable_rules()`` to filter which rules + apply based on context +- Inherits ``base.load()`` to inject matching logic into all model + imports + +Dependencies +~~~~~~~~~~~~ + +``base``, ``spp_base_common``, ``base_import``, ``queue_job``, +``spp_security`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. + +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. 
|maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| + +This module is part of the `OpenSPP/OpenSPP2 `_ project on GitHub. + +You are welcome to contribute. \ No newline at end of file diff --git a/spp_import_match/__init__.py b/spp_import_match/__init__.py new file mode 100644 index 00000000..c4ccea79 --- /dev/null +++ b/spp_import_match/__init__.py @@ -0,0 +1,3 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from . import models diff --git a/spp_import_match/__manifest__.py b/spp_import_match/__manifest__.py new file mode 100644 index 00000000..aca536e8 --- /dev/null +++ b/spp_import_match/__manifest__.py @@ -0,0 +1,34 @@ +# pylint: disable=pointless-statement +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +{ + "name": "OpenSPP Import Match", + "summary": "OpenSPP Import Match enhances data import processes by intelligently matching incoming records against existing data, preventing duplication and ensuring registry integrity. 
It provides configurable matching logic and supports seamless updates to existing records during bulk data onboarding.", + "category": "OpenSPP/Integration", + "version": "19.0.1.3.1", + "sequence": 1, + "author": "OpenSPP.org", + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "development_status": "Alpha", + "maintainers": ["jeremi", "gonzalesedwin1123"], + "depends": ["base", "spp_base_common", "base_import", "queue_job", "spp_security"], + "data": [ + "data/queue_job_data.xml", + "security/ir.model.access.csv", + "views/import_match_view.xml", + ], + "assets": { + "web.assets_backend": [ + "spp_import_match/static/src/legacy/js/custom_base_import.js", + ], + "web.assets_qweb": [ + "spp_import_match/static/src/legacy/xml/custom_base_import.xml", + ], + }, + "demo": [], + "images": [], + "application": True, + "installable": True, + "auto_install": False, +} diff --git a/spp_import_match/data/queue_job_data.xml b/spp_import_match/data/queue_job_data.xml new file mode 100644 index 00000000..22cc8dba --- /dev/null +++ b/spp_import_match/data/queue_job_data.xml @@ -0,0 +1,21 @@ + + + + _split_file + + + + + _import_one_chunk + + + diff --git a/spp_import_match/i18n/lo.po b/spp_import_match/i18n/lo.po new file mode 100644 index 00000000..7c2febcd --- /dev/null +++ b/spp_import_match/i18n/lo.po @@ -0,0 +1,282 @@ +# Translation of Odoo Server. +# This file contains the translation of the following modules: +# * spp_import_match +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 17.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: Automatically generated\n" +"Language-Team: none\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: \n" +"Language: lo\n" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/queue_job.py:0 +#, python-format +msgid "Attachment" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model,name:spp_import_match.model_base +msgid "Base" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_base_import_import +msgid "Base Import" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.actions.act_window,help:spp_import_match.action_spp_import_match +msgid "" +"Click the create button to enter the information of the Import Matching." +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__conditional +msgid "Conditional" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "Config:" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/custom_base_import.xml:0 +#, python-format +msgid "Configuration:" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.actions.act_window,help:spp_import_match.action_spp_import_match +msgid "Create a new Import Matching!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__create_uid +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__create_uid +msgid "Created by" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__create_date +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__create_date +msgid "Created on" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__display_name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__display_name +msgid "Display Name" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__field_id +msgid "Field" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/import_match.py:0 +#, python-format +msgid "Field '%s', already exists!" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/tests/test_res_partner_import_match.py:0 +#, python-format +msgid "Field 'Name', already exists!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__field_ids +msgid "Fields" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_spp_import_match_fields +msgid "Fields for Import Matching" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match__field_ids +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__field_id +msgid "Fields to Match in Importing" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__relation +msgid "For relationship fields, the technical name of the target model" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +msgid "Group By" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__id +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__id +msgid "ID" +msgstr "" + +#. module: spp_import_match +#. 
odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/custom_base_import.xml:0 +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#: model:ir.ui.menu,name:spp_import_match.menu_spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_form +#, python-format +msgid "Import Match" +msgstr "" + +#. module: spp_import_match +#: model:ir.actions.act_window,name:spp_import_match.action_spp_import_match +#: model:ir.model,name:spp_import_match.model_spp_import_match +msgid "Import Matching" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "Import in the background" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/base_import.py:0 +#, python-format +msgid "Import {model_name} from file {file_name}" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/base_import.py:0 +#, python-format +msgid "" +"Import {model_name} from file {file_name} - #{chunk} - lines {row_from} to " +"{row_to}" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__imported_value +msgid "Imported Value" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__write_uid +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__write_uid +msgid "Last Updated by" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__write_date +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__write_date +msgid "Last Updated on" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__match_id +msgid "Match" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_form +msgid "Match Details" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_id +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__model_id +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +msgid "Model" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_description +msgid "Model Description" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match__model_id +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__model_id +msgid "Model for Import Matching" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/import_match.py:0 +#, python-format +msgid "Multiple matches found for '%s'!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__name +msgid "Name" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__overwrite_match +msgid "Overwrite Match" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_queue_job +msgid "Queue Job" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__relation +msgid "Related Model" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__sequence +msgid "Sequence" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__sub_field_id +msgid "Sub Fields to Match in Importing" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__sub_field_id +msgid "Sub-Field" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/js/custom_base_import.js:0 +#, python-format +msgid "Successfully added on Queue" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__imported_value +msgid "This will be used as a condition to disregard this field if matched" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "" +"When checked, the import will be executed as a background job, after " +"splitting your file in small chunks that will be processed independently. " +"Use this to import very large files." +msgstr "" diff --git a/spp_import_match/i18n/spp_import_match.pot b/spp_import_match/i18n/spp_import_match.pot new file mode 100644 index 00000000..9f1c2967 --- /dev/null +++ b/spp_import_match/i18n/spp_import_match.pot @@ -0,0 +1,281 @@ +# Translation of Odoo Server. 
+# This file contains the translation of the following modules: +# * spp_import_match +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 17.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: \n" +"Language-Team: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: \n" +"Plural-Forms: \n" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/queue_job.py:0 +#, python-format +msgid "Attachment" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_base +msgid "Base" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_base_import_import +msgid "Base Import" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.actions.act_window,help:spp_import_match.action_spp_import_match +msgid "" +"Click the create button to enter the information of the Import Matching." +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__conditional +msgid "Conditional" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "Config:" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/custom_base_import.xml:0 +#, python-format +msgid "Configuration:" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.actions.act_window,help:spp_import_match.action_spp_import_match +msgid "Create a new Import Matching!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__create_uid +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__create_uid +msgid "Created by" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__create_date +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__create_date +msgid "Created on" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__display_name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__display_name +msgid "Display Name" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__field_id +msgid "Field" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/import_match.py:0 +#, python-format +msgid "Field '%s', already exists!" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/tests/test_res_partner_import_match.py:0 +#, python-format +msgid "Field 'Name', already exists!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__field_ids +msgid "Fields" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_spp_import_match_fields +msgid "Fields for Import Matching" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match__field_ids +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__field_id +msgid "Fields to Match in Importing" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__relation +msgid "For relationship fields, the technical name of the target model" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +msgid "Group By" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__id +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__id +msgid "ID" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/custom_base_import.xml:0 +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#: model:ir.ui.menu,name:spp_import_match.menu_spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_form +#, python-format +msgid "Import Match" +msgstr "" + +#. module: spp_import_match +#: model:ir.actions.act_window,name:spp_import_match.action_spp_import_match +#: model:ir.model,name:spp_import_match.model_spp_import_match +msgid "Import Matching" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "Import in the background" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/base_import.py:0 +#, python-format +msgid "Import {model_name} from file {file_name}" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/base_import.py:0 +#, python-format +msgid "" +"Import {model_name} from file {file_name} - #{chunk} - lines {row_from} to " +"{row_to}" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__imported_value +msgid "Imported Value" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__write_uid +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__write_uid +msgid "Last Updated by" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__write_date +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__write_date +msgid "Last Updated on" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__match_id +msgid "Match" +msgstr "" + +#. module: spp_import_match +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_form +msgid "Match Details" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_id +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__model_id +#: model_terms:ir.ui.view,arch_db:spp_import_match.view_spp_import_match_filter +msgid "Model" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__model_description +msgid "Model Description" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match__model_id +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__model_id +msgid "Model for Import Matching" +msgstr "" + +#. module: spp_import_match +#. odoo-python +#: code:addons/spp_import_match/models/import_match.py:0 +#, python-format +msgid "Multiple matches found for '%s'!" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__name +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__name +msgid "Name" +msgstr "" + +#. 
module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__overwrite_match +msgid "Overwrite Match" +msgstr "" + +#. module: spp_import_match +#: model:ir.model,name:spp_import_match.model_queue_job +msgid "Queue Job" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__relation +msgid "Related Model" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match__sequence +msgid "Sequence" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__sub_field_id +msgid "Sub Fields to Match in Importing" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,field_description:spp_import_match.field_spp_import_match_fields__sub_field_id +msgid "Sub-Field" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/js/custom_base_import.js:0 +#, python-format +msgid "Successfully added on Queue" +msgstr "" + +#. module: spp_import_match +#: model:ir.model.fields,help:spp_import_match.field_spp_import_match_fields__imported_value +msgid "This will be used as a condition to disregard this field if matched" +msgstr "" + +#. module: spp_import_match +#. odoo-javascript +#: code:addons/spp_import_match/static/src/legacy/xml/custom_base_import.xml:0 +#, python-format +msgid "" +"When checked, the import will be executed as a background job, after " +"splitting your file in small chunks that will be processed independently. " +"Use this to import very large files." +msgstr "" diff --git a/spp_import_match/models/__init__.py b/spp_import_match/models/__init__.py new file mode 100644 index 00000000..31a3493f --- /dev/null +++ b/spp_import_match/models/__init__.py @@ -0,0 +1,6 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.
import logging

from odoo import api, models

_logger = logging.getLogger(__name__)


class Base(models.AbstractModel):
    """Hook import-matching into every model's ``load()`` (CSV import) path.

    When one or more ``spp.import.match`` rules apply to the model being
    imported, each incoming row is matched against existing records and,
    on a match, rewritten to carry the existing record's external id so
    the standard loader updates that record instead of creating a duplicate.
    """

    _inherit = "base"

    @api.model
    def load(self, fields, data):
        """Pre-process import rows with the configured match rules.

        :param fields: list of field paths from the import file header
        :param data: list of row value lists, parallel to ``fields``
        :return: result of the standard ``load()`` on the (possibly
            rewritten) rows
        """
        # Rules usable for this model, optionally restricted by the
        # ``import_match_ids`` context key set from the import dialog.
        usable, field_to_match = self.env["spp.import.match"]._usable_rules(
            self._name,
            fields,
            option_config_ids=self.env.context.get("import_match_ids", []),
        )
        model_id = self.env["ir.model"].search([("model", "=", self._name)])
        overwrite_match = True
        import_match = self.env["spp.import.match"].search([("model_id", "=", model_id.id)])
        if import_match:
            overwrite_match = import_match.overwrite_match

        if usable:
            newdata = list()
            if ".id" in fields:
                # Translate raw database ids into external ids so the row
                # can be fed back through the xml-id based loader.
                column = fields.index(".id")
                fields[column] = "id"
                for values in data:
                    dbid = int(values[column])
                    values[column] = self.browse(dbid).get_external_id().get(dbid)
            import_fields = list(map(models.fix_import_export_id_paths, fields))
            # Odoo 19: _convert_records requires savepoint parameter
            converted_data = list(self._convert_records(self._extract_records(import_fields, data), savepoint=True))

            if "id" not in fields:
                fields.append("id")
                import_fields.append(["id"])
            # Rebuild "field" / "field/subfield" labels from the parsed paths.
            # NOTE(review): only the first two path segments are kept — a
            # three-level path would be truncated here; confirm importers
            # never combine deeper paths with matching enabled.
            clean_fields = []
            for f in import_fields:
                field_name = f[0]
                if len(f) > 1:
                    field_name += "/" + f[1]
                clean_fields.append(field_name)
            for dbid, xmlid, record, info in converted_data:
                # Pad the raw row so it zips 1:1 with clean_fields (an "id"
                # column may have been appended above).
                if len(clean_fields) > len(data[info["record"]]):
                    data[info["record"]].append(None)

                _logger.debug("CLEAN FIELDS: %s", clean_fields)
                _logger.debug("Processing record at index %d", info["record"])
                row = dict(zip(clean_fields, data[info["record"]], strict=True))

                match = self
                if xmlid:
                    # Row already carries an external id: keep it as-is and
                    # let the standard loader resolve it.
                    _logger.debug("XMLID: %s", xmlid)
                    row["id"] = xmlid
                    newdata.append(tuple(row[f] for f in clean_fields))
                    continue
                elif dbid:
                    # Row carries a database id: that record is the match.
                    _logger.debug("DBID: %s", dbid)
                    match = self.browse(dbid)
                else:
                    # Apply the configured matching rules to find an
                    # existing record for this row.
                    match = self.env["spp.import.match"]._match_find(self, record, row)
                _logger.debug("MATCH found: record_id=%s", match.id if match else None)

                # Presumably called for its side effect of creating an
                # external id for the matched record — TODO confirm; the
                # return value is unused.
                match.export_data(fields)

                ext_id = match.get_external_id()
                row["id"] = ext_id[match.id] if match else row.get("id", "")
                if match:
                    if overwrite_match:
                        flat_fields_to_remove = [item for sublist in field_to_match for item in sublist]
                        for fields_pop in flat_fields_to_remove:
                            # Set one2many and many2many fields to False if matched
                            # to avoid duplicates in one2many or many2many when exporting data
                            if fields_pop in row and match._fields[fields_pop].type in [
                                "one2many",
                                "many2many",
                            ]:
                                row[fields_pop] = False
                    newdata.append(tuple(row[f] for f in clean_fields))
                else:
                    newdata.append(tuple(row[f] for f in fields))
            data = newdata
        return super().load(fields, data)

    def write(self, vals):
        """Drop falsy one2many/many2many values from ``vals`` before writing.

        Matched import rows set relational columns to ``False`` in
        ``load()``; writing ``False`` to a *2many field would clear it,
        so those keys are stripped here instead.
        """
        # nosemgrep: odoo-sudo-without-context - reading model metadata requires sudo
        model = self.env["ir.model"].sudo().search([("model", "=", self._name)])
        new_vals = vals.copy()
        for rec in vals:
            field_name = rec
            if not vals[field_name]:
                field = self.env["ir.model.fields"].search([("model_id", "=", model.id), ("name", "=", field_name)])
                if field and field.ttype in ("one2many", "many2many"):
                    new_vals.pop(rec)
        return super().write(new_vals)
from odoo.addons.queue_job.exception import FailedJobError

_logger = logging.getLogger(__name__)
# options defined in base_import/import.js
OPT_HAS_HEADER = "headers"
OPT_SEPARATOR = "separator"
OPT_QUOTING = "quoting"
OPT_ENCODING = "encoding"
# options defined in base_import_async/import.js
OPT_USE_QUEUE = "use_queue"
OPT_CHUNK_SIZE = "chunk_size"
# option not available in UI, but usable from scripts
OPT_PRIORITY = "priority"

INIT_PRIORITY = 100
DEFAULT_CHUNK_SIZE = 100


class ImportValidationError(Exception):
    """
    This class is made to correctly format all the different error types that
    can occur during the pre-validation of the import that is made before
    calling the data loading itself. The Error data structure is meant to copy
    the one of the errors raised during the data loading. It simplifies the
    error management at client side as all errors can be treated the same way.

    This exception is typically raised when there is an error during data
    parsing (image, int, dates, etc..) or if the user did not select at least
    one field to map with a column.
    """

    def __init__(self, message, **kwargs):
        super().__init__(message)
        # Mirror the structure of errors produced by load() so the client
        # can render both kinds of error uniformly.
        self.type = kwargs.get("error_type", "error")
        self.message = message
        self.record = False
        self.not_matching_error = True
        self.field_path = [kwargs["field"]] if kwargs.get("field") else False
        self.field_type = kwargs.get("field_type")


class SPPBaseImport(models.TransientModel):
    """Route large imports through queue_job as chunked background jobs."""

    _inherit = "base_import.import"

    def execute_import(self, fields, columns, options, dryrun=False):
        """Run the import, asynchronously when the file is large.

        Dry-runs and files of up to 100 rows are delegated to the standard
        synchronous import; anything bigger is stored as a CSV attachment
        and split into chunked ``queue_job`` jobs.

        :return: the standard import result dict, or ``{"async": True}``
            when the import was queued.
        """
        try:
            input_file_data, import_fields = self._convert_import_data(fields, options)
            # Parse date and float field
            input_file_data = self._parse_import_data(input_file_data, import_fields, options)
        except ImportValidationError as error:
            return {"messages": [error.__dict__]}

        _logger.info("Started Import: %s with rows %d", self.res_model, len(input_file_data))

        import_match_ids = options.get("import_match_ids", [])

        if dryrun:
            _logger.info("Doing dry-run import")
            if import_match_ids:
                self = self.with_context(import_match_ids=import_match_ids)
            return super().execute_import(fields, columns, options, dryrun=True)

        if len(input_file_data) <= 100:
            _logger.info("Doing normal import")
            if import_match_ids:
                self = self.with_context(import_match_ids=import_match_ids)
            return super().execute_import(fields, columns, options, dryrun=False)

        _logger.info("Started Asynchronous Import: %s", self.res_model)
        # asynchronous import
        data = input_file_data

        # get the translated model name to build
        # a meaningful job description
        search_result = self.env["ir.model"].name_search(self.res_model, operator="=")
        if search_result:
            translated_model_name = search_result[0][1]
        else:
            translated_model_name = self._description
        description = _("Import {model_name} from file {file_name}").format(
            model_name=translated_model_name, file_name=self.file_name
        )
        attachment = self._create_csv_attachment(import_fields, data, options, self.file_name)
        delayed_job = self.with_delay(description=description)._split_file(
            model_name=self.res_model,
            translated_model_name=translated_model_name,
            attachment=attachment,
            options=options,
            split_context=self.env.context,
            file_name=self.file_name,
        )
        self._link_attachment_to_job(delayed_job, attachment)
        return {"async": True}

    def _link_attachment_to_job(self, delayed_job, attachment):
        """Attach the source CSV to the queue.job record for traceability."""
        queue_job = self.env["queue.job"].search([("uuid", "=", delayed_job.uuid)], limit=1)
        attachment.write({"res_model": "queue.job", "res_id": queue_job.id})

    def _create_csv_attachment(self, fields, data, options, file_name):
        """Serialize ``fields`` + ``data`` into a CSV ir.attachment."""
        # write csv
        f = StringIO()
        writer = csv.writer(
            f,
            # Apply the fallback *before* str(): the previous
            # ``str(options.get(OPT_SEPARATOR)) or ","`` never fell back,
            # because str(None) is the truthy string "None", which the csv
            # module rejects as a multi-character delimiter. Same for the
            # quote character below.
            delimiter=str(options.get(OPT_SEPARATOR) or ","),
            quotechar=str(options.get(OPT_QUOTING) or '"'),
        )
        encoding = options.get(OPT_ENCODING) or "utf-8"
        writer.writerow(fields)
        for row in data:
            writer.writerow(row)
        # create attachment
        datas = base64.encodebytes(f.getvalue().encode(encoding))
        attachment = self.env["ir.attachment"].create({"name": file_name, "datas": datas})
        return attachment

    def _read_csv_attachment(self, attachment, options):
        """Decode a CSV attachment back into (header fields, data rows)."""
        decoded_datas = base64.decodebytes(attachment.datas)
        encoding = options.get(OPT_ENCODING) or "utf-8"
        f = TextIOWrapper(BytesIO(decoded_datas), encoding=encoding)
        reader = csv.reader(
            f,
            # Same fallback fix as in _create_csv_attachment: default must
            # be applied before str(), otherwise str(None) == "None" wins.
            delimiter=str(options.get(OPT_SEPARATOR) or ","),
            quotechar=str(options.get(OPT_QUOTING) or '"'),
        )

        fields = next(reader)
        data = list(reader)
        return fields, data

    @staticmethod
    def _extract_chunks(model_obj, fields, data, chunk_size):
        """Split the data on record boundaries, in chunks of minimum chunk_size"""
        fields = list(map(fix_import_export_id_paths, fields))
        row_from = 0
        for rows in model_obj._extract_records(fields, data):
            # _extract_records yields (record, {"rows": {"from":…, "to":…}});
            # cutting on record boundaries keeps x2many command rows together.
            rows = rows[1]["rows"]
            if rows["to"] - row_from + 1 >= chunk_size:
                yield row_from, rows["to"]
                row_from = rows["to"] + 1
        if row_from < len(data):
            yield row_from, len(data) - 1

    def _split_file(
        self,
        model_name,
        translated_model_name,
        attachment,
        options,
        split_context,
        file_name="file.csv",
    ):
        """Split a CSV attachment in smaller import jobs"""
        model_obj = self.env[model_name]
        fields, data = self._read_csv_attachment(attachment, options)
        # Zero-pad chunk numbers so job descriptions sort naturally.
        padding = len(str(len(data)))
        priority = options.get(OPT_PRIORITY, INIT_PRIORITY)
        if options.get(OPT_HAS_HEADER):
            header_offset = 1
        else:
            header_offset = 0
        chunk_size = options.get(OPT_CHUNK_SIZE) or DEFAULT_CHUNK_SIZE
        for row_from, row_to in self._extract_chunks(model_obj, fields, data, chunk_size):
            chunk = str(priority - INIT_PRIORITY).zfill(padding)
            description = _(
                "Import {model_name} from file {file_name} - #{chunk} - lines {row_from} to {row_to}"
            ).format(
                model_name=translated_model_name,
                file_name=file_name,
                chunk=chunk,
                # +1: human-readable 1-based line numbers; header_offset
                # accounts for the header line in the original file.
                row_from=row_from + 1 + header_offset,
                row_to=row_to + 1 + header_offset,
            )
            # create a CSV attachment and enqueue the job
            root, ext = splitext(file_name)
            attachment = self._create_csv_attachment(
                fields,
                data[row_from : row_to + 1],
                options,
                file_name=root + "-" + chunk + ext,
            )
            delayed_job = self.with_delay(description=description, priority=priority)._import_one_chunk(
                model_name=model_name, attachment=attachment, options=options, context=split_context
            )
            self._link_attachment_to_job(delayed_job, attachment)
            priority += 1

    def _import_one_chunk(self, model_name, attachment, options, context):
        """Load one CSV chunk; raise FailedJobError so the job shows as failed."""
        model_obj = self.env[model_name].with_context(**context)
        fields, data = self._read_csv_attachment(attachment, options)
        result = model_obj.load(fields, data)
        error_message = [message["message"] for message in result["messages"] if message["type"] == "error"]
        if error_message:
            raise FailedJobError("\n".join(error_message))
        return result
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

import logging

from odoo import _, api, fields, models
from odoo.exceptions import ValidationError

_logger = logging.getLogger(__name__)


class SPPImportMatch(models.Model):
    """Rule describing how imported rows are matched to existing records."""

    _name = "spp.import.match"
    _description = "Import Matching"
    _order = "sequence, name"

    name = fields.Char()
    sequence = fields.Integer(index=True)
    # When True, matched records are updated by the import instead of skipped.
    overwrite_match = fields.Boolean()
    model_id = fields.Many2one(
        "ir.model",
        "Model",
        required=True,
        ondelete="cascade",
        domain=[("transient", "=", False)],
        help="Model for Import Matching",
    )
    model_name = fields.Char(related="model_id.model")
    model_description = fields.Char(related="model_id.name")
    field_ids = fields.One2many(
        "spp.import.match.fields",
        "match_id",
        string="Fields",
        required=True,
        help="Fields to Match in Importing",
    )

    @api.onchange("model_id")
    def _onchange_model_id(self):
        """Clear the configured fields when the target model changes."""
        for rec in self:
            rec.field_ids = None

    @api.model
    def _match_find(self, model, converted_row, imported_row):
        """Find the existing record of ``model`` matching an imported row.

        :param model: recordset of the model being imported
        :param converted_row: row values converted by the import machinery
        :param imported_row: raw row values keyed by field label
        :return: singleton recordset on a unique match; ``model`` (empty
            recordset) when no rule matches
        :raises ValidationError: when a rule matches more than one record
        """
        usable, field_to_match = self._usable_rules(model._name, converted_row)
        usable = self.browse(usable)
        for combination in usable:
            combination_valid = True
            domain = list()
            for field in combination.field_ids:
                if field.is_conditional:
                    # Conditional fields only apply when the imported cell
                    # carries the configured trigger value.
                    if imported_row[field.name] != field.imported_value:
                        combination_valid = False
                        break
                if field.field_id.name in converted_row:
                    row_value = converted_row[field.field_id.name]
                    field_value = field.field_id.name
                    add_to_domain = True
                    if field.sub_field_id:
                        # x2many values arrive as command tuples
                        # (0, 0, {vals}); match on a key inside the dict.
                        tuple_val = row_value[0][2]
                        add_to_domain = False
                        if field.sub_field_id.name in tuple_val:
                            row_value = tuple_val[field.sub_field_id.name]
                            add_to_domain = True
                            field_value = field.field_id.name + "." + field.sub_field_id.name
                    if add_to_domain:
                        domain.append((field_value, "=", row_value))
            if not combination_valid:
                continue
            match = model.search(domain)
            if len(match) == 1:
                return match
            elif len(match) > 1:
                # Fix: the template carries a %s placeholder, so it must be
                # filled with %-interpolation. The previous
                # ``.format(match[0].name)`` was a no-op (no {} braces) and
                # users saw a literal "%s" in the message.
                raise ValidationError(_("Multiple matches found for '%s'!") % match[0].name)

        return model

    @api.model
    def _usable_rules(self, model_name, fields, option_config_ids=False):
        """Return (rule ids, per-rule field-name lists) usable for an import.

        A rule is usable when at least one of its configured fields appears
        in the imported ``fields``. ``option_config_ids`` optionally limits
        the candidate rules (set from the import dialog).
        """
        result = self
        domain = [("model_name", "=", model_name)]
        if option_config_ids and isinstance(option_config_ids, list):
            domain.append(("id", "in", option_config_ids))
        available = self.search(domain)
        field_to_match = []
        for record in available:
            # NOTE(review): field names are collected for every available
            # rule, even non-matching ones — confirm callers expect that.
            field_to_match.append(record.field_ids.mapped("name"))
            for f in record.field_ids:
                if f.name in fields or f.field_id.name in fields:
                    result |= record
        _logger.info("FIELD TO MATCH: %s", field_to_match)
        _logger.info("RESULT: %s", result.ids)
        return result.ids, field_to_match


class SPPImportMatchFields(models.Model):
    """A field (optionally with a sub-field) participating in a match rule."""

    _name = "spp.import.match.fields"
    _description = "Fields for Import Matching"

    name = fields.Char(compute="_compute_name")
    field_id = fields.Many2one(
        "ir.model.fields",
        string="Field",
        required=True,
        ondelete="cascade",
        domain="[('model_id', '=', model_id)]",
        help="Fields to Match in Importing",
    )
    relation = fields.Char(related="field_id.relation")
    sub_field_id = fields.Many2one(
        "ir.model.fields",
        string="Sub-Field",
        ondelete="cascade",
        help="Sub Fields to Match in Importing",
    )
    match_id = fields.Many2one("spp.import.match", string="Match", ondelete="cascade")
    model_id = fields.Many2one(related="match_id.model_id")
    is_conditional = fields.Boolean()
    imported_value = fields.Char(help="This will be used as a condition to disregard this field if matched")

    # Fix: declare dependencies so the label is recomputed when the
    # (sub-)field selection changes; the compute previously had no
    # @api.depends, so the cached value could go stale in the UI.
    @api.depends("field_id", "sub_field_id")
    def _compute_name(self):
        """Label is "field" or "field/subfield"."""
        for rec in self:
            name = rec.field_id.name
            if rec.sub_field_id:
                name = rec.field_id.name + "/" + rec.sub_field_id.name
            rec.name = name

    @api.onchange("field_id")
    def _onchange_field_id(self):
        """Reject selecting the same (non-relational) field twice in a rule."""
        for rec in self:
            field_id = rec.field_id.id
            field_type = rec.field_id.ttype
            fields_list = []
            if field_type not in ("many2many", "one2many", "many2one"):
                for field in rec.match_id.field_ids:
                    # Skip unsaved lines (NewId virtual ids): they cannot be
                    # duplicates of stored lines yet.
                    new_id_str = str(field.id)
                    new_id_str_2 = "".join(letter for letter in new_id_str if letter.isalnum())
                    if "NewIdvirtual" not in new_id_str_2:
                        fields_list.append(field.field_id.id)

                duplicate_counter = 0
                for duplicate_field in fields_list:
                    if duplicate_field == field_id:
                        duplicate_counter += 1

                if duplicate_counter > 1:
                    raise ValidationError(_("Field '%s', already exists!") % rec.field_id.field_description)


# --- models/queue_job.py ---------------------------------------------------
class SPPQueueJob(models.Model):
    """Job status and result"""

    _inherit = "queue.job"

    def _related_action_attachment(self):
        """Open the ir.attachment passed to the job via the ``att_id`` kwarg."""
        res_id = self.kwargs.get("att_id")
        action = {
            "name": _("Attachment"),
            "type": "ir.actions.act_window",
            "res_model": "ir.attachment",
            "view_mode": "form",
            "res_id": res_id,
        }
        return action
Prevents duplicate creation by comparing imported rows to database records using configurable field combinations. Supports overwriting matched records and asynchronous processing for large datasets. + +### Key Capabilities + +- Define matching rules per model using field combinations to identify existing records +- Match on sub-fields within related records (e.g., household ID within individual) +- Apply conditional matching rules only when specific imported values are present +- Skip duplicate creation or update existing records when matches are found +- Process imports with more than 100 records asynchronously using `queue_job` +- Clear one2many/many2many associations before update to prevent duplicate entries + +### Key Models + +| Model | Description | +| ------------------------- | -------------------------------------------------------- | +| `spp.import.match` | Matching rule configuration for a specific model | +| `spp.import.match.fields` | Individual fields used in a rule, supports sub-fields | + +### Configuration + +After installing: + +1. Navigate to **Registry > Configuration > Import Match** +2. Create a new matching rule and select the target model (e.g., `res.partner`) +3. Add one or more fields to match on (e.g., national ID, or first name + date of birth) +4. Enable **Overwrite Match** to update existing records when matches are found +5. 
For conditional matching, enable **Is Conditional** on a field and specify the expected imported value + +### UI Location + +- **Menu**: Registry > Configuration > Import Match +- **Import Dialog**: Matching applies automatically during CSV import via the standard Odoo import interface +- **Queue Jobs**: Registry > Queue Jobs > Jobs (to monitor asynchronous imports) + +### Security + +| Group | Access | +| ------------------------------ | --------- | +| `spp_security.group_spp_admin` | Full CRUD | + +### Extension Points + +- Override `spp.import.match._match_find()` to customize matching logic for specific use cases +- Override `spp.import.match._usable_rules()` to filter which rules apply based on context +- Inherits `base.load()` to inject matching logic into all model imports + +### Dependencies + +`base`, `spp_base_common`, `base_import`, `queue_job`, `spp_security` diff --git a/spp_import_match/security/ir.model.access.csv b/spp_import_match/security/ir.model.access.csv new file mode 100644 index 00000000..3ec969c2 --- /dev/null +++ b/spp_import_match/security/ir.model.access.csv @@ -0,0 +1,3 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_spp_import_match_admin,SPP Import Matching Admin Access,spp_import_match.model_spp_import_match,spp_security.group_spp_admin,1,1,1,1 +access_spp_import_match_fields_admin,SPP Import Matching Fields Admin Access,spp_import_match.model_spp_import_match_fields,spp_security.group_spp_admin,1,1,1,1 diff --git a/spp_import_match/static/description/icon.png b/spp_import_match/static/description/icon.png new file mode 100644 index 00000000..c7dbdaaf Binary files /dev/null and b/spp_import_match/static/description/icon.png differ diff --git a/spp_import_match/static/description/index.html b/spp_import_match/static/description/index.html new file mode 100644 index 00000000..b3369e2e --- /dev/null +++ b/spp_import_match/static/description/index.html @@ -0,0 +1,521 @@ + + + + + +OpenSPP Import 
Match + + + +
+

OpenSPP Import Match

+ + +

Alpha License: LGPL-3 OpenSPP/OpenSPP2

+

Extends Odoo’s base import functionality to match incoming records +against existing data during bulk imports. Prevents duplicate creation +by comparing imported rows to database records using configurable field +combinations. Supports overwriting matched records and asynchronous +processing for large datasets.

+
+

Key Capabilities

+
    +
  • Define matching rules per model using field combinations to identify +existing records
  • +
  • Match on sub-fields within related records (e.g., household ID within +individual)
  • +
  • Apply conditional matching rules only when specific imported values +are present
  • +
  • Skip duplicate creation or update existing records when matches are +found
  • +
  • Process imports with more than 100 records asynchronously using +queue_job
  • +
  • Clear one2many/many2many associations before update to prevent +duplicate entries
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + + + + +
ModelDescription
spp.import.matchMatching rule configuration for a +specific model
spp.import.match.fieldsIndividual fields used in a rule, +supports sub-fields
+
+
+

Configuration

+

After installing:

+
    +
  1. Navigate to Registry > Configuration > Import Match
  2. +
  3. Create a new matching rule and select the target model (e.g., +res.partner)
  4. +
  5. Add one or more fields to match on (e.g., national ID, or first name ++ date of birth)
  6. +
  7. Enable Overwrite Match to update existing records when matches +are found
  8. +
  9. For conditional matching, enable Is Conditional on a field and +specify the expected imported value
  10. +
+
+
+

UI Location

+
    +
  • Menu: Registry > Configuration > Import Match
  • +
  • Import Dialog: Matching applies automatically during CSV import +via the standard Odoo import interface
  • +
  • Queue Jobs: Registry > Queue Jobs > Jobs (to monitor asynchronous +imports)
  • +
+
+
+

Security

+ ++++ + + + + + + + + + + +
GroupAccess
spp_security.group_spp_adminFull CRUD
+
+
+

Extension Points

+
    +
  • Override spp.import.match._match_find() to customize matching +logic for specific use cases
  • +
  • Override spp.import.match._usable_rules() to filter which rules +apply based on context
  • +
  • Inherits base.load() to inject matching logic into all model +imports
  • +
+
+
+

Dependencies

+

base, spp_base_common, base_import, queue_job, +spp_security

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purpose, do not use in production.

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

Current maintainers:

+

jeremi gonzalesedwin1123

+

This module is part of the OpenSPP/OpenSPP2 project on GitHub.

+

You are welcome to contribute.

+
+
+
+
/** @odoo-module */
import {BaseImportModel} from "@base_import/import_model";
import {patch} from "@web/core/utils/patch";
import {_t} from "@web/core/l10n/translation";

patch(BaseImportModel.prototype, {
    setup() {
        super.setup();
    },

    /**
     * Override the import RPC so queued (asynchronous) imports notify the
     * user and navigate back, instead of rendering an import-result screen
     * that will never be populated.
     *
     * Fix: removed a leftover `console.log(res)` debug statement that
     * dumped the full server response to the browser console.
     *
     * @param {Boolean} dryrun whether this is a validation-only run
     * @param {Array} args positional arguments forwarded to execute_import
     * @returns {Promise<Object>} the server result, or {error} on failure
     */
    async _callImport(dryrun, args) {
        try {
            const res = await this.orm.silent.call(
                "base_import.import",
                "execute_import",
                args,
                {
                    dryrun,
                    context: {
                        ...this.context,
                        tracking_disable: this.importOptions.tracking_disable,
                    },
                }
            );
            if ("async" in res) {
                if (res.async === true) {
                    this.displayNotification(_t("Successfully added on Queue"));
                    history.go(-1);
                }
            }
            return res;
        } catch (error) {
            // This pattern isn't optimal but it is needed to have
            // similar behaviours as in legacy. That is, catching
            // all import errors and showing them inside the top
            // "messages" area.
            return {error};
        }
    },

    /**
     * Show a non-sticky success toast titled "Queued".
     * @param {String} message translated notification body
     */
    displayNotification(message) {
        this.env.services.action.doAction({
            type: "ir.actions.client",
            tag: "display_notification",
            params: {
                title: "Queued",
                message: message,
                type: "success",
                sticky: false,
            },
        });
    },
});
+Ben,ben@example.com +Sophia,sophia@mail.com +Elijah,elijah@example.com +Lily,lily@example.com +Matthew,matthew@example.com +Grace,grace@example.com +Alexander,alexander@example.com +Zoe,zoe@example.com +Ryan,ryan@example.com +Hannah,hannah@example.com +Leo,leo@example.com +Scarlett,scarlett@example.com +Samuel,samuel@example.com +Abigail,abigail@example.com +Nathan,nathan@example.com +Ellie,ellie@example.com +Mason,mason@example.com +Natalie,natalie@example.com +Jacob,jacob@example.com +Sophie,sophie@example.com +William,william@example.com +Leah,leah@example.com +Dylan,dylan@example.com +Eva,eva@example.com +Isaac,isaac@example.com +Victoria,victoria@example.com +Eli,eli@example.com +Luna,luna@example.com +Aiden,aiden@example.com +Bella,bella@example.com +Logan,logan@example.com +Julia,julia@example.com +Owen,owen@example.com +Madeline,madeline@example.com +Caleb,caleb@example.com +Avery,avery@example.com +Carter,carter@example.com +Stella,stella@example.com +Sebastian,sebastian@example.com +Sienna,sienna@example.com +Jason,jason@example.com +Violet,violet@example.com +Luke,luke@example.com +Lucy,lucy@example.com +Isaiah,isaiah@example.com +Hazel,hazel@example.com +Adam,adam@example.com +Ruby,ruby@example.com +Connor,connor@example.com +Lydia,lydia@example.com +Zachary,zachary@example.com +Harper,harper@example.com +Jaxon,jaxon@example.com +Penelope,penelope@example.com +Evan,evan@example.com +Aurora,aurora@example.com +Miles,miles@example.com +Aria,aria@example.com +Brandon,brandon@example.com +Alice,alice@example.com +Eliot,eliot@example.com +Piper,piper@example.com +Hunter,hunter@example.com +Laila,laila@example.com +Jordan,jordan@example.com +Athena,athena@example.com +Theo,theo@example.com +Jasmine,jasmine@example.com +Colton,colton@example.com +Skye,skye@example.com +Dom,Dom@example.com diff --git a/spp_import_match/tests/res_partner_group_name.csv b/spp_import_match/tests/res_partner_group_name.csv new file mode 100644 index 00000000..a834ede8 --- 
# Part of OpenSPP. See LICENSE file for full copyright and licensing details.

from odoo.tests import TransactionCase, tagged


@tagged("post_install", "-at_install")
class TestBaseWrite(TransactionCase):
    """Exercise the ``base.write()`` override that strips falsy o2m/m2m values."""

    def test_write_removes_falsy_one2many(self):
        """A falsy one2many value must be dropped from vals, not written."""
        rec = self.env["res.partner"].create({"name": "WriteTest"})
        # child_ids is a one2many; the override should strip the False
        # entry so the write still succeeds and updates the other field.
        rec.write({"name": "WriteTest Updated", "child_ids": False})
        self.assertEqual(rec.name, "WriteTest Updated")

    def test_write_removes_falsy_many2many(self):
        """A falsy many2many value (category_id) must be stripped as well."""
        rec = self.env["res.partner"].create({"name": "WriteTestM2M"})
        rec.write({"name": "WriteTestM2M Updated", "category_id": False})
        self.assertEqual(rec.name, "WriteTestM2M Updated")

    def test_write_keeps_truthy_values(self):
        """Truthy values pass through to the standard write untouched."""
        rec = self.env["res.partner"].create({"name": "WriteKeep"})
        rec.write({"name": "WriteKeep Updated", "email": "test@test.com"})
        self.assertEqual(rec.name, "WriteKeep Updated")
        self.assertEqual(rec.email, "test@test.com")

    def test_write_keeps_falsy_non_relational(self):
        """Falsy values on non-relational fields are still written."""
        rec = self.env["res.partner"].create({"name": "WriteFalsyChar", "email": "before@test.com"})
        rec.write({"email": False})
        self.assertFalse(rec.email)
match._onchange_model_id() + self.assertFalse(match.field_ids) + + def test_usable_rules_returns_matching(self): + """Test _usable_rules returns rules when fields match.""" + match = self._create_match_rule([{"field_id": self.name_field.id}]) + result_ids, field_to_match = self.env["spp.import.match"]._usable_rules("res.partner", ["name", "email"]) + self.assertIn(match.id, result_ids) + self.assertTrue(len(field_to_match) > 0) + + def test_usable_rules_no_match(self): + """Test _usable_rules returns empty when no fields match.""" + self._create_match_rule([{"field_id": self.name_field.id}]) + result_ids, _ = self.env["spp.import.match"]._usable_rules("res.partner", ["phone"]) + self.assertEqual(result_ids, []) + + def test_usable_rules_with_option_config_ids(self): + """Test _usable_rules filters by option_config_ids.""" + match1 = self._create_match_rule([{"field_id": self.name_field.id}]) + match2 = self._create_match_rule([{"field_id": self.email_field.id}]) + # Only match1 should be returned + result_ids, _ = self.env["spp.import.match"]._usable_rules( + "res.partner", ["name", "email"], option_config_ids=[match1.id] + ) + self.assertIn(match1.id, result_ids) + self.assertNotIn(match2.id, result_ids) + + def test_match_find_single_match(self): + """Test _match_find returns single match.""" + partner = self.env["res.partner"].create({"name": "UniqueMatchTest12345"}) + match = self._create_match_rule([{"field_id": self.name_field.id}]) + result = match._match_find( + self.env["res.partner"], + {"name": "UniqueMatchTest12345"}, + {"name": "UniqueMatchTest12345", "id": None}, + ) + self.assertEqual(result, partner) + + def test_match_find_no_match(self): + """Test _match_find returns empty model when no match found.""" + match = self._create_match_rule([{"field_id": self.name_field.id}]) + result = match._match_find( + self.env["res.partner"], + {"name": "NonExistentPartner99999"}, + {"name": "NonExistentPartner99999", "id": None}, + ) + # Should return the model 
(empty recordset) + self.assertFalse(result.id) + + def test_match_find_multiple_matches_raises(self): + """Test _match_find raises ValidationError on multiple matches.""" + self.env["res.partner"].create({"name": "DuplicateMatchTest"}) + self.env["res.partner"].create({"name": "DuplicateMatchTest"}) + match = self._create_match_rule([{"field_id": self.name_field.id}]) + with self.assertRaises(ValidationError): + match._match_find( + self.env["res.partner"], + {"name": "DuplicateMatchTest"}, + {"name": "DuplicateMatchTest", "id": None}, + ) + + def test_match_find_conditional_skip(self): + """Test _match_find skips rule when conditional value doesn't match.""" + self.env["res.partner"].create({"name": "ConditionalTest"}) + match = self._create_match_rule( + [ + { + "field_id": self.name_field.id, + "is_conditional": True, + "imported_value": "WrongValue", + } + ] + ) + result = match._match_find( + self.env["res.partner"], + {"name": "ConditionalTest"}, + {"name": "ConditionalTest", "id": None}, + ) + # Should not find match because conditional value doesn't match + self.assertFalse(result.id) + + def test_match_find_conditional_match(self): + """Test _match_find uses rule when conditional value matches.""" + partner = self.env["res.partner"].create({"name": "ConditionalMatchTest"}) + match = self._create_match_rule( + [ + { + "field_id": self.name_field.id, + "is_conditional": True, + "imported_value": "ConditionalMatchTest", + } + ] + ) + result = match._match_find( + self.env["res.partner"], + {"name": "ConditionalMatchTest"}, + {"name": "ConditionalMatchTest", "id": None}, + ) + self.assertEqual(result, partner) + + def test_field_compute_name(self): + """Test _compute_name for match fields.""" + match = self._create_match_rule([{"field_id": self.name_field.id}]) + self.assertEqual(match.field_ids[0].name, "name") + + def test_field_compute_name_with_sub_field(self): + """Test _compute_name includes sub_field when present.""" + # Find a relational field on 
res.partner for sub_field testing + child_ids_field = self.env["ir.model.fields"].search( + [ + ("name", "=", "child_ids"), + ("model_id", "=", self.res_partner_model.id), + ], + limit=1, + ) + if child_ids_field: + match = self._create_match_rule( + [ + { + "field_id": child_ids_field.id, + "sub_field_id": self.name_field.id, + } + ] + ) + self.assertEqual(match.field_ids[0].name, "child_ids/name") diff --git a/spp_import_match/tests/test_queue_job.py b/spp_import_match/tests/test_queue_job.py new file mode 100644 index 00000000..3aff1bda --- /dev/null +++ b/spp_import_match/tests/test_queue_job.py @@ -0,0 +1,23 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. + +from odoo.tests import TransactionCase, tagged + + +@tagged("post_install", "-at_install") +class TestSPPQueueJob(TransactionCase): + """Test queue.job extension for attachment related action.""" + + def test_related_action_attachment(self): + """Test _related_action_attachment returns correct action dict.""" + job = self.env["queue.job"].new({"kwargs": {"att_id": 42}}) + action = job._related_action_attachment() + self.assertEqual(action["type"], "ir.actions.act_window") + self.assertEqual(action["res_model"], "ir.attachment") + self.assertEqual(action["view_mode"], "form") + self.assertEqual(action["res_id"], 42) + + def test_related_action_attachment_no_att_id(self): + """Test _related_action_attachment when att_id is not in kwargs.""" + job = self.env["queue.job"].new({"kwargs": {}}) + action = job._related_action_attachment() + self.assertIsNone(action["res_id"]) diff --git a/spp_import_match/tests/test_res_partner_import_match.py b/spp_import_match/tests/test_res_partner_import_match.py new file mode 100644 index 00000000..4e1f7d82 --- /dev/null +++ b/spp_import_match/tests/test_res_partner_import_match.py @@ -0,0 +1,173 @@ +import logging +import os + +from odoo import _ +from odoo.exceptions import ValidationError +from odoo.tests import TransactionCase + +_logger 
= logging.getLogger(__name__) + +OPTIONS = { + "import_skip_records": [], + "import_set_empty_fields": [], + "fallback_values": {}, + "name_create_enabled_fields": {}, + "encoding": "ascii", + "separator": ",", + "quoting": '"', + "date_format": "", + "datetime_format": "", + "float_thousand_separator": ",", + "float_decimal_separator": ".", + "advanced": True, + "has_headers": True, + "keep_matches": False, + "limit": 2000, + "sheets": [], + "sheet": "", + "skip": 0, + "tracking_disable": True, +} + + +class TestResPartnerImportMatch(TransactionCase): + @staticmethod + def get_file_path_1(): + return f"{os.path.dirname(os.path.abspath(__file__))}/res_partner_group_name.csv" + + @staticmethod + def get_file_path_2(): + return f"{os.path.dirname(os.path.abspath(__file__))}/res_partner_name.csv" + + @staticmethod + def get_file_path_3(): + return f"{os.path.dirname(os.path.abspath(__file__))}/res_partner_group_async.csv" + + def setUp(self): + super().setUp() + self._test_hh = self.env["res.partner"].create( + { + "name": "Renaud", + "is_registrant": True, + "is_group": True, + "email": "renaud@gmail.com", + } + ) + self._test_applicant = self.env["res.partner"].create( + { + "name": "Rufino Renaud", + "family_name": "Rufino", + "given_name": "Renaud", + "is_group": False, + "is_registrant": True, + "phone": "+639266716911", + "email": "rufinorenaud12@gmail.com", + } + ) + + def _base_import_record(self, res_model, file_path): + with open(file_path, encoding="utf-8") as f: + csv_file = str.encode(f.read(), "utf-8") + csv_file_name = f.name + + base_import = self.env["base_import.import"].create( + { + "res_model": res_model, + "file": csv_file, + "file_name": csv_file_name, + "file_type": "csv", + } + ) + return base_import + + def create_matching_given_family_name(self): + res_partner = self.env["ir.model"].search([("model", "=", "res.partner")]) + vals = {"model_id": res_partner.id, "overwrite_match": True} + import_match = self.env["spp.import.match"].create(vals) 
+ given_name_field = self.env["ir.model.fields"].search( + [("name", "=", "given_name"), ("model_id", "=", res_partner.id)] + ) + + self.env["spp.import.match.fields"].create({"field_id": given_name_field.id, "match_id": import_match.id}) + + family_name_field = self.env["ir.model.fields"].search( + [("name", "=", "family_name"), ("model_id", "=", res_partner.id)] + ) + + self.env["spp.import.match.fields"].create({"field_id": family_name_field.id, "match_id": import_match.id}) + + return import_match + + def create_matching_name(self): + res_partner = self.env["ir.model"].search([("model", "=", "res.partner")]) + vals = {"model_id": res_partner.id, "overwrite_match": True} + import_match = self.env["spp.import.match"].create(vals) + name_field = self.env["ir.model.fields"].search([("name", "=", "name"), ("model_id", "=", res_partner.id)]) + + self.env["spp.import.match.fields"].create({"field_id": name_field.id, "match_id": import_match.id}) + + return import_match + + def test_01_res_partner_change_email_by_name(self): + """Change email based on given_name, family_name.""" + self.create_matching_given_family_name() + file_path = self.get_file_path_2() + record = self._base_import_record("res.partner", file_path) + record.execute_import(["given_name", "family_name", "name", "email"], [], OPTIONS) + + self._test_applicant.env.cache.invalidate() + self.assertEqual(self._test_applicant.email, "rufinorenaud@gmail.com") + + def test_02_res_partner_change_email_by_group_name(self): + """Change email based on name.""" + self.create_matching_name() + file_path = self.get_file_path_1() + record = self._base_import_record("res.partner", file_path) + + record.execute_import(["name", "email"], ["name", "email"], OPTIONS) + self._test_hh.env.cache.invalidate() + self.assertEqual(self._test_hh.email, "renaudhh@gmail.com") + + def test_03_res_partner_group_async(self): + """Trigger Async.""" + file_path = self.get_file_path_3() + record = self._base_import_record("res.partner", 
file_path) + + async_rec = record.execute_import(["name", "email"], ["name", "email"], OPTIONS) + self._test_hh.env.cache.invalidate() + self.assertEqual(async_rec["async"], True) + + def test_04_res_partner_group_async_dryrun(self): + """Trigger Async.""" + file_path = self.get_file_path_3() + record = self._base_import_record("res.partner", file_path) + + async_rec = record.execute_import(["name", "email"], ["name", "email"], OPTIONS, True) + self._test_hh.env.cache.invalidate() + self.assertTrue(async_rec, "Result should have value") + + def test_05_check_duplication_on_import_match_config(self): + """Check duplication on import match config.""" + import_match = self.create_matching_name() + with self.assertRaisesRegex(ValidationError, _("Field 'Name', already exists!")): + import_match.write( + { + "field_ids": [ + (0, 0, {"field_id": import_match.field_ids[0].field_id.id, "match_id": import_match.id}) + ] + } + ) + import_match.field_ids[0]._onchange_field_id() + + self.assertEqual(len(import_match.field_ids), 2) + + def test_06_test_match_find(self): + """Test match find.""" + import_match = self.create_matching_name() + import_match.field_ids[0].imported_value = "Rufin Renaud" + import_match.field_ids[0].is_conditional = True + result = import_match._match_find( + import_match.model_id, {"name": "Rufino Renaud"}, {"name": "Rufino Renaud", "id": None} + ) + + self.assertEqual(result, import_match.model_id) diff --git a/spp_import_match/views/import_match_view.xml b/spp_import_match/views/import_match_view.xml new file mode 100644 index 00000000..7091abf8 --- /dev/null +++ b/spp_import_match/views/import_match_view.xml @@ -0,0 +1,130 @@ + + + + view_spp_import_match_tree + spp.import.match + + + + + + + + + + view_spp_import_match_form + spp.import.match + +
+ +

+ +

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ + + view_spp_import_match_filter + spp.import.match + + + + + + + + + + + + + + Import Matching + ir.actions.act_window + spp.import.match + list,form + {} + [] + + +

+ Create a new Import Matching! +

+

+ Click the create button to enter the information of the Import Matching. +

+
+
+ + + + list + + + + + + form + + + + + +
diff --git a/spp_key_management/models/key_manager.py b/spp_key_management/models/key_manager.py index d6938941..16ed5741 100644 --- a/spp_key_management/models/key_manager.py +++ b/spp_key_management/models/key_manager.py @@ -29,6 +29,7 @@ _logger = logging.getLogger(__name__) try: + from cryptography.exceptions import InvalidTag from cryptography.hazmat.primitives.ciphers.aead import AESGCM CRYPTOGRAPHY_AVAILABLE = True @@ -288,7 +289,12 @@ def decrypt(self, ciphertext_b64, purpose, key_id, aad=None): aesgcm = AESGCM(key) # Decrypt - plaintext = aesgcm.decrypt(nonce, ciphertext, aad) + try: + plaintext = aesgcm.decrypt(nonce, ciphertext, aad) + except InvalidTag as e: + raise ValueError( + "Decryption failed: authentication tag verification failed (wrong key or corrupted data)" + ) from e return plaintext.decode("utf-8") @api.model diff --git a/spp_key_management/models/key_provider_aws_kms.py b/spp_key_management/models/key_provider_aws_kms.py index dd26cf86..97d88ef2 100644 --- a/spp_key_management/models/key_provider_aws_kms.py +++ b/spp_key_management/models/key_provider_aws_kms.py @@ -20,7 +20,7 @@ import logging import os -from odoo import models +from odoo import _, models from odoo.exceptions import UserError from odoo.tools import config @@ -241,7 +241,7 @@ def get_index_salt(self, purpose): ) encrypted_salt = response["CiphertextBlob"] except ClientError as e: - raise UserError(f"Failed to encrypt salt with AWS KMS: {e}") from e + raise UserError(_("Failed to encrypt salt with AWS KMS: %s") % str(e)) from e EncryptionKey.create( { @@ -269,7 +269,7 @@ def _decrypt_data_key(self, encrypted_key): response = client.decrypt(CiphertextBlob=encrypted_key) return response["Plaintext"] except ClientError as e: - raise UserError(f"Failed to decrypt key with AWS KMS: {e}") from e + raise UserError(_("Failed to decrypt key with AWS KMS: %s") % str(e)) from e def rotate_key(self, key_id): """Request key rotation in AWS KMS. 
@@ -424,7 +424,7 @@ def create_signing_key(self, key_id, key_type="ECC_NIST_P256"): return key_arn except ClientError as e: - raise UserError(f"Failed to create AWS KMS signing key: {e}") from e + raise UserError(_("Failed to create AWS KMS signing key: %s") % str(e)) from e def _get_signing_key_id(self, key_id): """Get the AWS KMS key ID or alias for a signing key. @@ -573,4 +573,4 @@ def get_public_key_from_kms(self, key_id): response = client.get_public_key(KeyId=kms_key_id) return response["PublicKey"] except ClientError as e: - raise UserError(f"Failed to get public key from AWS KMS: {e}") from e + raise UserError(_("Failed to get public key from AWS KMS: %s") % str(e)) from e diff --git a/spp_key_management/models/key_provider_gcp_kms.py b/spp_key_management/models/key_provider_gcp_kms.py index 6e614ebc..2d206a16 100644 --- a/spp_key_management/models/key_provider_gcp_kms.py +++ b/spp_key_management/models/key_provider_gcp_kms.py @@ -19,7 +19,7 @@ import logging import os -from odoo import models +from odoo import _, models from odoo.exceptions import UserError from odoo.tools import config @@ -295,7 +295,7 @@ def get_index_salt(self, purpose): response = client.encrypt(name=key_path, plaintext=new_salt) encrypted_salt = response.ciphertext except gcp_exceptions.GoogleAPICallError as e: - raise UserError(f"Failed to encrypt salt with GCP KMS: {e}") from e + raise UserError(_("Failed to encrypt salt with GCP KMS: %s") % str(e)) from e EncryptionKey.create( { @@ -476,7 +476,7 @@ def create_signing_key(self, key_id, key_type="EC_SIGN_P256_SHA256"): return crypto_key.primary.name except gcp_exceptions.GoogleAPICallError as e: - raise UserError(f"Failed to create GCP KMS signing key: {e}") from e + raise UserError(_("Failed to create GCP KMS signing key: %s") % str(e)) from e def sign_with_kms(self, key_id, data, algorithm="EC_SIGN_P256_SHA256"): """Sign data using GCP KMS. 
@@ -596,4 +596,4 @@ def get_public_key_from_kms(self, key_id): return response.pem.encode() except gcp_exceptions.GoogleAPICallError as e: - raise UserError(f"Failed to get public key from GCP KMS: {e}") from e + raise UserError(_("Failed to get public key from GCP KMS: %s") % str(e)) from e diff --git a/spp_key_management/models/key_provider_vault.py b/spp_key_management/models/key_provider_vault.py index 0b4c3c42..75f38985 100644 --- a/spp_key_management/models/key_provider_vault.py +++ b/spp_key_management/models/key_provider_vault.py @@ -21,7 +21,7 @@ import logging import os -from odoo import models +from odoo import _, models from odoo.exceptions import UserError from odoo.tools import config @@ -90,7 +90,7 @@ def _get_vault_client(self): raise UserError(f"Unknown Vault auth method: {auth_method}") if not client.is_authenticated(): - raise UserError("Failed to authenticate with Vault") + raise UserError(_("Failed to authenticate with Vault")) return client @@ -346,7 +346,7 @@ def create_signing_key(self, key_id, key_type="ed25519"): if "already exists" in str(e): _logger.debug("Vault Transit key already exists: %s", key_id) return {"key_id": key_id, "key_type": vault_key_type} - raise UserError(f"Failed to create Vault signing key: {e}") from e + raise UserError(_("Failed to create Vault signing key: %s") % str(e)) from e def sign_with_transit(self, key_id, data, algorithm="ed25519"): """Sign data using Vault Transit engine. diff --git a/spp_oauth/README.rst b/spp_oauth/README.rst new file mode 100644 index 00000000..7d681a1b --- /dev/null +++ b/spp_oauth/README.rst @@ -0,0 +1,167 @@ +================== +OpenSPP API: Oauth +================== + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! 
source digest: sha256:25a909cff59dac7bf6fb0a36671aac5a2b03bbd13eda1b5085bde0c2f467c93f + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Alpha-red.png + :target: https://odoo-community.org/page/development-status + :alt: Alpha +.. |badge2| image:: https://img.shields.io/badge/license-LGPL--3-blue.png + :target: http://www.gnu.org/licenses/lgpl-3.0-standalone.html + :alt: License: LGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OpenSPP%2FOpenSPP2-lightgray.png?logo=github + :target: https://github.com/OpenSPP/OpenSPP2/tree/19.0/spp_oauth + :alt: OpenSPP/OpenSPP2 + +|badge1| |badge2| |badge3| + +OAuth 2.0 authentication framework for securing OpenSPP API +communications using JWT tokens signed with RSA keys. Provides utility +functions to generate and verify JWT signatures using the RS256 +algorithm. Stores RSA key pairs as system parameters and exposes +configuration UI for key management. + +Key Capabilities +~~~~~~~~~~~~~~~~ + +- Generate JWT tokens signed with RSA private keys using + ``calculate_signature()`` +- Verify and decode JWT tokens using RSA public keys via + ``verify_and_decode_signature()`` +- Store and retrieve RSA key pairs (4096-bit recommended) through system + parameters +- Configure OAuth keys through Settings UI with password-protected + fields + +Key Models +~~~~~~~~~~ + ++-------------------------+--------------------------------------------+ +| Model | Description | ++=========================+============================================+ +| ``res.config.settings`` | Extended to add OAuth private and public | +| | key fields | ++-------------------------+--------------------------------------------+ + +Utility Functions +~~~~~~~~~~~~~~~~~ + ++-----------------------------------+----------------------------------+ +| Function | Purpose | ++===================================+==================================+ +| ``calculate_signature()`` | Encodes JWT with 
header and | +| | payload using RS256 | ++-----------------------------------+----------------------------------+ +| ``verify_and_decode_signature()`` | Decodes and verifies JWT token, | +| | returns payload | ++-----------------------------------+----------------------------------+ +| ``OpenSPPOAuthJWTException`` | Custom exception for OAuth JWT | +| | errors with logging | ++-----------------------------------+----------------------------------+ + +Configuration +~~~~~~~~~~~~~ + +After installing: + +1. Navigate to **Settings > General Settings** +2. Scroll to **SPP OAuth Settings** app block +3. Enter RSA private key (4096-bit recommended) in the **Private Key** + field +4. Enter corresponding RSA public key in the **Public Key** field +5. Save settings + +The keys are stored as system parameters: + +- ``spp_oauth.oauth_priv_key`` +- ``spp_oauth.oauth_pub_key`` + +UI Location +~~~~~~~~~~~ + +- **Settings App Block**: SPP OAuth Settings (within Settings > General + Settings) +- **Access**: Available to users with Settings access + +Security +~~~~~~~~ + +=================== ============================= +Group Access +=================== ============================= +``base.group_user`` Read/Write (no create/delete) +=================== ============================= + +Keys are displayed as password fields in the UI but stored as plain text +in ``ir.config_parameter``. + +Extension Points +~~~~~~~~~~~~~~~~ + +- Import ``calculate_signature()`` and ``verify_and_decode_signature()`` + from ``odoo.addons.spp_oauth.tools`` to implement OAuth 2.0 + authentication in custom API endpoints +- Catch ``OpenSPPOAuthJWTException`` for OAuth-specific error handling + in API controllers + +Dependencies +~~~~~~~~~~~~ + +``spp_security``, ``base`` + +**External Python**: ``pyjwt>=2.4.0`` + +.. IMPORTANT:: + This is an alpha version, the data model and design can change at any time without warning. + Only for development or testing purpose, do not use in production. 
+ +**Table of contents** + +.. contents:: + :local: + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues <https://github.com/OpenSPP/OpenSPP2/issues>`_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback <https://github.com/OpenSPP/OpenSPP2/issues/new>`_. + +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* OpenSPP.org + +Maintainers +----------- + +.. |maintainer-jeremi| image:: https://github.com/jeremi.png?size=40px + :target: https://github.com/jeremi + :alt: jeremi +.. |maintainer-gonzalesedwin1123| image:: https://github.com/gonzalesedwin1123.png?size=40px + :target: https://github.com/gonzalesedwin1123 + :alt: gonzalesedwin1123 +.. |maintainer-reichie020212| image:: https://github.com/reichie020212.png?size=40px + :target: https://github.com/reichie020212 + :alt: reichie020212 + +Current maintainers: + +|maintainer-jeremi| |maintainer-gonzalesedwin1123| |maintainer-reichie020212| + +This module is part of the `OpenSPP/OpenSPP2 <https://github.com/OpenSPP/OpenSPP2>`_ project on GitHub. + +You are welcome to contribute. \ No newline at end of file diff --git a/spp_oauth/__init__.py b/spp_oauth/__init__.py new file mode 100644 index 00000000..0650744f --- /dev/null +++ b/spp_oauth/__init__.py @@ -0,0 +1 @@ +from . 
import models diff --git a/spp_oauth/__manifest__.py b/spp_oauth/__manifest__.py new file mode 100644 index 00000000..2468bbe2 --- /dev/null +++ b/spp_oauth/__manifest__.py @@ -0,0 +1,25 @@ +# pylint: disable=pointless-statement +{ + "name": "OpenSPP API: Oauth", + "summary": "The module establishes an OAuth 2.0 authentication framework, securing OpenSPP API communication for integrated systems and applications.", + "category": "OpenSPP", + "version": "19.0.1.3.1", + "author": "OpenSPP.org", + "development_status": "Alpha", + "maintainers": ["jeremi", "gonzalesedwin1123", "reichie020212"], + "external_dependencies": {"python": ["pyjwt>=2.4.0"]}, + "website": "https://github.com/OpenSPP/OpenSPP2", + "license": "LGPL-3", + "depends": [ + "spp_security", + "base", + ], + "data": [ + "security/ir.model.access.csv", + "data/ir_config_parameter_data.xml", + "views/res_config_view.xml", + ], + "application": False, + "auto_install": False, + "installable": True, +} diff --git a/spp_oauth/data/ir_config_parameter_data.xml b/spp_oauth/data/ir_config_parameter_data.xml new file mode 100644 index 00000000..24d8b929 --- /dev/null +++ b/spp_oauth/data/ir_config_parameter_data.xml @@ -0,0 +1,11 @@ + + + + spp_oauth.oauth_priv_key + YourPrivateKeyHere + + + spp_oauth.oauth_pub_key + YourPublicKeyHere + + diff --git a/spp_oauth/i18n/lo.po b/spp_oauth/i18n/lo.po new file mode 100644 index 00000000..92df6a36 --- /dev/null +++ b/spp_oauth/i18n/lo.po @@ -0,0 +1,14 @@ +# Translation of Odoo Server. 
+# This file contains the translation of the following modules: +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 17.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: Automatically generated\n" +"Language-Team: none\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: \n" +"Language: lo\n" diff --git a/spp_oauth/i18n/spp_oauth.pot b/spp_oauth/i18n/spp_oauth.pot new file mode 100644 index 00000000..d69eb7e7 --- /dev/null +++ b/spp_oauth/i18n/spp_oauth.pot @@ -0,0 +1,49 @@ +# Translation of Odoo Server. +# This file contains the translation of the following modules: +# * spp_oauth +# +msgid "" +msgstr "" +"Project-Id-Version: Odoo Server 17.0\n" +"Report-Msgid-Bugs-To: \n" +"Last-Translator: \n" +"Language-Team: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: \n" +"Plural-Forms: \n" + +#. module: spp_oauth +#: model:ir.model,name:spp_oauth.model_res_config_settings +msgid "Config Settings" +msgstr "" + +#. module: spp_oauth +#: model:ir.model.fields,field_description:spp_oauth.field_res_config_settings__oauth_priv_key +msgid "OAuth Private Key" +msgstr "" + +#. module: spp_oauth +#: model:ir.model.fields,field_description:spp_oauth.field_res_config_settings__oauth_pub_key +msgid "OAuth Public Key" +msgstr "" + +#. module: spp_oauth +#: model_terms:ir.ui.view,arch_db:spp_oauth.spp_oauth_config_view +msgid "OAuth Settings (4096 bits RSA keys)" +msgstr "" + +#. module: spp_oauth +#: model_terms:ir.ui.view,arch_db:spp_oauth.spp_oauth_config_view +msgid "Private Key" +msgstr "" + +#. module: spp_oauth +#: model_terms:ir.ui.view,arch_db:spp_oauth.spp_oauth_config_view +msgid "Public Key" +msgstr "" + +#. 
module: spp_oauth +#: model_terms:ir.ui.view,arch_db:spp_oauth.spp_oauth_config_view +msgid "SPP OAuth Settings" +msgstr "" diff --git a/spp_oauth/models/__init__.py b/spp_oauth/models/__init__.py new file mode 100644 index 00000000..0deb68c4 --- /dev/null +++ b/spp_oauth/models/__init__.py @@ -0,0 +1 @@ +from . import res_config_settings diff --git a/spp_oauth/models/res_config_settings.py b/spp_oauth/models/res_config_settings.py new file mode 100644 index 00000000..c0ee50f9 --- /dev/null +++ b/spp_oauth/models/res_config_settings.py @@ -0,0 +1,14 @@ +from odoo import fields, models + + +class RegistryConfig(models.TransientModel): + _inherit = "res.config.settings" + + oauth_priv_key = fields.Char( + string="OAuth Private Key", + config_parameter="spp_oauth.oauth_priv_key", + ) + oauth_pub_key = fields.Char( + string="OAuth Public Key", + config_parameter="spp_oauth.oauth_pub_key", + ) diff --git a/spp_oauth/pyproject.toml b/spp_oauth/pyproject.toml new file mode 100644 index 00000000..4231d0cc --- /dev/null +++ b/spp_oauth/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["whool"] +build-backend = "whool.buildapi" diff --git a/spp_oauth/readme/DESCRIPTION.md b/spp_oauth/readme/DESCRIPTION.md new file mode 100644 index 00000000..41239387 --- /dev/null +++ b/spp_oauth/readme/DESCRIPTION.md @@ -0,0 +1,60 @@ +OAuth 2.0 authentication framework for securing OpenSPP API communications using JWT tokens signed with RSA keys. Provides utility functions to generate and verify JWT signatures using the RS256 algorithm. Stores RSA key pairs as system parameters and exposes configuration UI for key management. 
+ +### Key Capabilities + +- Generate JWT tokens signed with RSA private keys using `calculate_signature()` +- Verify and decode JWT tokens using RSA public keys via `verify_and_decode_signature()` +- Store and retrieve RSA key pairs (4096-bit recommended) through system parameters +- Configure OAuth keys through Settings UI with password-protected fields + +### Key Models + +| Model | Description | +| --------------------- | ------------------------------------------------------- | +| `res.config.settings` | Extended to add OAuth private and public key fields | + +### Utility Functions + +| Function | Purpose | +| ------------------------------- | ---------------------------------------------------- | +| `calculate_signature()` | Encodes JWT with header and payload using RS256 | +| `verify_and_decode_signature()` | Decodes and verifies JWT token, returns payload | +| `OpenSPPOAuthJWTException` | Custom exception for OAuth JWT errors with logging | + +### Configuration + +After installing: + +1. Navigate to **Settings > General Settings** +2. Scroll to **SPP OAuth Settings** app block +3. Enter RSA private key (4096-bit recommended) in the **Private Key** field +4. Enter corresponding RSA public key in the **Public Key** field +5. Save settings + +The keys are stored as system parameters: +- `spp_oauth.oauth_priv_key` +- `spp_oauth.oauth_pub_key` + +### UI Location + +- **Settings App Block**: SPP OAuth Settings (within Settings > General Settings) +- **Access**: Available to users with Settings access + +### Security + +| Group | Access | +| ------------------ | -------------------------------------- | +| `base.group_user` | Read/Write (no create/delete) | + +Keys are displayed as password fields in the UI but stored as plain text in `ir.config_parameter`. 
+ +### Extension Points + +- Import `calculate_signature()` and `verify_and_decode_signature()` from `odoo.addons.spp_oauth.tools` to implement OAuth 2.0 authentication in custom API endpoints +- Catch `OpenSPPOAuthJWTException` for OAuth-specific error handling in API controllers + +### Dependencies + +`spp_security`, `base` + +**External Python**: `pyjwt>=2.4.0` diff --git a/spp_oauth/security/ir.model.access.csv b/spp_oauth/security/ir.model.access.csv new file mode 100644 index 00000000..fb353758 --- /dev/null +++ b/spp_oauth/security/ir.model.access.csv @@ -0,0 +1,2 @@ +id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink +access_res_config_settings_spp_oauth_user,res.config.settings spp_oauth user,base.model_res_config_settings,base.group_user,1,1,0,0 diff --git a/spp_oauth/static/description/icon.png b/spp_oauth/static/description/icon.png new file mode 100644 index 00000000..c7dbdaaf Binary files /dev/null and b/spp_oauth/static/description/icon.png differ diff --git a/spp_oauth/static/description/index.html b/spp_oauth/static/description/index.html new file mode 100644 index 00000000..372b609b --- /dev/null +++ b/spp_oauth/static/description/index.html @@ -0,0 +1,542 @@ + + + + + +OpenSPP API: Oauth + + + +
+

OpenSPP API: Oauth

+ + +

Alpha License: LGPL-3 OpenSPP/OpenSPP2

+

OAuth 2.0 authentication framework for securing OpenSPP API +communications using JWT tokens signed with RSA keys. Provides utility +functions to generate and verify JWT signatures using the RS256 +algorithm. Stores RSA key pairs as system parameters and exposes +configuration UI for key management.

+
+

Key Capabilities

+
    +
  • Generate JWT tokens signed with RSA private keys using +calculate_signature()
  • +
  • Verify and decode JWT tokens using RSA public keys via +verify_and_decode_signature()
  • +
  • Store and retrieve RSA key pairs (4096-bit recommended) through system +parameters
  • +
  • Configure OAuth keys through Settings UI with password-protected +fields
  • +
+
+
+

Key Models

+ ++++ + + + + + + + + + + +
ModelDescription
res.config.settingsExtended to add OAuth private and public +key fields
+
+
+

Utility Functions

+ ++++ + + + + + + + + + + + + + + + + +
FunctionPurpose
calculate_signature()Encodes JWT with header and +payload using RS256
verify_and_decode_signature()Decodes and verifies JWT token, +returns payload
OpenSPPOAuthJWTExceptionCustom exception for OAuth JWT +errors with logging
+
+
+

Configuration

+

After installing:

+
    +
  1. Navigate to Settings > General Settings
  2. +
  3. Scroll to SPP OAuth Settings app block
  4. +
  5. Enter RSA private key (4096-bit recommended) in the Private Key +field
  6. +
  7. Enter corresponding RSA public key in the Public Key field
  8. +
  9. Save settings
  10. +
+

The keys are stored as system parameters:

+
    +
  • spp_oauth.oauth_priv_key
  • +
  • spp_oauth.oauth_pub_key
  • +
+
+
+

UI Location

+
    +
  • Settings App Block: SPP OAuth Settings (within Settings > General +Settings)
  • +
  • Access: Available to users with Settings access
  • +
+
+
+

Security

+ ++++ + + + + + + + + + + +
GroupAccess
base.group_userRead/Write (no create/delete)
+

Keys are displayed as password fields in the UI but stored as plain text +in ir.config_parameter.

+
+
+

Extension Points

+
    +
  • Import calculate_signature() and verify_and_decode_signature() +from odoo.addons.spp_oauth.tools to implement OAuth 2.0 +authentication in custom API endpoints
  • +
  • Catch OpenSPPOAuthJWTException for OAuth-specific error handling +in API controllers
  • +
+
+
+

Dependencies

+

spp_security, base

+

External Python: pyjwt>=2.4.0

+
+

Important

+

This is an alpha version, the data model and design can change at any time without warning. +Only for development or testing purpose, do not use in production.

+
+

Table of contents

+ +
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • OpenSPP.org
  • +
+
+
+

Maintainers

+

Current maintainers:

+

jeremi gonzalesedwin1123 reichie020212

+

This module is part of the OpenSPP/OpenSPP2 project on GitHub.

+

You are welcome to contribute.

+
+
+
+
+ + diff --git a/spp_oauth/tests/__init__.py b/spp_oauth/tests/__init__.py new file mode 100644 index 00000000..89149670 --- /dev/null +++ b/spp_oauth/tests/__init__.py @@ -0,0 +1,2 @@ +from . import test_rsa_encode_decode +from . import test_oauth_errors diff --git a/spp_oauth/tests/common.py b/spp_oauth/tests/common.py new file mode 100644 index 00000000..9e012298 --- /dev/null +++ b/spp_oauth/tests/common.py @@ -0,0 +1,37 @@ +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + +from odoo.tests import TransactionCase + +MOCK_PRIVATE_KEY = "any_private_key" + + +class Common(TransactionCase): + @classmethod + def setUpClass(cls): + super().setUpClass() + cls.env["ir.config_parameter"].sudo().set_param("spp_oauth.oauth_priv_key", None) + cls.env["ir.config_parameter"].sudo().set_param("spp_oauth.oauth_pub_key", None) + + def set_parameters(self): + # Generate test RSA keys + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend()) + public_key = private_key.public_key() + + self.env["ir.config_parameter"].sudo().set_param( + "spp_oauth.oauth_priv_key", + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ).decode("utf-8"), + ) + + self.env["ir.config_parameter"].sudo().set_param( + "spp_oauth.oauth_pub_key", + public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ).decode("utf-8"), + ) diff --git a/spp_oauth/tests/test_oauth_errors.py b/spp_oauth/tests/test_oauth_errors.py new file mode 100644 index 00000000..3321253f --- /dev/null +++ b/spp_oauth/tests/test_oauth_errors.py @@ -0,0 +1,72 @@ +# Part of OpenSPP. See LICENSE file for full copyright and licensing details. 
+ +import uuid + +from ..tools.oauth_exception import OpenSPPOAuthJWTException +from ..tools.rsa_encode_decode import ( + calculate_signature, + get_private_key, + get_public_key, + verify_and_decode_signature, +) +from .common import Common + + +class TestOAuthErrors(Common): + """Test error handling in OAuth JWT operations.""" + + def test_get_private_key_not_configured(self): + """Test that missing private key raises exception.""" + # Keys are cleared in setUpClass, so no set_parameters() call + with self.assertRaises(OpenSPPOAuthJWTException): + get_private_key(self.env) + + def test_get_public_key_not_configured(self): + """Test that missing public key raises exception.""" + with self.assertRaises(OpenSPPOAuthJWTException): + get_public_key(self.env) + + def test_verify_invalid_token(self): + """Test that an invalid JWT token raises exception.""" + self.set_parameters() + with self.assertRaises(OpenSPPOAuthJWTException): + verify_and_decode_signature( + env=self.env, + access_token="invalid.jwt.token", + ) + + def test_verify_tampered_token(self): + """Test that a tampered JWT token raises exception.""" + self.set_parameters() + token = calculate_signature( + env=self.env, + header=None, + payload={"data": "original"}, + ) + # Tamper with the token by modifying a character + tampered = token[:-5] + "XXXXX" + with self.assertRaises(OpenSPPOAuthJWTException): + verify_and_decode_signature( + env=self.env, + access_token=tampered, + ) + + def test_exception_message(self): + """Test that OpenSPPOAuthJWTException preserves message.""" + exc = OpenSPPOAuthJWTException("test error message") + self.assertEqual(str(exc), "test error message") + + def test_calculate_signature_with_header(self): + """Test calculate_signature with explicit header dict.""" + self.set_parameters() + token = calculate_signature( + env=self.env, + header={"alg": "RS256", "typ": "JWT"}, + payload={"test": str(uuid.uuid4())}, + ) + self.assertIsNotNone(token) + decoded = 
verify_and_decode_signature( + env=self.env, + access_token=token, + ) + self.assertIn("test", decoded) diff --git a/spp_oauth/tests/test_rsa_encode_decode.py b/spp_oauth/tests/test_rsa_encode_decode.py new file mode 100644 index 00000000..de4b6dfc --- /dev/null +++ b/spp_oauth/tests/test_rsa_encode_decode.py @@ -0,0 +1,64 @@ +import uuid + +from ..tools.rsa_encode_decode import calculate_signature +from .common import Common + + +class TestRSA(Common): + def test_01_get_private_key(self): + self.set_parameters() + + from ..tools.rsa_encode_decode import get_private_key + + private_key = get_private_key(self.env) + self.assertTrue(private_key is not None) + + def test_02_get_public_key(self): + self.set_parameters() + + from ..tools.rsa_encode_decode import get_public_key + + public_key = get_public_key(self.env) + self.assertTrue(public_key is not None) + + def test_03_calculate_signature(self): + self.set_parameters() + + from ..tools.rsa_encode_decode import calculate_signature + + openapi_token = str(uuid.uuid4()) + + token = calculate_signature( + env=self.env, + header=None, + payload={ + "database": self.env.cr.dbname, + "token": openapi_token, + }, + ) + self.assertTrue(token is not None) + + def test_04_verify_and_decode_signature(self): + self.set_parameters() + + from ..tools.rsa_encode_decode import verify_and_decode_signature + + openapi_token = str(uuid.uuid4()) + + token = calculate_signature( + env=self.env, + header=None, + payload={ + "database": self.env.cr.dbname, + "token": openapi_token, + }, + ) + self.assertTrue(token is not None) + + decoded = verify_and_decode_signature( + env=self.env, + access_token=token, + ) + self.assertTrue(decoded is not None) + self.assertEqual(decoded.get("database"), self.env.cr.dbname) + self.assertEqual(decoded.get("token"), openapi_token) diff --git a/spp_oauth/tools/__init__.py b/spp_oauth/tools/__init__.py new file mode 100644 index 00000000..e549289a --- /dev/null +++ b/spp_oauth/tools/__init__.py @@ -0,0 
+1,3 @@ +from .rsa_encode_decode import calculate_signature +from .rsa_encode_decode import verify_and_decode_signature +from .oauth_exception import OpenSPPOAuthJWTException diff --git a/spp_oauth/tools/oauth_exception.py b/spp_oauth/tools/oauth_exception.py new file mode 100644 index 00000000..f17bcb17 --- /dev/null +++ b/spp_oauth/tools/oauth_exception.py @@ -0,0 +1,9 @@ +import logging + +_logger = logging.getLogger(__name__) + + +class OpenSPPOAuthJWTException(Exception): + def __init__(self, message): + super().__init__(message) + _logger.error("OAuth JWT error: %s", message) diff --git a/spp_oauth/tools/private_key.pem b/spp_oauth/tools/private_key.pem new file mode 100644 index 00000000..e69de29b diff --git a/spp_oauth/tools/public_key.pub b/spp_oauth/tools/public_key.pub new file mode 100644 index 00000000..e69de29b diff --git a/spp_oauth/tools/rsa_encode_decode.py b/spp_oauth/tools/rsa_encode_decode.py new file mode 100644 index 00000000..25dbd70d --- /dev/null +++ b/spp_oauth/tools/rsa_encode_decode.py @@ -0,0 +1,65 @@ +import jwt + +from .oauth_exception import OpenSPPOAuthJWTException + +JWT_ALGORITHM = "RS256" + + +def get_private_key(env): + """ + Retrieves the OAuth private key from Odoo's system parameters. + + :param env: The Odoo environment. + :return: The private key as a string. + :raises OpenSPPOAuthJWTException: If the private key is not configured. + """ + # nosemgrep: odoo-sudo-without-context - system parameter access requires sudo + priv_key = env["ir.config_parameter"].sudo().get_param("spp_oauth.oauth_priv_key") + if not priv_key: + raise OpenSPPOAuthJWTException("OAuth private key not configured in settings.") + return priv_key + + +def get_public_key(env): + """ + Retrieves the OAuth public key from Odoo's system parameters. + + :param env: The Odoo environment. + :return: The public key as a string. + :raises OpenSPPOAuthJWTException: If the public key is not configured. 
+ """ + # nosemgrep: odoo-sudo-without-context - system parameter access requires sudo + pub_key = env["ir.config_parameter"].sudo().get_param("spp_oauth.oauth_pub_key") + if not pub_key: + raise OpenSPPOAuthJWTException("OAuth public key not configured in settings.") + return pub_key + + +def calculate_signature(env, header, payload): + """ + Calculates a JWT signature. + + :param env: The Odoo environment. + :param header: The JWT header. + :param payload: The JWT payload. + :return: The encoded JWT. + """ + + privkey = get_private_key(env) + return jwt.encode(headers=header, payload=payload, key=privkey, algorithm=JWT_ALGORITHM) + + +def verify_and_decode_signature(env, access_token): + """ + Verifies and decodes a JWT access token. + + :param env: The Odoo environment. + :param access_token: The JWT to verify and decode. + :return: The decoded payload. + :raises OpenSPPOAuthJWTException: If verification fails or for any other JWT error. + """ + pubkey = get_public_key(env) + try: + return jwt.decode(access_token, key=pubkey, algorithms=[JWT_ALGORITHM]) + except Exception as e: + raise OpenSPPOAuthJWTException(str(e)) from e diff --git a/spp_oauth/views/res_config_view.xml b/spp_oauth/views/res_config_view.xml new file mode 100644 index 00000000..cdbda30b --- /dev/null +++ b/spp_oauth/views/res_config_view.xml @@ -0,0 +1,27 @@ + + + + spp_oauth_config_view + res.config.settings + + + + + + + + + + + + + + + + + +