|
# Copyright 2024 Hunki Enterprises BV
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl-3.0)

from psycopg2.sql import SQL, Identifier

from odoo import fields, models
| 8 | + |
class SqlExport(models.Model):
    """Extend sql.export with "delta" exports: the full query result is kept
    in a dedicated database table per run, and only rows that were not part
    of the previous run are exported."""

    _inherit = "sql.export"

    export_delta = fields.Boolean(
        string="Delta",
        help="With this checked, the full result of the query "
        "will be stored as table in the database, but the file generated will "
        "only contain rows not existing in the n-1st export",
    )

    def write(self, vals):
        """Delete previous results when we change the query"""
        if "query" in vals:
            # Stored results belong to the old query; a delta against them
            # would be meaningless, so drop them all (keep_last=False)
            for this in self:
                this._export_delta_cleanup(keep_last=False)
        return super().write(vals)

    def _execute_sql_request(
        self,
        params=None,
        mode="fetchall",
        rollback=True,
        view_name=False,
        copy_options="CSV HEADER DELIMITER ';'",
        header=False,
    ):
        """Execute the export query, optionally in delta mode.

        Delta mode is active when the context carries ``export_delta_id``
        (an integer identifying this run, used to name the result table).
        In that case the full result is materialized into a table and the
        query handed to super() is rewritten to return only rows absent
        from the previous run's table (if any).

        Parameters/return are those of the super implementation.
        """
        delta_id = self.env.context.get("export_delta_id")

        if not delta_id:
            # Plain export: nothing to do here
            return super()._execute_sql_request(
                params=params,
                mode=mode,
                rollback=rollback,
                view_name=view_name,
                copy_options=copy_options,
                header=header,
            )

        # Interpolate params now: the rewritten query below is passed to
        # super() with params=None
        original_query = self.env.cr.mogrify(self.query, params).decode("utf-8")
        result_table = self._export_delta_table_name(delta_id)
        table_query = SQL(
            "WITH result as ({0}) SELECT * INTO TABLE {1} FROM result"
        ).format(SQL(original_query), Identifier(result_table))
        # Newest existing result table, as a 0- or 1-element list
        previous_result_table = self._export_delta_existing_tables()[-1:]
        if previous_result_table:
            # Delta: rows of the current result not present in the
            # previous run's table
            result_query = SQL("SELECT * FROM {0} EXCEPT SELECT * FROM {1}").format(
                Identifier(result_table),
                Identifier(previous_result_table[0]),
            )
        else:
            # First run: no baseline, export everything
            result_query = SQL("SELECT * FROM {0}").format(Identifier(result_table))
        self.env.cr.execute(table_query)
        # inject new query in cache for super to use
        self._cache["query"] = result_query
        result = super()._execute_sql_request(
            params=None,
            mode=mode,
            rollback=rollback,
            view_name=view_name,
            copy_options=copy_options,
            header=header,
        )
        # Drop the injected query from the ORM cache so the real value is
        # re-read from the database afterwards
        self.invalidate_recordset(["query"])
        # Keep only the newest table as baseline for the next delta
        self._export_delta_cleanup(keep_last=True)
        return result

    def _export_delta_table_name(self, identifier):
        """
        Return the name of a table to store data for delta export, must end with
        {identifier}
        """
        return f"sql_export_delta_{self.id}_{identifier}"

    def _export_delta_existing_tables(self):
        """Return all table names used for storing data for delta export,
        sorted by their numeric identifier suffix (oldest first)."""
        prefix = self._export_delta_table_name("")
        # Escape "_" in the LIKE pattern: an unescaped underscore matches ANY
        # single character, so the pattern for export id 5
        # ("sql_export_delta_5_%") would also match tables of export ids
        # 51, 52, ... and their tables would be compared against / dropped by
        # this record.  Backslash is PostgreSQL's default LIKE escape char.
        self.env.cr.execute(
            "SELECT table_name FROM information_schema.tables "
            "WHERE table_name LIKE %s",
            (prefix.replace("_", r"\_") + "%",),
        )
        names = [name for name, in self.env.cr.fetchall()]
        # Defensive: only keep names whose suffix is purely numeric; note that
        # int() alone is too lax here because it accepts "_" as a digit
        # separator (int("1_2") == 12)
        return sorted(
            (name for name in names if name[len(prefix):].isdigit()),
            key=lambda name: int(name[len(prefix):]),
        )

    def _export_delta_cleanup(self, keep_last=True):
        """Delete tables storing data for delta export.

        :param keep_last: when True, keep the newest table so the next run
            can compute a delta against it
        """
        table_names = self._export_delta_existing_tables()[: -1 if keep_last else None]
        for table_name in table_names:
            self.env.cr.execute(SQL("DROP TABLE {0}").format(Identifier(table_name)))
0 commit comments