[MIG] account_export_csv: Migration to 14.0

pull/756/head
Kévin Roche, 4 years ago
Commit 5ba61cb127
Changed files (changed line count in parentheses):

  1. account_export_csv/__manifest__.py (4)
  2. account_export_csv/security/.~lock.ir.model.access.csv# (1)
  3. account_export_csv/security/ir.model.access.csv (2)
  4. account_export_csv/tests/test_account_export_csv.py (2)
  5. account_export_csv/wizard/account_export_csv.py (22)

account_export_csv/__manifest__.py

@@ -1,11 +1,12 @@
 # Copyright 2013 Camptocamp SA
 # Copyright 2017 ACSONE SA/NV
+# Copyright 2017 Akretion
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
 {
     "name": "Account Export CSV",
     "summary": "Adds accounting CSV export",
-    "version": "12.0.1.2.0",
+    "version": "14.0.0.0.1",
     "depends": [
         "account",
         "date_range",
@@ -14,6 +15,7 @@
     "website": "https://github.com/OCA/account-financial-reporting",
     "license": "AGPL-3",
     "data": [
+        "security/ir.model.access.csv",
         "wizard/account_export_csv_view.xml",
     ],
     "installable": True,

account_export_csv/security/.~lock.ir.model.access.csv#

@@ -0,0 +1 @@
+Kévin Roche,kevin,kevin-Ubuntu-7B16,12.02.2021 22:22,file:///home/kevin/.config/libreoffice/4;

account_export_csv/security/ir.model.access.csv

@@ -0,0 +1,2 @@
+id,name,model_id:id,group_id:id,perm_read,perm_write,perm_create,perm_unlink
+access_account_csv_export,access_account_csv_export,model_account_csv_export,base.group_user,1,1,1,1
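
The new rule gives every internal user (base.group_user) read, write, create and unlink rights on the export wizard model. The external ID model_account_csv_export resolves to the transient model defined in the wizard file below; a minimal sketch of that model follows, where only the class name, _name and the "data" field are confirmed by this diff and everything else is illustrative.

    from odoo import fields, models


    class AccountCSVExport(models.TransientModel):
        # Model referenced by the access rule above.
        _name = "account.csv.export"
        _description = "Export accounting entries as CSV"  # assumed description

        # Binary field the wizard writes the base64-encoded CSV into.
        data = fields.Binary("CSV file", readonly=True)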

account_export_csv/tests/test_account_export_csv.py

@@ -45,7 +45,7 @@ class TestAccountExportCsv(TransactionCase):
             }
         )
         report_wizard.action_manual_export_journal_entries()
-        res = base64.decodestring(report_wizard.data)
+        res = base64.decodebytes(report_wizard.data)
         line_number = self.env["account.move.line"].search_count([])
         # check the number of lines in file: include header + EOF line
         self.assertEqual(len(res.decode().split("\r\n")), line_number + 2)
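
base64.decodestring() has been a deprecated alias of decodebytes() since Python 3.1 and was removed in Python 3.9, so the test switches to the surviving name. A quick standalone illustration of the replacement pair (the payload is invented for the example):

    import base64

    payload = base64.encodebytes(b"account;debit;credit\r\n")  # bytes -> base64 bytes
    assert base64.decodebytes(payload) == b"account;debit;credit\r\n"

    # The old aliases no longer exist on Python 3.9+:
    # base64.decodestring(payload)   -> AttributeError
    # base64.encodestring(b"...")    -> AttributeError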

account_export_csv/wizard/account_export_csv.py

@@ -106,7 +106,7 @@ class AccountCSVExport(models.TransientModel):
             writer = AccountingWriter(file_data)
             writer.writerows(rows)
             file_value = file_data.getvalue()
-            self.write({"data": base64.encodestring(file_value)})
+            self.write({"data": base64.encodebytes(file_value)})
         finally:
             file_data.close()
         return {
@@ -162,7 +162,7 @@ class AccountCSVExport(models.TransientModel):
             writer = AccountingWriter(file_data)
             writer.writerows(rows)
             file_value = file_data.getvalue()
-            self.write({"data": base64.encodestring(file_value)})
+            self.write({"data": base64.encodebytes(file_value)})
         finally:
             file_data.close()
         return {
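
Same story on the encoding side: base64.encodestring() was removed in Python 3.9 and encodebytes() is a drop-in replacement with identical output. A standalone sketch of the pattern used in both hunks, with the CSV content invented for illustration:

    import base64
    import io

    file_data = io.BytesIO()
    file_data.write(b"date;journal;account;debit;credit\r\n")
    file_value = file_data.getvalue()

    # This is what ends up in the wizard's Binary field, i.e. the value passed
    # to self.write({"data": ...}) above.
    encoded = base64.encodebytes(file_value)
    file_data.close()
    print(encoded)  # base64-encoded bytes, newline-terminated as encodestring was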
@@ -223,17 +223,14 @@ class AccountCSVExport(models.TransientModel):
         Memory
         We also write the data to the wizard with SQL query as write seems
         to use too much memory as well.
         Those improvements permitted to improve the export from a 100k line to
         200k lines
         with default `limit_memory_hard = 805306368` (768MB) with more lines,
         you might encounter a MemoryError when trying to download the file even
         if it has been generated.
         To be able to export bigger volume of data, it is advised to set
         limit_memory_hard to 2097152000 (2 GB) to generate the file and let
         Odoo load it in the wizard when trying to download it.
         Tested with up to a generation of 700k entry lines
         """
         self.ensure_one()
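
The docstring's advice maps to the per-worker memory limits in the Odoo server configuration (also settable via the --limit-memory-hard start-up option). A sketch of the relevant odoo.conf entry with the value the docstring recommends:

    [options]
    ; Hard per-worker memory ceiling, in bytes. The docstring above cites
    ; 805306368 (768 MB) as the value it was tested against and recommends
    ; roughly 2 GB for very large exports.
    limit_memory_hard = 2097152000
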
@@ -245,13 +242,14 @@ class AccountCSVExport(models.TransientModel):
         file_data.seek(0)
         base64.encode(file_data, base64_data)
         base64_data.seek(0)
-        self.env.cr.execute(
-            """
-            UPDATE account_csv_export
-            SET data = %s
-            WHERE id = %s""",
-            (base64_data.read(), self.id),
-        )
+        self.write({"data": base64_data.read()})
+        # self.env.cr.execute(
+        #     """
+        #     UPDATE account_csv_export
+        #     SET data = %s
+        #     WHERE id = %s""",
+        #     (base64_data.read(), self.id),
+        # )
         return {
             "type": "ir.actions.act_window",
             "res_model": "account.csv.export",
