You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

354 lines
13 KiB

  1. # Copyright 2013 Camptocamp SA
  2. # Copyright 2017 ACSONE SA/NV
  3. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
  4. import itertools
  5. import tempfile
  6. from io import StringIO, BytesIO
  7. import base64
  8. import csv
  9. import codecs
  10. from odoo import api, fields, models, _
  11. class AccountingWriter(object):
  12. """
  13. A CSV writer which will write rows to CSV file "f",
  14. which is encoded in the given encoding.
  15. """
  16. def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
  17. # Redirect output to a queue
  18. self.queue = StringIO()
  19. # created a writer with Excel formating settings
  20. self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
  21. self.stream = f
  22. self.encoder = codecs.getincrementalencoder(encoding)()
  23. def writerow(self, row):
  24. # we ensure that we do not try to encode none or bool
  25. row = (x or '' for x in row)
  26. self.writer.writerow(row)
  27. # Fetch UTF-8 output from the queue ...
  28. data = self.queue.getvalue()
  29. # ... and reencode it into the target encoding as BytesIO
  30. data = self.encoder.encode(data)
  31. # write to the target stream
  32. self.stream.write(data)
  33. # seek() or truncate() have side effect if not used combinated
  34. self.queue.truncate(0)
  35. self.queue.seek(0)
  36. # https://stackoverflow.com/questions/4330812/how-do-i-clear-a-stringio-object
  37. # It fails when you use `self.queue = StringIO()` only add one line
  38. def writerows(self, rows):
  39. for row in rows:
  40. self.writerow(row)
  41. # https://docs.python.org/3/library/io.html#io.IOBase.close
  42. self.queue.close()
  43. class AccountCSVExport(models.TransientModel):
  44. _name = 'account.csv.export'
  45. _description = 'Export Accounting'
  46. data = fields.Binary('CSV', readonly=True)
  47. company_id = fields.Many2one(
  48. comodel_name='res.company', string='Company', invisible=True,
  49. default=lambda self: self._get_company_default())
  50. date_start = fields.Date(required=True)
  51. date_end = fields.Date(required=True)
  52. date_range_id = fields.Many2one(
  53. comodel_name='date.range', string='Date range')
  54. journal_ids = fields.Many2many(
  55. comodel_name='account.journal', string='Journals',
  56. default=lambda s: s._get_journal_default(),
  57. help='If empty, use all journals, only used for journal entries')
  58. export_filename = fields.Char(
  59. string='Export CSV Filename', size=128, default='account_export.csv')
  60. @api.model
  61. def _get_journal_default(self):
  62. """ Implements your own default """
  63. return False
  64. @api.model
  65. def _get_company_default(self):
  66. return self.env.user.company_id
  67. @api.onchange('date_range_id')
  68. def _onchange_date_range(self):
  69. if self.date_range_id:
  70. self.date_start = self.date_range_id.date_start
  71. self.date_end = self.date_range_id.date_end
  72. @api.onchange('date_start', 'date_end')
  73. def _onchange_dates(self):
  74. if self.date_range_id:
  75. if self.date_start != self.date_range_id.date_start or \
  76. self.date_end != self.date_range_id.date_end:
  77. self.date_range_id = False
  78. def action_manual_export_account(self):
  79. self.ensure_one()
  80. rows = self._get_data("account")
  81. file_data = BytesIO()
  82. try:
  83. writer = AccountingWriter(file_data)
  84. writer.writerows(rows)
  85. file_value = file_data.getvalue()
  86. self.write({'data': base64.encodestring(file_value)})
  87. finally:
  88. file_data.close()
  89. return {
  90. 'type': 'ir.actions.act_window',
  91. 'res_model': 'account.csv.export',
  92. 'view_mode': 'form',
  93. 'res_id': self.id,
  94. 'views': [(False, 'form')],
  95. 'target': 'new',
  96. }
  97. def _get_header_account(self):
  98. return [
  99. _('CODE'),
  100. _('NAME'),
  101. _('DEBIT'),
  102. _('CREDIT'),
  103. _('BALANCE'),
  104. ]
  105. def _get_rows_account(self, journal_ids):
  106. """
  107. Return list to generate rows of the CSV file
  108. """
  109. self.ensure_one()
  110. self.env.cr.execute("""
  111. select ac.code,ac.name,
  112. sum(debit) as sum_debit,
  113. sum(credit) as sum_credit,
  114. sum(debit) - sum(credit) as balance
  115. from account_move_line as aml,account_account as ac
  116. where aml.account_id = ac.id
  117. AND aml.date >= %(date_start)s
  118. AND aml.date <= %(date_end)s
  119. group by ac.id,ac.code,ac.name
  120. order by ac.code
  121. """, {'date_start': self.date_start,
  122. 'date_end': self.date_end})
  123. res = self.env.cr.fetchall()
  124. rows = []
  125. for line in res:
  126. rows.append(list(line))
  127. return rows
  128. def action_manual_export_analytic(self):
  129. self.ensure_one()
  130. rows = self._get_data("analytic")
  131. file_data = BytesIO()
  132. try:
  133. writer = AccountingWriter(file_data)
  134. writer.writerows(rows)
  135. file_value = file_data.getvalue()
  136. self.write({'data': base64.encodestring(file_value)})
  137. finally:
  138. file_data.close()
  139. return {
  140. 'type': 'ir.actions.act_window',
  141. 'res_model': 'account.csv.export',
  142. 'view_mode': 'form',
  143. 'view_type': 'form',
  144. 'res_id': self.id,
  145. 'views': [(False, 'form')],
  146. 'target': 'new',
  147. }
  148. def _get_header_analytic(self):
  149. return [
  150. _('ANALYTIC CODE'),
  151. _('ANALYTIC NAME'),
  152. _('CODE'),
  153. _('ACCOUNT NAME'),
  154. _('DEBIT'),
  155. _('CREDIT'),
  156. _('BALANCE'),
  157. ]
  158. def _get_rows_analytic(self, journal_ids):
  159. """
  160. Return list to generate rows of the CSV file
  161. """
  162. self.ensure_one()
  163. self.env.cr.execute(""" select aac.code as analytic_code,
  164. aac.name as analytic_name,
  165. ac.code,ac.name,
  166. sum(debit) as sum_debit,
  167. sum(credit) as sum_credit,
  168. sum(debit) - sum(credit) as balance
  169. from account_move_line
  170. left outer join account_analytic_account as aac
  171. on (account_move_line.analytic_account_id = aac.id)
  172. inner join account_account as ac
  173. on account_move_line.account_id = ac.id
  174. AND account_move_line.date >= %(date_start)s
  175. AND account_move_line.date <= %(date_end)s
  176. group by aac.id,aac.code,aac.name,ac.id,ac.code,ac.name
  177. order by aac.code
  178. """, {'date_start': self.date_start,
  179. 'date_end': self.date_end})
  180. res = self.env.cr.fetchall()
  181. rows = []
  182. for line in res:
  183. rows.append(list(line))
  184. return rows
  185. def action_manual_export_journal_entries(self):
  186. """
  187. Here we use TemporaryFile to avoid full filling the Odoo worker
  188. Memory
  189. We also write the data to the wizard with SQL query as write seems
  190. to use too much memory as well.
  191. Those improvements permitted to improve the export from a 100k line to
  192. 200k lines
  193. with default `limit_memory_hard = 805306368` (768MB) with more lines,
  194. you might encounter a MemoryError when trying to download the file even
  195. if it has been generated.
  196. To be able to export bigger volume of data, it is advised to set
  197. limit_memory_hard to 2097152000 (2 GB) to generate the file and let
  198. Odoo load it in the wizard when trying to download it.
  199. Tested with up to a generation of 700k entry lines
  200. """
  201. self.ensure_one()
  202. rows = self._get_data("journal_entries")
  203. with tempfile.TemporaryFile() as file_data:
  204. writer = AccountingWriter(file_data)
  205. writer.writerows(rows)
  206. with tempfile.TemporaryFile() as base64_data:
  207. file_data.seek(0)
  208. base64.encode(file_data, base64_data)
  209. base64_data.seek(0)
  210. self.env.cr.execute("""
  211. UPDATE account_csv_export
  212. SET data = %s
  213. WHERE id = %s""", (base64_data.read(), self.id))
  214. return {
  215. 'type': 'ir.actions.act_window',
  216. 'res_model': 'account.csv.export',
  217. 'view_mode': 'form',
  218. 'res_id': self.id,
  219. 'views': [(False, 'form')],
  220. 'target': 'new',
  221. }
  222. def _get_header_journal_entries(self):
  223. return [
  224. # Standard Sage export fields
  225. _('DATE'),
  226. _('JOURNAL CODE'),
  227. _('ACCOUNT CODE'),
  228. _('PARTNER NAME'),
  229. _('REF'),
  230. _('DESCRIPTION'),
  231. _('DEBIT'),
  232. _('CREDIT'),
  233. _('FULL RECONCILE'),
  234. _('ANALYTIC ACCOUNT CODE'),
  235. # Other fields
  236. _('ENTRY NUMBER'),
  237. _('ACCOUNT NAME'),
  238. _('BALANCE'),
  239. _('AMOUNT CURRENCY'),
  240. _('CURRENCY'),
  241. _('ANALYTIC ACCOUNT NAME'),
  242. _('JOURNAL'),
  243. _('TAX CODE'),
  244. _('TAX NAME'),
  245. _('BANK STATEMENT'),
  246. ]
  247. def _get_rows_journal_entries(self, journal_ids):
  248. """
  249. Create a generator of rows of the CSV file
  250. """
  251. self.ensure_one()
  252. self.env.cr.execute("""
  253. SELECT
  254. account_move_line.date AS date,
  255. account_journal.name as journal,
  256. account_account.code AS account_code,
  257. res_partner.name AS partner_name,
  258. account_move_line.ref AS ref,
  259. account_move_line.name AS description,
  260. account_move_line.debit AS debit,
  261. account_move_line.credit AS credit,
  262. account_full_reconcile.name as full_reconcile,
  263. account_analytic_account.code AS analytic_account_code,
  264. account_move.name AS entry_number,
  265. account_account.name AS account_name,
  266. account_move_line.debit - account_move_line.credit AS balance,
  267. account_move_line.amount_currency AS amount_currency,
  268. res_currency.name AS currency,
  269. account_analytic_account.name AS analytic_account_name,
  270. account_journal.name as journal,
  271. acct.description as tax_code,
  272. acct.name as tax_name,
  273. account_bank_statement.name AS bank_statement
  274. FROM
  275. public.account_move_line
  276. JOIN account_account on
  277. (account_account.id=account_move_line.account_id)
  278. JOIN account_journal on
  279. (account_journal.id = account_move_line.journal_id)
  280. LEFT JOIN res_currency on
  281. (res_currency.id=account_move_line.currency_id)
  282. LEFT JOIN account_full_reconcile on
  283. (account_full_reconcile.id = account_move_line.full_reconcile_id)
  284. LEFT JOIN res_partner on
  285. (res_partner.id=account_move_line.partner_id)
  286. LEFT JOIN account_move on
  287. (account_move.id=account_move_line.move_id)
  288. LEFT JOIN account_analytic_account on
  289. (account_analytic_account.id=account_move_line.analytic_account_id)
  290. LEFT JOIN account_bank_statement on
  291. (account_bank_statement.id=account_move_line.statement_id)
  292. LEFT JOIN account_tax acct on
  293. (acct.id=account_move_line.tax_line_id)
  294. WHERE account_move_line.date >= %(date_start)s
  295. AND account_move_line.date <= %(date_end)s
  296. AND account_journal.id IN %(journal_ids)s
  297. ORDER BY account_move_line.date
  298. """, {'journal_ids': tuple(journal_ids),
  299. 'date_start': self.date_start,
  300. 'date_end': self.date_end})
  301. while 1:
  302. # http://initd.org/psycopg/docs/cursor.html#cursor.fetchmany
  303. # Set cursor.arraysize to minimize network round trips
  304. self.env.cr.arraysize = 100
  305. rows = self.env.cr.fetchmany()
  306. if not rows:
  307. break
  308. for row in rows:
  309. yield row
  310. def _get_data(self, result_type):
  311. self.ensure_one()
  312. get_header_func = getattr(
  313. self, ("_get_header_%s" % (result_type)), None)
  314. get_rows_func = getattr(self, ("_get_rows_%s" % (result_type)), None)
  315. if self.journal_ids:
  316. journal_ids = [x.id for x in self.journal_ids]
  317. else:
  318. j_obj = self.env["account.journal"]
  319. journal_ids = j_obj.search([]).ids
  320. rows = itertools.chain((get_header_func(),),
  321. get_rows_func(journal_ids))
  322. return rows