@@ -20,6 +20,7 @@
 ##############################################################################
 
 import time
+import tempfile
 import StringIO
 import cStringIO
 import base64
@@ -45,16 +46,20 @@ class AccountUnicodeWriter(object):
         self.stream = f
         self.encoder = codecs.getincrementalencoder(encoding)()
 
-    def writerow(self, row):
+    def writerow(self, row, base64_compress=False):
         # we ensure that we do not try to encode None or bool
         row = [x or u'' for x in row]
 
         encoded_row = []
         for c in row:
             if type(c) == unicode:
-                encoded_row.append(c.encode("utf-8"))
+                val = c.encode("utf-8")
+                if base64_compress:
+                    val = base64.encodestring(val)
             else:
-                encoded_row.append(c)
+                val = c
+
+            encoded_row.append(val)
 
         self.writer.writerow(encoded_row)
         # Fetch UTF-8 output from the queue ...
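A note on the new base64_compress flag: base64.encodestring() is applied per cell and appends a trailing newline to its output, which the csv module will quote inside the cell. A minimal Python 2 sketch of the per-cell round trip, not part of the patch and with an illustrative value:

    import base64

    cell = u'Facture client'
    val = cell.encode('utf-8')        # unicode -> UTF-8 bytes, as in writerow()
    val = base64.encodestring(val)    # 'RmFjdHVyZSBjbGllbnQ=\n', note the newline
    assert base64.decodestring(val).decode('utf-8') == cell  # reader side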
@@ -67,9 +72,9 @@ class AccountUnicodeWriter(object):
         # empty queue
         self.queue.truncate(0)
 
-    def writerows(self, rows):
+    def writerows(self, rows, base64_compress=False):
         for row in rows:
-            self.writerow(row)
+            self.writerow(row, base64_compress=base64_compress)
 
 class AccountCSVExport(orm.TransientModel):
     _name = 'account.csv.export'
@@ -80,6 +85,9 @@ class AccountCSVExport(orm.TransientModel):
-        'company_id': fields.many2one('res.company', 'Company', invisible=True),
-        'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscalyear', required=True),
+        'periods': fields.many2many('account.period', 'rel_wizard_period', 'wizard_id', 'period_id', 'Periods', help='All periods in the fiscal year if empty'),
+        'journal_ids': fields.many2many('account.journal', 'rel_wizard_journal', 'wizard_id', 'journal_id', 'Journals', help='If empty, use all journals'),
+        'company_id': fields.many2one('res.company', 'Company', invisible=True),
+        'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscalyear', required=True),
         'export_filename': fields.char('Export CSV Filename', size=128),
     }
 
@@ -207,6 +215,125 @@ class AccountCSVExport(orm.TransientModel):
             rows.append(list(line))
         return rows
 
+
+    def action_manual_export_journal_entries(self, cr, uid, ids, context=None):
+        """
+        Here we use TemporaryFile to avoid filling up the OpenERP worker memory.
+        We also write the data to the wizard with a SQL query, as write seems
+        to use too much memory as well.
+
+        These improvements allowed the export to go from 100k lines to 400k lines.
+        """
+        # XXX check why it still fails with more than 500k and when
+        this = self.browse(cr, uid, ids)[0]
+        rows = self.get_data(cr, uid, ids, "journal_entries", context)
+        with tempfile.TemporaryFile() as file_data:
+            writer = AccountUnicodeWriter(file_data)
+            writer.writerows(rows)
+            del rows
+            with tempfile.TemporaryFile() as base64_data:
+                file_data.seek(0)
+                base64.encode(file_data, base64_data)
+                base64_data.seek(0)
+                cr.execute("""UPDATE account_csv_export SET data = %s WHERE id = %s""", (base64_data.read(), ids[0]))
+        return {
+            'type': 'ir.actions.act_window',
+            'res_model': 'account.csv.export',
+            'view_mode': 'form',
+            'view_type': 'form',
+            'res_id': this.id,
+            'views': [(False, 'form')],
+            'target': 'new',
+        }
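The method above keeps memory usage flat by spooling the CSV to a temporary file, streaming it through base64.encode(), and pushing the result into the wizard with a raw UPDATE instead of an ORM write(). A standalone Python 2 sketch of the streaming part, not part of the patch and with purely illustrative file contents:

    import base64
    import tempfile

    with tempfile.TemporaryFile() as raw:
        raw.write('a;very;large;csv;payload\n' * 100000)
        raw.seek(0)
        with tempfile.TemporaryFile() as encoded:
            # base64.encode() reads and writes in small chunks, so neither the
            # raw CSV nor its base64 form is ever held in memory as one string.
            base64.encode(raw, encoded)
            encoded.seek(0)
            first_line = encoded.readline()  # 76-character base64 lines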
+    def _get_header_journal_entries(self, cr, uid, ids, context=None):
+
+        return [
+            # Standard Sage export fields
+            _(u'DATE'),
+            _(u'JOURNAL CODE'),
+            _(u'ACCOUNT CODE'),
+            _(u'PARTNER NAME'),
+            _(u'REF'),
+            _(u'DESCRIPTION'),
+            _(u'DEBIT'),
+            _(u'CREDIT'),
+            _(u'FULL RECONCILE'),
+            _(u'PARTIAL RECONCILE'),
+            _(u'ANALYTIC ACCOUNT CODE'),
+
+            # Other fields
+            _(u'ENTRY NUMBER'),
+            _(u'ACCOUNT NAME'),
+            _(u'BALANCE'),
+            _(u'AMOUNT CURRENCY'),
+            _(u'CURRENCY'),
+            _(u'ANALYTIC ACCOUNT NAME'),
+            _(u'JOURNAL'),
+            _(u'MONTH'),
+            _(u'FISCAL YEAR'),
+            _(u'TAX CODE CODE'),
+            _(u'TAX CODE NAME'),
+            _(u'TAX AMOUNT'),
+        ]
+
+
+    def _get_rows_journal_entries(self, cr, uid, ids, fiscalyear_id, period_range_ids, company_id, context=None):
+        """
+        Return the list of rows used to generate the CSV file
+        """
+        cr.execute("""
+        SELECT
+          account_move_line.date AS date,
+          account_journal.code AS journal_code,
+          account_account.code AS account_code,
+          res_partner.name AS partner_name,
+          account_move_line.ref AS ref,
+          account_move_line.name AS description,
+          account_move_line.debit AS debit,
+          account_move_line.credit AS credit,
+          account_move_reconcile.name AS full_reconcile,
+          account_move_line.reconcile_partial_id AS partial_reconcile_id,
+          account_analytic_account.code AS analytic_account_code,
+
+          account_move.name AS entry_number,
+          account_account.name AS account_name,
+          account_move_line.debit - account_move_line.credit AS balance,
+          account_move_line.amount_currency AS amount_currency,
+          res_currency.name AS currency,
+          account_analytic_account.name AS analytic_account_name,
+          account_journal.name AS journal,
+          account_period.code AS month,
+          account_fiscalyear.name AS fiscal_year,
+          account_tax_code.code AS aml_tax_code_code,
+          account_tax_code.name AS aml_tax_code_name,
+          account_move_line.tax_amount AS aml_tax_amount
+        FROM
+          public.account_move_line
+          JOIN account_account on (account_account.id = account_move_line.account_id)
+          JOIN account_period on (account_period.id = account_move_line.period_id)
+          JOIN account_fiscalyear on (account_fiscalyear.id = account_period.fiscalyear_id)
+          JOIN account_journal on (account_journal.id = account_move_line.journal_id)
+          LEFT JOIN res_currency on (res_currency.id = account_move_line.currency_id)
+          LEFT JOIN account_move_reconcile on (account_move_reconcile.id = account_move_line.reconcile_id)
+          LEFT JOIN res_partner on (res_partner.id = account_move_line.partner_id)
+          LEFT JOIN account_move on (account_move.id = account_move_line.move_id)
+          LEFT JOIN account_tax on (account_tax.id = account_move_line.account_tax_id)
+          LEFT JOIN account_tax_code on (account_tax_code.id = account_move_line.tax_code_id)
+          LEFT JOIN account_analytic_account on (account_analytic_account.id = account_move_line.analytic_account_id)
+        WHERE account_period.id IN %(period_ids)s
+        ORDER BY account_move_line.date
+        """,
+        {'period_ids': tuple(period_range_ids)}
+        )
+        res = cr.fetchall()
+        rows = []
+        for line in res:
+            rows.append(list(line))
+        return rows
+
+
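On the query above: psycopg2 adapts a Python tuple to a parenthesised value list, which is what makes IN %(period_ids)s work; an empty tuple would render as () and raise a syntax error, so at least one period id must be passed. Illustrative only, not part of the patch:

    cr.execute("SELECT id FROM account_period WHERE id IN %(period_ids)s",
               {'period_ids': (1, 2, 3)})
    # psycopg2 sends: SELECT id FROM account_period WHERE id IN (1, 2, 3)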
     def get_data(self, cr, uid, ids, result_type, context=None):
         get_header_func = getattr(self, ("_get_header_%s" % (result_type)), None)
         get_rows_func = getattr(self, ("_get_rows_%s" % (result_type)), None)
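get_data() resolves its helpers by name, so a result_type of 'journal_entries' picks up the _get_header_journal_entries and _get_rows_journal_entries methods added above, while an unknown type simply yields None for both. A hypothetical sketch of how another export type would plug into this naming scheme (the 'analytic_entries' name and columns are made up, not part of the module):

    def _get_header_analytic_entries(self, cr, uid, ids, context=None):
        return [_(u'DATE'), _(u'ANALYTIC ACCOUNT CODE'), _(u'AMOUNT')]

    def _get_rows_analytic_entries(self, cr, uid, ids, fiscalyear_id,
                                   period_range_ids, company_id, context=None):
        return []  # same list-of-rows contract as _get_rows_journal_entries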
|
|
|