# -*- coding: utf-8 -*-

from __future__ import absolute_import
from email.utils import parseaddr
import functools
import htmlentitydefs
import itertools
import logging
import operator
import re

from ast import literal_eval
from openerp.tools import mute_logger

# Validation Library https://pypi.python.org/pypi/validate_email/1.1
from .validate_email import validate_email

import openerp
from openerp.osv import orm
from openerp.osv import fields
from openerp.osv.orm import browse_record
from openerp.tools.translate import _

pattern = re.compile(r"&(\w+?);")

_logger = logging.getLogger('base.partner.merge')


# http://www.php2python.com/wiki/function.html-entity-decode/
def html_entity_decode_char(m, defs=None):
    if defs is None:
        defs = htmlentitydefs.entitydefs
    try:
        return defs[m.group(1)]
    except KeyError:
        return m.group(0)


def html_entity_decode(string):
    return pattern.sub(html_entity_decode_char, string)


def sanitize_email(partner_email):
    assert isinstance(partner_email, basestring) and partner_email

    result = re.subn(r';|/|:', ',',
                     html_entity_decode(partner_email or ''))[0].split(',')

    emails = [parseaddr(email)[1]
              for item in result
              for email in item.split()]

    return [email.lower()
            for email in emails
            if validate_email(email)]


def is_integer_list(ids):
    return all(isinstance(i, (int, long)) for i in ids)
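
# Illustrative example (not part of the original module): for a value such as
# 'Jane Doe <Jane@Example.COM>; billing@acme.com', sanitize_email() decodes
# HTML entities, turns ';', '/' and ':' into ',', splits on commas and
# whitespace, and keeps only the addresses accepted by validate_email,
# lower-cased: ['jane@example.com', 'billing@acme.com'].
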
class ResPartner(orm.Model):
    _inherit = 'res.partner'

    _columns = {
        'id': fields.integer('Id', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
    }

class MergePartnerLine(orm.TransientModel):
    _name = 'base.partner.merge.line'

    _columns = {
        'wizard_id': fields.many2one('base.partner.merge.automatic.wizard',
                                     'Wizard'),
        'min_id': fields.integer('MinID'),
        'aggr_ids': fields.char('Ids', required=True),
    }

    _order = 'min_id asc'

class MergePartnerAutomatic(orm.TransientModel):
    """
    The idea behind this wizard is to build a list of potential partners to
    merge. We use two models: the first one is the wizard shown to the end
    user, and the second one holds the partner lists to merge.
    """
    _name = 'base.partner.merge.automatic.wizard'

    _columns = {
        # Group by
        'group_by_email': fields.boolean('Email'),
        'group_by_name': fields.boolean('Name'),
        'group_by_is_company': fields.boolean('Is Company'),
        'group_by_vat': fields.boolean('VAT'),
        'group_by_parent_id': fields.boolean('Parent Company'),

        'state': fields.selection([('option', 'Option'),
                                   ('selection', 'Selection'),
                                   ('finished', 'Finished')],
                                  'State',
                                  readonly=True,
                                  required=True),
        'number_group': fields.integer("Group of Contacts", readonly=True),
        'current_line_id': fields.many2one('base.partner.merge.line',
                                           'Current Line'),
        'line_ids': fields.one2many('base.partner.merge.line',
                                    'wizard_id', 'Lines'),
        'partner_ids': fields.many2many('res.partner', string='Contacts'),
        'dst_partner_id': fields.many2one('res.partner',
                                          string='Destination Contact'),
        'exclude_contact': fields.boolean('A user associated to the contact'),
        'exclude_journal_item': fields.boolean('Journal Items associated'
                                               ' to the contact'),
        'maximum_group': fields.integer("Maximum of Group of Contacts"),
    }
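
    # The wizard walks through three states: 'option' (pick the group-by
    # criteria), 'selection' (review one group of duplicates at a time) and
    # 'finished'. Each group of duplicates found is stored as a
    # base.partner.merge.line record pointing back to the wizard.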

    def default_get(self, cr, uid, fields, context=None):
        if context is None:
            context = {}
        res = super(MergePartnerAutomatic, self
                    ).default_get(cr, uid, fields, context)
        if (context.get('active_model') == 'res.partner'
                and context.get('active_ids')):
            partner_ids = context['active_ids']
            res['state'] = 'selection'
            res['partner_ids'] = partner_ids
            res['dst_partner_id'] = self._get_ordered_partner(cr, uid,
                                                              partner_ids,
                                                              context=context
                                                              )[-1].id
        return res

    _defaults = {
        'state': 'option'
    }

    def get_fk_on(self, cr, table):
        q = """ SELECT cl1.relname as table,
                       att1.attname as column
                FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                     pg_attribute as att1, pg_attribute as att2
                WHERE con.conrelid = cl1.oid
                    AND con.confrelid = cl2.oid
                    AND array_lower(con.conkey, 1) = 1
                    AND con.conkey[1] = att1.attnum
                    AND att1.attrelid = cl1.oid
                    AND cl2.relname = %s
                    AND att2.attname = 'id'
                    AND array_lower(con.confkey, 1) = 1
                    AND con.confkey[1] = att2.attnum
                    AND att2.attrelid = cl2.oid
                    AND con.contype = 'f'
            """
        return cr.execute(q, (table,))
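
    # get_fk_on() leaves its result on the cursor: one (table, column) row
    # for every foreign key in the database that points at <table>.id, e.g.
    # ('account_invoice', 'partner_id') for 'res_partner' when the account
    # module is installed. Callers read the rows with cr.fetchall().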

    def _update_foreign_keys(self, cr, uid, src_partners,
                             dst_partner, context=None):
        _logger.debug('_update_foreign_keys for dst_partner: %s for '
                      'src_partners: %r',
                      dst_partner.id,
                      list(map(operator.attrgetter('id'), src_partners)))

        # find the many2one relation to a partner
        proxy = self.pool.get('res.partner')
        self.get_fk_on(cr, 'res_partner')

        # ignore two tables
        for table, column in cr.fetchall():
            if 'base_partner_merge_' in table:
                continue
            partner_ids = tuple(map(int, src_partners))

            query = ("SELECT column_name FROM information_schema.columns"
                     " WHERE table_name LIKE '%s'") % (table)
            cr.execute(query, ())
            columns = []
            for data in cr.fetchall():
                if data[0] != column:
                    columns.append(data[0])

            query_dic = {
                'table': table,
                'column': column,
                'value': columns[0],
            }
            if len(columns) <= 1:
                # unique key treated
                query = """
                    UPDATE "%(table)s" as ___tu
                    SET %(column)s = %%s
                    WHERE
                        %(column)s = %%s AND
                        NOT EXISTS (
                            SELECT 1
                            FROM "%(table)s" as ___tw
                            WHERE
                                %(column)s = %%s AND
                                ___tu.%(value)s = ___tw.%(value)s
                        )""" % query_dic
                for partner_id in partner_ids:
                    cr.execute(query, (dst_partner.id, partner_id,
                                       dst_partner.id))
            else:
                cr.execute("SAVEPOINT recursive_partner_savepoint")
                try:
                    query = ('UPDATE "%(table)s" SET %(column)s = %%s WHERE '
                             '%(column)s IN %%s') % query_dic
                    cr.execute(query, (dst_partner.id, partner_ids,))

                    if (column == proxy._parent_name
                            and table == 'res_partner'):
                        query = """
                            WITH RECURSIVE cycle(id, parent_id) AS (
                                SELECT id, parent_id FROM res_partner
                                UNION
                                SELECT cycle.id, res_partner.parent_id
                                FROM res_partner, cycle
                                WHERE res_partner.id = cycle.parent_id
                                AND cycle.id != cycle.parent_id
                            )
                            SELECT id FROM cycle
                            WHERE id = parent_id AND id = %s
                        """
                        cr.execute(query, (dst_partner.id,))
                        if cr.fetchall():
                            cr.execute("ROLLBACK TO SAVEPOINT "
                                       "recursive_partner_savepoint")
                finally:
                    cr.execute("RELEASE SAVEPOINT "
                               "recursive_partner_savepoint")

    def _update_reference_fields(self, cr, uid, src_partners, dst_partner,
                                 context=None):
        _logger.debug('_update_reference_fields for dst_partner: %s for '
                      'src_partners: %r',
                      dst_partner.id,
                      list(map(operator.attrgetter('id'), src_partners)))

        def update_records(model, src, field_model='model', field_id='res_id',
                           context=None):
            proxy = self.pool.get(model)
            if proxy is None:
                return
            domain = [(field_model, '=', 'res.partner'),
                      (field_id, '=', src.id)]
            ids = proxy.search(cr, openerp.SUPERUSER_ID,
                               domain, context=context)
            return proxy.write(cr, openerp.SUPERUSER_ID, ids,
                               {field_id: dst_partner.id}, context=context)

        update_records = functools.partial(update_records, context=context)

        for partner in src_partners:
            update_records('base.calendar', src=partner,
                           field_model='model_id.model')
            update_records('ir.attachment', src=partner,
                           field_model='res_model')
            update_records('mail.followers', src=partner,
                           field_model='res_model')
            update_records('mail.message', src=partner)
            update_records('marketing.campaign.workitem', src=partner,
                           field_model='object_id.model')
            update_records('ir.model.data', src=partner)

        proxy = self.pool['ir.model.fields']
        domain = [('ttype', '=', 'reference')]
        record_ids = proxy.search(cr, openerp.SUPERUSER_ID, domain,
                                  context=context)

        for record in proxy.browse(cr, openerp.SUPERUSER_ID, record_ids,
                                   context=context):
            try:
                proxy_model = self.pool[record.model]
            except KeyError:
                # ignore old tables
                continue

            if record.model == 'ir.property':
                continue

            field_type = proxy_model._columns.get(record.name).__class__._type

            if field_type == 'function':
                continue

            for partner in src_partners:
                domain = [
                    (record.name, '=', 'res.partner,%d' % partner.id)
                ]
                model_ids = proxy_model.search(cr, openerp.SUPERUSER_ID,
                                               domain, context=context)
                values = {
                    record.name: 'res.partner,%d' % dst_partner.id,
                }
                proxy_model.write(cr, openerp.SUPERUSER_ID, model_ids, values,
                                  context=context)

    def _update_values(self, cr, uid, src_partners, dst_partner, context=None):
        _logger.debug('_update_values for dst_partner: %s for src_partners: '
                      '%r',
                      dst_partner.id,
                      list(map(operator.attrgetter('id'), src_partners)))

        columns = dst_partner._columns

        def write_serializer(column, item):
            if isinstance(item, browse_record):
                return item.id
            else:
                return item

        values = dict()
        for column, field in columns.iteritems():
            if (field._type not in ('many2many', 'one2many')
                    and not isinstance(field, fields.function)):
                for item in itertools.chain(src_partners, [dst_partner]):
                    if item[column]:
                        values[column] = write_serializer(column,
                                                          item[column])

        values.pop('id', None)
        parent_id = values.pop('parent_id', None)
        dst_partner.write(values)
        if parent_id and parent_id != dst_partner.id:
            try:
                dst_partner.write({'parent_id': parent_id})
            except orm.except_orm:
                _logger.info('Skip recursive partner hierarchies for '
                             'parent_id %s of partner: %s',
                             parent_id, dst_partner.id)

    @mute_logger('openerp.osv.expression', 'openerp.osv.orm')
    def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None):
        proxy = self.pool.get('res.partner')

        partner_ids = proxy.exists(cr, uid, list(partner_ids),
                                   context=context)
        if len(partner_ids) < 2:
            return

        if len(partner_ids) > 3:
            raise orm.except_orm(
                _('Error'),
                _("For safety reasons, you cannot merge more than 3 contacts "
                  "together. You can re-open the wizard several times if "
                  "needed."))

        if (openerp.SUPERUSER_ID != uid
                and len(set(partner.email for partner
                            in proxy.browse(cr, uid, partner_ids,
                                            context=context))) > 1):
            raise orm.except_orm(
                _('Error'),
                _("All contacts must have the same email. Only the "
                  "Administrator can merge contacts with different emails."))

        if dst_partner and dst_partner.id in partner_ids:
            src_partners = proxy.browse(cr, uid,
                                        [id for id in partner_ids
                                         if id != dst_partner.id],
                                        context=context)
        else:
            ordered_partners = self._get_ordered_partner(cr, uid, partner_ids,
                                                         context)
            dst_partner = ordered_partners[-1]
            src_partners = ordered_partners[:-1]
        _logger.info("dst_partner: %s", dst_partner.id)

        if (openerp.SUPERUSER_ID != uid
                and self._model_is_installed(cr, uid, 'account.move.line',
                                             context=context)
                and self.pool.get('account.move.line'
                                  ).search(cr, openerp.SUPERUSER_ID,
                                           [('partner_id',
                                             'in',
                                             [partner.id for partner
                                              in src_partners])],
                                           context=context)):
            raise orm.except_orm(
                _('Error'),
                _("Only the destination contact may be linked to existing "
                  "Journal Items. Please ask the Administrator if you need to"
                  " merge several contacts linked to existing Journal "
                  "Items."))

        call_it = lambda function: function(cr, uid, src_partners,
                                            dst_partner, context=context)

        call_it(self._update_foreign_keys)
        call_it(self._update_reference_fields)
        call_it(self._update_values)

        _logger.info('(uid = %s) merged the partners %r with %s',
                     uid,
                     list(map(operator.attrgetter('id'), src_partners)),
                     dst_partner.id)
        dst_partner.message_post(
            body='%s %s' % (
                _("Merged with the following partners:"),
                ", ".join(
                    '%s<%s>(ID %s)' % (p.name, p.email or 'n/a', p.id)
                    for p in src_partners
                )
            )
        )

        for partner in src_partners:
            partner.unlink()
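
    # Summary of the guards in _merge(): non-administrators may merge at most
    # three partners, all sharing the same email, and none of the source
    # records may be linked to Journal Items. The destination record is kept;
    # the sources are unlinked once their data has been moved onto it.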

    def clean_emails(self, cr, uid, context=None):
        """
        Clean the partners' email addresses. When an email field holds two or
        more addresses, the first cleaned address is written back on the
        partner and a copy of the partner is created for each extra address.
        """
        if context is None:
            context = {}

        proxy_model = self.pool['ir.model.fields']
        field_ids = proxy_model.search(cr, uid,
                                       [('model', '=', 'res.partner'),
                                        ('ttype', 'like', '%2many')],
                                       context=context)
        fields = proxy_model.read(cr, uid, field_ids, context=context)
        reset_fields = dict((field['name'], []) for field in fields)

        proxy_partner = self.pool['res.partner']
        context['active_test'] = False
        ids = proxy_partner.search(cr, uid, [], context=context)

        fields = ['name', 'vat', 'parent_id', 'is_company', 'email']
        partners = proxy_partner.read(cr, uid, ids, fields, context=context)

        partners.sort(key=operator.itemgetter('id'))
        partners_len = len(partners)

        _logger.info('partner_len: %r', partners_len)

        for idx, partner in enumerate(partners):
            if not partner['email']:
                continue

            percent = (idx / float(partners_len)) * 100.0
            _logger.info('idx: %r', idx)
            _logger.info('percent: %r', percent)
            try:
                emails = sanitize_email(partner['email'])
                head, tail = emails[:1], emails[1:]
                email = head[0] if head else False

                proxy_partner.write(cr, uid, [partner['id']],
                                    {'email': email}, context=context)

                for email in tail:
                    values = dict(reset_fields, email=email)
                    proxy_partner.copy(cr, uid, partner['id'], values,
                                       context=context)

            except Exception:
                _logger.exception("There is a problem with this partner: %r",
                                  partner)
                raise
        return True

    def close_cb(self, cr, uid, ids, context=None):
        return {'type': 'ir.actions.act_window_close'}

    def _generate_query(self, fields, maximum_group=100):
        group_fields = ', '.join(fields)

        filters = []
        for field in fields:
            if field in ['email', 'name']:
                filters.append((field, 'IS NOT', 'NULL'))

        criteria = ' AND '.join('%s %s %s' % (field, operator, value)
                                for field, operator, value in filters)

        text = [
            "SELECT min(id), array_agg(id)",
            "FROM res_partner",
        ]

        if criteria:
            text.append('WHERE %s' % criteria)

        text.extend([
            "GROUP BY %s" % group_fields,
            "HAVING COUNT(*) >= 2",
            "ORDER BY min(id)",
        ])

        if maximum_group:
            text.extend([
                "LIMIT %s" % maximum_group,
            ])

        return ' '.join(text)
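
    # Illustrative result of _generate_query(['email', 'name'], 100):
    #   SELECT min(id), array_agg(id) FROM res_partner
    #   WHERE email IS NOT NULL AND name IS NOT NULL
    #   GROUP BY email, name HAVING COUNT(*) >= 2
    #   ORDER BY min(id) LIMIT 100
    # i.e. one row per group of potential duplicates, with their ids
    # aggregated into an array.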

    def _compute_selected_groupby(self, this):
        group_by_str = 'group_by_'
        group_by_len = len(group_by_str)

        fields = [
            key[group_by_len:]
            for key in self._columns.keys()
            if key.startswith(group_by_str)
        ]

        groups = [
            field
            for field in fields
            if getattr(this, '%s%s' % (group_by_str, field), False)
        ]

        if not groups:
            raise orm.except_orm(_('Error'),
                                 _("You have to specify a filter for your "
                                   "selection"))

        return groups

    def next_cb(self, cr, uid, ids, context=None):
        """
        Skip the current line without computing anything.
        """
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        if this.current_line_id:
            this.current_line_id.unlink()
        return self._next_screen(cr, uid, this, context)

    def _get_ordered_partner(self, cr, uid, partner_ids, context=None):
        partners = self.pool.get('res.partner'
                                 ).browse(cr, uid,
                                          list(partner_ids),
                                          context=context)
        ordered_partners = sorted(
            sorted(
                partners,
                key=operator.attrgetter('create_date'),
                reverse=True
            ),
            key=operator.attrgetter('active'),
            reverse=True
        )
        return ordered_partners
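
    # Ordering used to pick a merge destination: active partners come first,
    # newest first, followed by inactive ones; callers take the last element
    # ([-1]) of this list as the destination record.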

    def _next_screen(self, cr, uid, this, context=None):
        this.refresh()
        values = {}
        if this.line_ids:
            # in this case, we try to find the next record.
            current_line = this.line_ids[0]
            current_partner_ids = literal_eval(current_line.aggr_ids)
            values.update({
                'current_line_id': current_line.id,
                'partner_ids': [(6, 0, current_partner_ids)],
                'dst_partner_id': self._get_ordered_partner(
                    cr, uid,
                    current_partner_ids,
                    context
                )[-1].id,
                'state': 'selection',
            })
        else:
            values.update({
                'current_line_id': False,
                'partner_ids': [],
                'state': 'finished',
            })

        this.write(values)

        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def _model_is_installed(self, cr, uid, model, context=None):
        proxy = self.pool.get('ir.model')
        domain = [('model', '=', model)]
        return proxy.search_count(cr, uid, domain, context=context) > 0

    def _partner_use_in(self, cr, uid, aggr_ids, models, context=None):
        """
        Return True if this group of partners is referenced by any of the
        selected models.
        """
        for model, field in models.iteritems():
            proxy = self.pool.get(model)
            domain = [(field, 'in', aggr_ids)]
            if proxy.search_count(cr, uid, domain, context=context):
                return True
        return False

    def compute_models(self, cr, uid, ids, context=None):
        """
        Compute the models (and their partner field) used to exclude partners
        from the merge.
        """
        assert is_integer_list(ids)

        this = self.browse(cr, uid, ids[0], context=context)

        models = {}
        if this.exclude_contact:
            models['res.users'] = 'partner_id'

        if (self._model_is_installed(cr, uid, 'account.move.line',
                                     context=context)
                and this.exclude_journal_item):
            models['account.move.line'] = 'partner_id'

        return models
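
    # For example, with both exclusion options ticked and the account module
    # installed, compute_models() returns
    # {'res.users': 'partner_id', 'account.move.line': 'partner_id'}.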

    def _process_query(self, cr, uid, ids, query, context=None):
        """
        Execute the SELECT query and store each resulting group as a line of
        this wizard.
        """
        proxy = self.pool.get('base.partner.merge.line')
        this = self.browse(cr, uid, ids[0], context=context)
        models = self.compute_models(cr, uid, ids, context=context)
        cr.execute(query)

        counter = 0
        for min_id, aggr_ids in cr.fetchall():
            if models and self._partner_use_in(cr, uid, aggr_ids, models,
                                               context=context):
                continue
            values = {
                'wizard_id': this.id,
                'min_id': min_id,
                'aggr_ids': aggr_ids,
            }

            proxy.create(cr, uid, values, context=context)
            counter += 1

        values = {
            'state': 'selection',
            'number_group': counter,
        }

        this.write(values)

        _logger.info("counter: %s", counter)

    def start_process_cb(self, cr, uid, ids, context=None):
        """
        Start the process:
        * compute the groups of duplicates for the selected criteria,
        * if the user ticked any of the 'exclude_XXX' fields, skip the
          partners matched by them.
        """
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        groups = self._compute_selected_groupby(this)
        query = self._generate_query(groups, this.maximum_group)
        self._process_query(cr, uid, ids, query, context=context)

        return self._next_screen(cr, uid, this, context)

    def automatic_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)
        this = self.browse(cr, uid, ids[0], context=context)
        this.start_process_cb()
        this.refresh()

        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            cr.commit()

        this.write({'state': 'finished'})

        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def parent_migration_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)

        query = """
            SELECT
                min(p1.id),
                array_agg(DISTINCT p1.id)
            FROM
                res_partner as p1
            INNER join
                res_partner as p2
            ON
                p1.email = p2.email AND
                p1.name = p2.name AND
                (p1.parent_id = p2.id OR p1.id = p2.parent_id)
            WHERE
                p2.id IS NOT NULL
            GROUP BY
                p1.email,
                p1.name,
                CASE WHEN p1.parent_id = p2.id THEN p2.id
                    ELSE p1.id
                END
            HAVING COUNT(*) >= 2
            ORDER BY
                min(p1.id)
        """

        self._process_query(cr, uid, ids, query, context=context)

        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            cr.commit()

        this.write({'state': 'finished'})

        cr.execute("""
            UPDATE
                res_partner
            SET
                is_company = NULL,
                parent_id = NULL
            WHERE
                parent_id = id
        """)

        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def update_all_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        # WITH RECURSIVE cycle(id, parent_id) AS (
        #     SELECT id, parent_id FROM res_partner
        #   UNION
        #     SELECT cycle.id, res_partner.parent_id
        #     FROM res_partner, cycle
        #     WHERE res_partner.id = cycle.parent_id AND
        #           cycle.id != cycle.parent_id
        # )
        # UPDATE res_partner
        # SET parent_id = NULL
        # WHERE id in (SELECT id FROM cycle WHERE id = parent_id);

        this = self.browse(cr, uid, ids[0], context=context)

        self.parent_migration_process_cb(cr, uid, ids, context=None)

        list_merge = [
            {'group_by_vat': True,
             'group_by_email': True,
             'group_by_name': True},
            # {'group_by_name': True,
            #  'group_by_is_company': True,
            #  'group_by_parent_id': True},
            # {'group_by_email': True,
            #  'group_by_is_company': True,
            #  'group_by_parent_id': True},
            # {'group_by_name': True,
            #  'group_by_vat': True,
            #  'group_by_is_company': True,
            #  'exclude_journal_item': True},
            # {'group_by_email': True,
            #  'group_by_vat': True,
            #  'group_by_is_company': True,
            #  'exclude_journal_item': True},
            # {'group_by_email': True,
            #  'group_by_is_company': True,
            #  'exclude_contact': True,
            #  'exclude_journal_item': True},
            # {'group_by_name': True,
            #  'group_by_is_company': True,
            #  'exclude_contact': True,
            #  'exclude_journal_item': True}
        ]

        for merge_value in list_merge:
            id = self.create(cr, uid, merge_value, context=context)
            self.automatic_process_cb(cr, uid, [id], context=context)

        cr.execute("""
            UPDATE
                res_partner
            SET
                is_company = NULL
            WHERE
                parent_id IS NOT NULL AND
                is_company IS NOT NULL
        """)

        # cr.execute("""
        #     UPDATE
        #         res_partner as p1
        #     SET
        #         is_company = NULL,
        #         parent_id = (
        #             SELECT p2.id
        #             FROM res_partner as p2
        #             WHERE p2.email = p1.email AND
        #                   p2.parent_id != p2.id
        #             LIMIT 1
        #         )
        #     WHERE
        #         p1.parent_id = p1.id
        # """)

        return self._next_screen(cr, uid, this, context)

    def merge_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)

        partner_ids = set(map(int, this.partner_ids))
        if not partner_ids:
            this.write({'state': 'finished'})
            return {
                'type': 'ir.actions.act_window',
                'res_model': this._name,
                'res_id': this.id,
                'view_mode': 'form',
                'target': 'new',
            }

        self._merge(cr, uid, partner_ids, this.dst_partner_id,
                    context=context)

        if this.current_line_id:
            this.current_line_id.unlink()

        return self._next_screen(cr, uid, this, context)

    def auto_set_parent_id(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        # select partners with a grade, ordered by their number of
        # open/paid invoices
        partner_treated = ['@gmail.com']
        cr.execute("""  SELECT p.id, p.email
                        FROM res_partner as p
                        LEFT JOIN account_invoice as a
                        ON p.id = a.partner_id AND a.state in ('open','paid')
                        WHERE p.grade_id is NOT NULL
                        GROUP BY p.id
                        ORDER BY COUNT(a.id) DESC
                """)
        re_email = re.compile(r".*@")
        for id, email in cr.fetchall():
            # check email domain
            email = re_email.sub("@", email or "")
            if not email or email in partner_treated:
                continue
            partner_treated.append(email)

            # don't update the partners if more than one partner on this
            # email domain has invoices
            cr.execute("""
                SELECT *
                FROM res_partner as p
                WHERE p.id != %s AND p.email LIKE '%%%s' AND
                    EXISTS (SELECT * FROM account_invoice as a
                            WHERE p.id = a.partner_id
                            AND a.state in ('open','paid'))
                """ % (id, email))

            if len(cr.fetchall()) > 1:
                _logger.info("%s MORE THAN ONE COMPANY", email)
                continue

            # to display changed values
            cr.execute("""  SELECT id,email
                            FROM res_partner
                            WHERE parent_id != %s
                            AND id != %s AND email LIKE '%%%s'
                    """ % (id, id, email))
            _logger.info("%r", cr.fetchall())

            # update the parent_id
            cr.execute("""  UPDATE res_partner
                            SET parent_id = %s
                            WHERE id != %s AND email LIKE '%%%s'
                    """ % (id, id, email))
        return False