Vincent Renaville
12 years ago
8 changed files with 1104 additions and 0 deletions
  2  base_partner_merge/__init__.py
 16  base_partner_merge/__openerp__.py
776  base_partner_merge/base_partner_merge.py
123  base_partner_merge/base_partner_merge_view.xml
  3  base_partner_merge/security/ir.model.access.csv
123  base_partner_merge/validate_email.py
 26  base_partner_merge/wizard/__init__.py
 35  base_partner_merge/wizard/portal_wizard.py
@@ -0,0 +1,2 @@
import base_partner_merge
import wizard
@@ -0,0 +1,16 @@
{
    'name': 'Base Partner Merge',
    'author': 'OpenERP S.A.',
    'category': 'Generic Modules/Base',
    'version': '0.1',
    'description': """backport module, to be removed when we switch to saas2 on the private servers""",
    'depends': [
        'base',
        'portal',
    ],
    'data': [
        'security/ir.model.access.csv',
        'base_partner_merge_view.xml',
    ],
    'installable': True,
}
@@ -0,0 +1,776 @@
#!/usr/bin/env python
from __future__ import absolute_import
from email.utils import parseaddr
import functools
import htmlentitydefs
import itertools
import logging
import operator
import re
from ast import literal_eval
from openerp.tools import mute_logger

# Validation Library https://pypi.python.org/pypi/validate_email/1.1
from .validate_email import validate_email

import openerp
from openerp.osv import osv, orm
from openerp.osv import fields
from openerp.osv.orm import browse_record
from openerp.tools.translate import _

pattern = re.compile("&(\w+?);")

_logger = logging.getLogger('base.partner.merge')


# http://www.php2python.com/wiki/function.html-entity-decode/
def html_entity_decode_char(m, defs=htmlentitydefs.entitydefs):
    try:
        return defs[m.group(1)]
    except KeyError:
        return m.group(0)


def html_entity_decode(string):
    return pattern.sub(html_entity_decode_char, string)


def sanitize_email(email):
    assert isinstance(email, basestring) and email

    result = re.subn(r';|/|:', ',',
                     html_entity_decode(email or ''))[0].split(',')

    emails = [parseaddr(email)[1]
              for item in result
              for email in item.split()]

    return [email.lower()
            for email in emails
            if validate_email(email)]

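
# Illustrative sketch (editor's note, not part of the original module): given a
# messy value such as "John <John.Doe@Example.com>; jane@example.org",
# sanitize_email() splits on ';', '/', ':' and whitespace, keeps only the
# RFC 2822-valid addresses and lowercases them, e.g.:
#     sanitize_email("John <John.Doe@Example.com>; jane@example.org")
#     # -> ['john.doe@example.com', 'jane@example.org']
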
def is_integer_list(ids):
    return all(isinstance(i, (int, long)) for i in ids)


class ResPartner(osv.Model):
    _inherit = 'res.partner'

    _columns = {
        'id': fields.integer('Id', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
    }


class MergePartnerLine(osv.TransientModel):
    _name = 'base.partner.merge.line'

    _columns = {
        'wizard_id': fields.many2one('base.partner.merge.automatic.wizard',
                                     'Wizard'),
        'min_id': fields.integer('MinID'),
        'aggr_ids': fields.char('Ids', required=True),
    }

    _order = 'min_id asc'

class MergePartnerAutomatic(osv.TransientModel):
    """
    The idea behind this wizard is to build a list of potential partners to
    merge. We use two models: the first one is the wizard shown to the end
    user, and the second one holds the partner lines to merge.
    """
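    # Editorial note (not in the original source): the wizard moves through the
    # 'state' values declared below -- 'option' (choose the grouping criteria),
    # 'selection' (review one group of duplicates and pick a destination
    # contact), then 'finished' once every group has been processed.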
    _name = 'base.partner.merge.automatic.wizard'

    _columns = {
        # Group by
        'group_by_email': fields.boolean('Email'),
        'group_by_name': fields.boolean('Name'),
        'group_by_is_company': fields.boolean('Is Company'),
        'group_by_vat': fields.boolean('VAT'),
        'group_by_parent_id': fields.boolean('Parent Company'),

        'state': fields.selection([('option', 'Option'),
                                   ('selection', 'Selection'),
                                   ('finished', 'Finished')],
                                  'State',
                                  readonly=True,
                                  required=True),
        'number_group': fields.integer("Group of Contacts", readonly=True),
        'current_line_id': fields.many2one('base.partner.merge.line', 'Current Line'),
        'line_ids': fields.one2many('base.partner.merge.line', 'wizard_id', 'Lines'),
        'partner_ids': fields.many2many('res.partner', string='Contacts'),
        'dst_partner_id': fields.many2one('res.partner', string='Destination Contact'),

        'exclude_contact': fields.boolean('A user associated to the contact'),
        'exclude_journal_item': fields.boolean('Journal Items associated to the contact'),
        'maximum_group': fields.integer("Maximum of Group of Contacts"),
    }

    def default_get(self, cr, uid, fields, context=None):
        if context is None:
            context = {}
        res = super(MergePartnerAutomatic, self).default_get(cr, uid, fields, context)
        if context.get('active_model') == 'res.partner' and context.get('active_ids'):
            partner_ids = context['active_ids']
            res['state'] = 'selection'
            res['partner_ids'] = partner_ids
            res['dst_partner_id'] = self._get_ordered_partner(cr, uid, partner_ids, context=context)[-1].id
        return res

    _defaults = {
        'state': 'option'
    }

    def get_fk_on(self, cr, table):
        q = """ SELECT cl1.relname as table,
                       att1.attname as column
                  FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                       pg_attribute as att1, pg_attribute as att2
                 WHERE con.conrelid = cl1.oid
                   AND con.confrelid = cl2.oid
                   AND array_lower(con.conkey, 1) = 1
                   AND con.conkey[1] = att1.attnum
                   AND att1.attrelid = cl1.oid
                   AND cl2.relname = %s
                   AND att2.attname = 'id'
                   AND array_lower(con.confkey, 1) = 1
                   AND con.confkey[1] = att2.attnum
                   AND att2.attrelid = cl2.oid
                   AND con.contype = 'f'
            """
        return cr.execute(q, (table,))

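    # Illustrative note (editor's addition, not in the original source):
    # get_fk_on(cr, 'res_partner') leaves the cursor loaded with
    # (table, column) pairs for every single-column foreign key pointing at
    # res_partner.id, e.g. rows such as ('account_invoice', 'partner_id') or
    # ('res_users', 'partner_id'), which _update_foreign_keys() below then
    # repoints to the destination partner.
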
    def _update_foreign_keys(self, cr, uid, src_partners, dst_partner, context=None):
        _logger.debug('_update_foreign_keys for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

        # find the many2one relation to a partner
        proxy = self.pool.get('res.partner')
        self.get_fk_on(cr, 'res_partner')

        # ignore two tables

        for table, column in cr.fetchall():
            if 'base_partner_merge_' in table:
                continue
            partner_ids = tuple(map(int, src_partners))

            query = "SELECT column_name FROM information_schema.columns WHERE table_name LIKE '%s'" % (table)
            cr.execute(query, ())
            columns = []
            for data in cr.fetchall():
                if data[0] != column:
                    columns.append(data[0])

            query_dic = {
                'table': table,
                'column': column,
                'value': columns[0],
            }
            if len(columns) <= 1:
                # unique key treated
                query = """
                    UPDATE "%(table)s" as ___tu
                    SET %(column)s = %%s
                    WHERE
                        %(column)s = %%s AND
                        NOT EXISTS (
                            SELECT 1
                            FROM "%(table)s" as ___tw
                            WHERE
                                %(column)s = %%s AND
                                ___tu.%(value)s = ___tw.%(value)s
                        )""" % query_dic
                for partner_id in partner_ids:
                    cr.execute(query, (dst_partner.id, partner_id, dst_partner.id))
            else:
                cr.execute("SAVEPOINT recursive_partner_savepoint")
                try:
                    query = 'UPDATE "%(table)s" SET %(column)s = %%s WHERE %(column)s IN %%s' % query_dic
                    cr.execute(query, (dst_partner.id, partner_ids,))

                    if column == proxy._parent_name and table == 'res_partner':
                        query = """
                            WITH RECURSIVE cycle(id, parent_id) AS (
                                    SELECT id, parent_id FROM res_partner
                                UNION
                                    SELECT cycle.id, res_partner.parent_id
                                    FROM res_partner, cycle
                                    WHERE res_partner.id = cycle.parent_id AND
                                          cycle.id != cycle.parent_id
                            )
                            SELECT id FROM cycle WHERE id = parent_id AND id = %s
                        """
                        cr.execute(query, (dst_partner.id,))
                        if cr.fetchall():
                            cr.execute("ROLLBACK TO SAVEPOINT recursive_partner_savepoint")
                finally:
                    cr.execute("RELEASE SAVEPOINT recursive_partner_savepoint")

    def _update_reference_fields(self, cr, uid, src_partners, dst_partner, context=None):
        _logger.debug('_update_reference_fields for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

        def update_records(model, src, field_model='model', field_id='res_id', context=None):
            proxy = self.pool.get(model)
            if proxy is None:
                return
            domain = [(field_model, '=', 'res.partner'), (field_id, '=', src.id)]
            ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
            return proxy.write(cr, openerp.SUPERUSER_ID, ids, {field_id: dst_partner.id}, context=context)

        update_records = functools.partial(update_records, context=context)

        for partner in src_partners:
            update_records('base.calendar', src=partner, field_model='model_id.model')
            update_records('ir.attachment', src=partner, field_model='res_model')
            update_records('mail.followers', src=partner, field_model='res_model')
            update_records('mail.message', src=partner)
            update_records('marketing.campaign.workitem', src=partner, field_model='object_id.model')
            update_records('ir.model.data', src=partner)

        proxy = self.pool['ir.model.fields']
        domain = [('ttype', '=', 'reference')]
        record_ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)

        for record in proxy.browse(cr, openerp.SUPERUSER_ID, record_ids, context=context):
            try:
                proxy_model = self.pool[record.model]
            except KeyError:
                # ignore old tables
                continue

            if record.model == 'ir.property':
                continue

            field_type = proxy_model._columns.get(record.name).__class__._type

            if field_type == 'function':
                continue

            for partner in src_partners:
                domain = [
                    (record.name, '=', 'res.partner,%d' % partner.id)
                ]
                model_ids = proxy_model.search(cr, openerp.SUPERUSER_ID, domain, context=context)
                values = {
                    record.name: 'res.partner,%d' % dst_partner.id,
                }
                proxy_model.write(cr, openerp.SUPERUSER_ID, model_ids, values, context=context)

    def _update_values(self, cr, uid, src_partners, dst_partner, context=None):
        _logger.debug('_update_values for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

        columns = dst_partner._columns

        def write_serializer(column, item):
            if isinstance(item, browse_record):
                return item.id
            else:
                return item

        values = dict()
        for column, field in columns.iteritems():
            if field._type not in ('many2many', 'one2many') and not isinstance(field, fields.function):
                for item in itertools.chain(src_partners, [dst_partner]):
                    if item[column]:
                        values[column] = write_serializer(column, item[column])

        values.pop('id', None)
        parent_id = values.pop('parent_id', None)
        dst_partner.write(values)
        if parent_id and parent_id != dst_partner.id:
            try:
                dst_partner.write({'parent_id': parent_id})
            except (osv.except_osv, orm.except_orm):
                _logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id)

    @mute_logger('openerp.osv.expression', 'openerp.osv.orm')
    def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None):
        proxy = self.pool.get('res.partner')

        partner_ids = proxy.exists(cr, uid, list(partner_ids), context=context)
        if len(partner_ids) < 2:
            return

        if len(partner_ids) > 3:
            raise osv.except_osv(_('Error'), _("For safety reasons, you cannot merge more than 3 contacts together. You can re-open the wizard several times if needed."))

        if openerp.SUPERUSER_ID != uid and len(set(partner.email for partner in proxy.browse(cr, uid, partner_ids, context=context))) > 1:
            raise osv.except_osv(_('Error'), _("All contacts must have the same email. Only the Administrator can merge contacts with different emails."))

        if dst_partner and dst_partner.id in partner_ids:
            src_partners = proxy.browse(cr, uid, [id for id in partner_ids if id != dst_partner.id], context=context)
        else:
            ordered_partners = self._get_ordered_partner(cr, uid, partner_ids, context)
            dst_partner = ordered_partners[-1]
            src_partners = ordered_partners[:-1]
        _logger.info("dst_partner: %s", dst_partner.id)

        if openerp.SUPERUSER_ID != uid and self._model_is_installed(cr, uid, 'account.move.line', context=context) and \
                self.pool.get('account.move.line').search(cr, openerp.SUPERUSER_ID, [('partner_id', 'in', [partner.id for partner in src_partners])], context=context):
            raise osv.except_osv(_('Error'), _("Only the destination contact may be linked to existing Journal Items. Please ask the Administrator if you need to merge several contacts linked to existing Journal Items."))

        call_it = lambda function: function(cr, uid, src_partners, dst_partner,
                                            context=context)

        call_it(self._update_foreign_keys)
        call_it(self._update_reference_fields)
        call_it(self._update_values)

        _logger.info('(uid = %s) merged the partners %r with %s', uid, list(map(operator.attrgetter('id'), src_partners)), dst_partner.id)
        dst_partner.message_post(body='%s %s' % (_("Merged with the following partners:"), ", ".join('%s<%s>(ID %s)' % (p.name, p.email or 'n/a', p.id) for p in src_partners)))

        for partner in src_partners:
            partner.unlink()

    def clean_emails(self, cr, uid, context=None):
        """
        Clean the email addresses of the partners: if an email field holds a
        minimum of two addresses, the first cleaned address is kept on the
        partner and a new partner (a copy of the previous one) is created for
        each additional address.
        """
        if context is None:
            context = {}

        proxy_model = self.pool['ir.model.fields']
        field_ids = proxy_model.search(cr, uid, [('model', '=', 'res.partner'),
                                                 ('ttype', 'like', '%2many')],
                                       context=context)
        fields = proxy_model.read(cr, uid, field_ids, context=context)
        reset_fields = dict((field['name'], []) for field in fields)

        proxy_partner = self.pool['res.partner']
        context['active_test'] = False
        ids = proxy_partner.search(cr, uid, [], context=context)

        fields = ['name', 'vat', 'parent_id', 'is_company', 'email']
        partners = proxy_partner.read(cr, uid, ids, fields, context=context)

        partners.sort(key=operator.itemgetter('id'))
        partners_len = len(partners)

        _logger.info('partner_len: %r', partners_len)

        for idx, partner in enumerate(partners):
            if not partner['email']:
                continue

            percent = (idx / float(partners_len)) * 100.0
            _logger.info('idx: %r', idx)
            _logger.info('percent: %r', percent)
            try:
                emails = sanitize_email(partner['email'])
                head, tail = emails[:1], emails[1:]
                email = head[0] if head else False

                proxy_partner.write(cr, uid, [partner['id']],
                                    {'email': email}, context=context)

                for email in tail:
                    values = dict(reset_fields, email=email)
                    proxy_partner.copy(cr, uid, partner['id'], values,
                                       context=context)

            except Exception:
                _logger.exception("There is a problem with this partner: %r", partner)
                raise
        return True

    def close_cb(self, cr, uid, ids, context=None):
        return {'type': 'ir.actions.act_window_close'}

    def _generate_query(self, fields, maximum_group=100):
        group_fields = ', '.join(fields)

        filters = []
        for field in fields:
            if field in ['email', 'name']:
                filters.append((field, 'IS NOT', 'NULL'))

        criteria = ' AND '.join('%s %s %s' % (field, operator, value)
                                for field, operator, value in filters)

        text = [
            "SELECT min(id), array_agg(id)",
            "FROM res_partner",
        ]

        if criteria:
            text.append('WHERE %s' % criteria)

        text.extend([
            "GROUP BY %s" % group_fields,
            "HAVING COUNT(*) >= 2",
            "ORDER BY min(id)",
        ])

        if maximum_group:
            text.extend([
                "LIMIT %s" % maximum_group,
            ])

        return ' '.join(text)

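    # Illustrative sketch (editor's addition, not in the original source): for
    # fields ['email', 'name'] and the parameter default maximum_group of 100,
    # _generate_query() builds roughly:
    #     SELECT min(id), array_agg(id) FROM res_partner
    #     WHERE email IS NOT NULL AND name IS NOT NULL
    #     GROUP BY email, name HAVING COUNT(*) >= 2
    #     ORDER BY min(id) LIMIT 100
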
    def _compute_selected_groupby(self, this):
        group_by_str = 'group_by_'
        group_by_len = len(group_by_str)

        fields = [
            key[group_by_len:]
            for key in self._columns.keys()
            if key.startswith(group_by_str)
        ]

        groups = [
            field
            for field in fields
            if getattr(this, '%s%s' % (group_by_str, field), False)
        ]

        if not groups:
            raise osv.except_osv(_('Error'),
                                 _("You have to specify a filter for your selection"))

        return groups

    def next_cb(self, cr, uid, ids, context=None):
        """
        Skip the current group of contacts without computing anything.
        """
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        if this.current_line_id:
            this.current_line_id.unlink()
        return self._next_screen(cr, uid, this, context)

    def _get_ordered_partner(self, cr, uid, partner_ids, context=None):
        partners = self.pool.get('res.partner').browse(cr, uid, list(partner_ids), context=context)
        ordered_partners = sorted(sorted(partners,
                                         key=operator.attrgetter('create_date'), reverse=True),
                                  key=operator.attrgetter('active'), reverse=True)
        return ordered_partners

    def _next_screen(self, cr, uid, this, context=None):
        this.refresh()
        values = {}
        if this.line_ids:
            # in this case, we try to find the next record.
            current_line = this.line_ids[0]
            current_partner_ids = literal_eval(current_line.aggr_ids)
            values.update({
                'current_line_id': current_line.id,
                'partner_ids': [(6, 0, current_partner_ids)],
                'dst_partner_id': self._get_ordered_partner(cr, uid, current_partner_ids, context)[-1].id,
                'state': 'selection',
            })
        else:
            values.update({
                'current_line_id': False,
                'partner_ids': [],
                'state': 'finished',
            })

        this.write(values)

        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def _model_is_installed(self, cr, uid, model, context=None):
        proxy = self.pool.get('ir.model')
        domain = [('model', '=', model)]
        return proxy.search_count(cr, uid, domain, context=context) > 0

    def _partner_use_in(self, cr, uid, aggr_ids, models, context=None):
        """
        Check whether this group of partners has any occurrence in one of the
        selected models.
        """
        for model, field in models.iteritems():
            proxy = self.pool.get(model)
            domain = [(field, 'in', aggr_ids)]
            if proxy.search_count(cr, uid, domain, context=context):
                return True
        return False

    def compute_models(self, cr, uid, ids, context=None):
        """
        Compute the different models needed by the system if you want to
        exclude some partners.
        """
        assert is_integer_list(ids)

        this = self.browse(cr, uid, ids[0], context=context)

        models = {}
        if this.exclude_contact:
            models['res.users'] = 'partner_id'

        if self._model_is_installed(cr, uid, 'account.move.line', context=context) and this.exclude_journal_item:
            models['account.move.line'] = 'partner_id'

        return models

    def _process_query(self, cr, uid, ids, query, context=None):
        """
        Execute the select request and write the result in this wizard
        """
        proxy = self.pool.get('base.partner.merge.line')
        this = self.browse(cr, uid, ids[0], context=context)
        models = self.compute_models(cr, uid, ids, context=context)
        cr.execute(query)

        counter = 0
        for min_id, aggr_ids in cr.fetchall():
            if models and self._partner_use_in(cr, uid, aggr_ids, models, context=context):
                continue
            values = {
                'wizard_id': this.id,
                'min_id': min_id,
                'aggr_ids': aggr_ids,
            }

            proxy.create(cr, uid, values, context=context)
            counter += 1

        values = {
            'state': 'selection',
            'number_group': counter,
        }

        this.write(values)

        _logger.info("counter: %s", counter)

    def start_process_cb(self, cr, uid, ids, context=None):
        """
        Start the process:
        * compute the groups of duplicated contacts for the selected criteria,
        * if the user has ticked the 'exclude_XXX' fields, skip those partners.
        """
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        groups = self._compute_selected_groupby(this)
        query = self._generate_query(groups, this.maximum_group)
        self._process_query(cr, uid, ids, query, context=context)

        return self._next_screen(cr, uid, this, context)

    def automatic_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)
        this = self.browse(cr, uid, ids[0], context=context)
        this.start_process_cb()
        this.refresh()

        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            cr.commit()

        this.write({'state': 'finished'})
        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def parent_migration_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)

        query = """
            SELECT
                min(p1.id),
                array_agg(DISTINCT p1.id)
            FROM
                res_partner as p1
            INNER join
                res_partner as p2
            ON
                p1.email = p2.email AND
                p1.name = p2.name AND
                (p1.parent_id = p2.id OR p1.id = p2.parent_id)
            WHERE
                p2.id IS NOT NULL
            GROUP BY
                p1.email,
                p1.name,
                CASE WHEN p1.parent_id = p2.id THEN p2.id
                    ELSE p1.id
                END
            HAVING COUNT(*) >= 2
            ORDER BY
                min(p1.id)
        """

        self._process_query(cr, uid, ids, query, context=context)

        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            cr.commit()

        this.write({'state': 'finished'})

        cr.execute("""
            UPDATE
                res_partner
            SET
                is_company = NULL,
                parent_id = NULL
            WHERE
                parent_id = id
        """)

        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    def update_all_process_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        # WITH RECURSIVE cycle(id, parent_id) AS (
        #     SELECT id, parent_id FROM res_partner
        #   UNION
        #     SELECT cycle.id, res_partner.parent_id
        #     FROM res_partner, cycle
        #     WHERE res_partner.id = cycle.parent_id AND
        #           cycle.id != cycle.parent_id
        # )
        # UPDATE res_partner
        # SET parent_id = NULL
        # WHERE id in (SELECT id FROM cycle WHERE id = parent_id);

        this = self.browse(cr, uid, ids[0], context=context)

        self.parent_migration_process_cb(cr, uid, ids, context=None)

        list_merge = [
            {'group_by_vat': True, 'group_by_email': True, 'group_by_name': True},
            # {'group_by_name': True, 'group_by_is_company': True, 'group_by_parent_id': True},
            # {'group_by_email': True, 'group_by_is_company': True, 'group_by_parent_id': True},
            # {'group_by_name': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
            # {'group_by_email': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
            # {'group_by_email': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True},
            # {'group_by_name': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True}
        ]

        for merge_value in list_merge:
            id = self.create(cr, uid, merge_value, context=context)
            self.automatic_process_cb(cr, uid, [id], context=context)

        cr.execute("""
            UPDATE
                res_partner
            SET
                is_company = NULL
            WHERE
                parent_id IS NOT NULL AND
                is_company IS NOT NULL
        """)

        # cr.execute("""
        #     UPDATE
        #         res_partner as p1
        #     SET
        #         is_company = NULL,
        #         parent_id = (
        #             SELECT p2.id
        #             FROM res_partner as p2
        #             WHERE p2.email = p1.email AND
        #                   p2.parent_id != p2.id
        #             LIMIT 1
        #         )
        #     WHERE
        #         p1.parent_id = p1.id
        # """)

        return self._next_screen(cr, uid, this, context)

    def merge_cb(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)

        partner_ids = set(map(int, this.partner_ids))
        if not partner_ids:
            this.write({'state': 'finished'})
            return {
                'type': 'ir.actions.act_window',
                'res_model': this._name,
                'res_id': this.id,
                'view_mode': 'form',
                'target': 'new',
            }

        self._merge(cr, uid, partner_ids, this.dst_partner_id, context=context)

        if this.current_line_id:
            this.current_line_id.unlink()

        return self._next_screen(cr, uid, this, context)

    def auto_set_parent_id(self, cr, uid, ids, context=None):
        assert is_integer_list(ids)

        # order the partners by their number of open/paid invoices (most invoiced first)
        partner_treated = ['@gmail.com']
        cr.execute("""  SELECT p.id, p.email
                        FROM res_partner as p
                        LEFT JOIN account_invoice as a
                        ON p.id = a.partner_id AND a.state in ('open', 'paid')
                        WHERE p.grade_id is NOT NULL
                        GROUP BY p.id
                        ORDER BY COUNT(a.id) DESC
                """)
        re_email = re.compile(r".*@")
        for id, email in cr.fetchall():
            # check email domain
            email = re_email.sub("@", email or "")
            if not email or email in partner_treated:
                continue
            partner_treated.append(email)

            # don't update the partners if more than one of them has invoices
            cr.execute("""  SELECT *
                            FROM res_partner as p
                            WHERE p.id != %s AND p.email LIKE '%%%s' AND
                                  EXISTS (SELECT * FROM account_invoice as a WHERE p.id = a.partner_id AND a.state in ('open', 'paid'))
                    """ % (id, email))

            if len(cr.fetchall()) > 1:
                _logger.info("%s MORE OF ONE COMPANY", email)
                continue

            # to display changed values
            cr.execute("""  SELECT id, email
                            FROM res_partner
                            WHERE parent_id != %s AND id != %s AND email LIKE '%%%s'
                    """ % (id, id, email))
            _logger.info("%r", cr.fetchall())

            # upgrade
            cr.execute("""  UPDATE res_partner
                            SET parent_id = %s
                            WHERE id != %s AND email LIKE '%%%s'
                    """ % (id, id, email))
        return False
@@ -0,0 +1,123 @@
<?xml version="1.0" encoding="UTF-8"?>
<openerp>
    <data>
        <!-- the sequence of the configuration sub menu is 30 -->
        <menuitem id='root_menu' name='Tools' parent='base.menu_base_partner' sequence="25"/>

        <record model="ir.actions.act_window" id="base_partner_merge_automatic_act">
            <field name="name">Deduplicate Contacts</field>
            <field name="res_model">base.partner.merge.automatic.wizard</field>
            <field name="view_type">form</field>
            <field name="view_mode">form</field>
            <field name="target">new</field>
            <field name="context">{'active_test': False}</field>
        </record>

        <menuitem id='partner_merge_automatic_menu'
                  action='base_partner_merge_automatic_act'
                  groups='base.group_system'
                  parent='root_menu' />

        <record model='ir.ui.view' id='base_partner_merge_automatic_wizard_form'>
            <field name='name'>base.partner.merge.automatic.wizard.form</field>
            <field name='model'>base.partner.merge.automatic.wizard</field>
            <field name='arch' type='xml'>
                <form string='Automatic Merge Wizard' version='7.0'>
                    <header>
                        <button name='merge_cb' string='Merge Selection'
                                class='oe_highlight'
                                type='object'
                                attrs="{'invisible': [('state', 'in', ('option', 'finished'))]}"
                                />
                        <button name='next_cb' string='Skip these contacts'
                                type='object' class='oe_link'
                                attrs="{'invisible': [('state', '!=', 'selection')]}" />
                        <button name='start_process_cb'
                                string='Merge with Manual Check'
                                type='object' class='oe_highlight'
                                attrs="{'invisible': [('state', '!=', 'option')]}" />
                        <button name='automatic_process_cb'
                                string='Merge Automatically'
                                type='object' class='oe_highlight'
                                confirm="Are you sure you want to execute the automatic merge of your contacts?"
                                attrs="{'invisible': [('state', '!=', 'option')]}" />
                        <button name='update_all_process_cb'
                                string='Merge Automatically all process'
                                type='object'
                                confirm="Are you sure you want to execute the list of automatic merges of your contacts?"
                                attrs="{'invisible': [('state', '!=', 'option')]}" />
                        <span class="or_cancel" attrs="{'invisible': [('state', '=', 'finished')]}">or
                            <button name="close_cb" special="nosave" string="Cancel" type="object" class="oe_link oe_inline"/>
                        </span>
                        <span class="or_cancel" attrs="{'invisible': [('state', '!=', 'finished')]}">
                            <button name="close_cb" special="nosave"
                                    string="Close"
                                    type="object"
                                    class="oe_link oe_inline"/>
                        </span>
                    </header>
                    <sheet>
                        <group attrs="{'invisible': [('state', '!=', 'finished')]}" col="1">
                            <h2>There are no more contacts to merge for this request...</h2>
                            <button name="%(base_partner_merge_automatic_act)d" string="Deduplicate the other Contacts" class="oe_highlight"
                                    type="action"/>
                        </group>
                        <p class="oe_grey" attrs="{'invisible': [('state', '!=', 'option')]}">
                            Select the list of fields used to search for
                            duplicated records. If you select several fields,
                            OpenERP will only propose to merge the records having
                            all of these fields in common (not just one of them).
                        </p>
                        <group attrs="{'invisible': ['|', ('state', 'not in', ('selection', 'finished')), ('number_group', '=', 0)]}">
                            <field name="state" invisible="1" />
                            <field name="number_group"/>
                        </group>
                        <group string="Search duplicates based on duplicated data in"
                               attrs="{'invisible': [('state', 'not in', ('option',))]}">
                            <field name='group_by_email' />
                            <field name='group_by_name' />
                            <field name='group_by_is_company' />
                            <field name='group_by_vat' />
                            <field name='group_by_parent_id' />
                        </group>
                        <group string="Exclude contacts having"
                               attrs="{'invisible': [('state', 'not in', ('option',))]}">
                            <field name='exclude_contact' />
                            <field name='exclude_journal_item' />
                        </group>
                        <separator string="Options" attrs="{'invisible': [('state', 'not in', ('option',))]}"/>
                        <group attrs="{'invisible': [('state', 'not in', ('option', 'finished'))]}">
                            <field name='maximum_group' attrs="{'readonly': [('state', 'in', ('finished',))]}"/>
                        </group>
                        <separator string="Merge the following contacts"
                                   attrs="{'invisible': [('state', 'in', ('option', 'finished'))]}"/>
                        <group attrs="{'invisible': [('state', 'in', ('option', 'finished'))]}" col="1">
                            <p class="oe_grey">
                                The selected contacts will be merged together. All
                                documents linking to one of these contacts will be
                                redirected to the aggregated contact. You can remove
                                contacts from this list to avoid merging them.
                            </p>
                            <field name="dst_partner_id" domain="[('id', 'in', partner_ids and partner_ids[0] and partner_ids[0][2] or False)]" attrs="{'required': [('state', '=', 'selection')]}"/>
                            <field name="partner_ids" nolabel="1">
                                <tree string="Partners">
                                    <field name="id" />
                                    <field name="display_name" />
                                    <field name="email" />
                                    <field name="is_company" />
                                    <field name="vat" />
                                    <field name="country_id" />
                                </tree>
                            </field>
                        </group>
                    </sheet>
                </form>
            </field>
        </record>

        <act_window id="action_partner_merge" res_model="base.partner.merge.automatic.wizard" src_model="res.partner"
                    target="new" multi="True" key2="client_action_multi" view_mode="form" name="Automatic Merge"/>

    </data>
</openerp>
@@ -0,0 +1,3 @@
"id","name","model_id:id","group_id:id","perm_read","perm_write","perm_create","perm_unlink"
"access_base_partner_merge_line_manager","base_partner_merge_line.manager","model_base_partner_merge_line","base.group_system",1,1,1,1
"access_base_partner_merge_manager","base_partner_merge.manager","model_base_partner_merge_automatic_wizard","base.group_system",1,1,1,1
@@ -0,0 +1,123 @@
# RFC 2822 - style email validation for Python
# (c) 2012 Syrus Akbary <me@syrusakbary.com>
# Extended from (c) 2011 Noel Bush <noel@aitools.org>
# for support of mx and user check
# This code is made available to you under the GNU LGPL v3.
#
# This module provides a single method, valid_email_address(),
# which returns True or False to indicate whether a given address
# is valid according to the 'addr-spec' part of the specification
# given in RFC 2822.  Ideally, we would like to find this
# in some other library, already thoroughly tested and well-
# maintained.  The standard Python library email.utils
# contains a parse_addr() function, but it is not sufficient
# to detect many malformed addresses.
#
# This implementation aims to be faithful to the RFC, with the
# exception of a circular definition (see comments below), and
# with the omission of the pattern components marked as "obsolete".

import re
import smtplib
import socket

try:
    import DNS
    ServerError = DNS.ServerError
except:
    DNS = None
    class ServerError(Exception): pass

# All we are really doing is comparing the input string to one
# gigantic regular expression.  But building that regexp, and
# ensuring its correctness, is made much easier by assembling it
# from the "tokens" defined by the RFC.  Each of these tokens is
# tested in the accompanying unit test file.
#
# The section of RFC 2822 from which each pattern component is
# derived is given in an accompanying comment.
#
# (To make things simple, every string below is given as 'raw',
# even when it's not strictly necessary.  This way we don't forget
# when it is necessary.)
#
WSP = r'[ \t]'                                    # see 2.2.2. Structured Header Field Bodies
CRLF = r'(?:\r\n)'                                # see 2.2.3. Long Header Fields
NO_WS_CTL = r'\x01-\x08\x0b\x0c\x0f-\x1f\x7f'     # see 3.2.1. Primitive Tokens
QUOTED_PAIR = r'(?:\\.)'                          # see 3.2.2. Quoted characters
FWS = r'(?:(?:' + WSP + r'*' + CRLF + r')?' + \
    WSP + r'+)'                                   # see 3.2.3. Folding white space and comments
CTEXT = r'[' + NO_WS_CTL + \
    r'\x21-\x27\x2a-\x5b\x5d-\x7e]'               # see 3.2.3
CCONTENT = r'(?:' + CTEXT + r'|' + \
    QUOTED_PAIR + r')'                            # see 3.2.3 (NB: The RFC includes COMMENT here
                                                  # as well, but that would be circular.)
COMMENT = r'\((?:' + FWS + r'?' + CCONTENT + \
    r')*' + FWS + r'?\)'                          # see 3.2.3
CFWS = r'(?:' + FWS + r'?' + COMMENT + ')*(?:' + \
    FWS + '?' + COMMENT + '|' + FWS + ')'         # see 3.2.3
ATEXT = r'[\w!#$%&\'\*\+\-/=\?\^`\{\|\}~]'        # see 3.2.4. Atom
ATOM = CFWS + r'?' + ATEXT + r'+' + CFWS + r'?'   # see 3.2.4
DOT_ATOM_TEXT = ATEXT + r'+(?:\.' + ATEXT + r'+)*'    # see 3.2.4
DOT_ATOM = CFWS + r'?' + DOT_ATOM_TEXT + CFWS + r'?'  # see 3.2.4
QTEXT = r'[' + NO_WS_CTL + \
    r'\x21\x23-\x5b\x5d-\x7e]'                    # see 3.2.5. Quoted strings
QCONTENT = r'(?:' + QTEXT + r'|' + \
    QUOTED_PAIR + r')'                            # see 3.2.5
QUOTED_STRING = CFWS + r'?' + r'"(?:' + FWS + \
    r'?' + QCONTENT + r')*' + FWS + \
    r'?' + r'"' + CFWS + r'?'
LOCAL_PART = r'(?:' + DOT_ATOM + r'|' + \
    QUOTED_STRING + r')'                          # see 3.4.1. Addr-spec specification
DTEXT = r'[' + NO_WS_CTL + r'\x21-\x5a\x5e-\x7e]' # see 3.4.1
DCONTENT = r'(?:' + DTEXT + r'|' + \
    QUOTED_PAIR + r')'                            # see 3.4.1
DOMAIN_LITERAL = CFWS + r'?' + r'\[' + \
    r'(?:' + FWS + r'?' + DCONTENT + \
    r')*' + FWS + r'?\]' + CFWS + r'?'            # see 3.4.1
DOMAIN = r'(?:' + DOT_ATOM + r'|' + \
    DOMAIN_LITERAL + r')'                         # see 3.4.1
ADDR_SPEC = LOCAL_PART + r'@' + DOMAIN            # see 3.4.1

# A valid address will match exactly the 3.4.1 addr-spec.
VALID_ADDRESS_REGEXP = '^' + ADDR_SPEC + '$'


def validate_email(email, check_mx=False, verify=False):
    """Indicate whether the given string is a valid email address
    according to the 'addr-spec' portion of RFC 2822 (see section
    3.4.1).  Parts of the spec that are marked obsolete are *not*
    included in this test, and certain arcane constructions that
    depend on circular definitions in the spec may not pass, but in
    general this should correctly identify any email address likely
    to be in use as of 2011."""
    try:
        assert re.match(VALID_ADDRESS_REGEXP, email) is not None
        check_mx |= verify
        if check_mx:
            if not DNS: raise Exception('For check the mx records or check if the email exists you must have installed pyDNS python package')
            DNS.DiscoverNameServers()
            hostname = email[email.find('@')+1:]
            mx_hosts = DNS.mxlookup(hostname)
            for mx in mx_hosts:
                try:
                    smtp = smtplib.SMTP()
                    smtp.connect(mx[1])
                    if not verify: return True
                    status, _ = smtp.helo()
                    if status != 250: continue
                    smtp.mail('')
                    status, _ = smtp.rcpt(email)
                    if status != 250: return False
                    break
                except smtplib.SMTPServerDisconnected: # Server not permits verify user
                    break
                except smtplib.SMTPConnectError:
                    continue
    except (AssertionError, ServerError):
        return False
    return True

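# Illustrative usage sketch (editor's note, not part of the vendored file): the
# merge module above only calls validate_email(address) for the syntax check;
# check_mx and verify stay False, so no DNS lookup or SMTP connection is made:
#     validate_email('john.doe@example.com')   # -> True
#     validate_email('not-an-address')         # -> False
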
# import sys

# sys.modules[__name__],sys.modules['validate_email_module'] = validate_email,sys.modules[__name__]
# from validate_email_module import *
@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Business Applications
#    Copyright (c) 2013 OpenERP S.A. <http://openerp.com>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import portal_wizard

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Business Applications
#    Copyright (c) 2013 OpenERP S.A. <http://openerp.com>
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import osv
from openerp.tools.translate import _


class wizard_user(osv.TransientModel):
    _inherit = 'portal.wizard.user'

    def get_error_messages(self, cr, uid, ids, context=None):
        error_msg = super(wizard_user, self).get_error_messages(cr, uid, ids, context)
        if error_msg:
            error_msg[-1] = '%s\n%s' % (error_msg[-1], _("- Merge existing contacts together using the Automatic Merge wizard, "
                                                         "available in the More menu after selecting several contacts in the Customers list"))
        return error_msg

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: