diff --git a/.travis.yml b/.travis.yml index 4c6a11900..7c6892808 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,12 @@ language: python + python: - "2.7" +env: + - VERSION="8.0" ODOO_REPO="odoo/odoo" + - VERSION="8.0" ODOO_REPO="OCA/OCB" + virtualenv: system_site_packages: true @@ -10,15 +15,15 @@ env: - VERSION="8.0" ODOO_REPO="OCA/OCB" install: - - git clone https://github.com/OCA/maintainer-quality-tools.git $HOME/maintainer-quality-tools - - export PATH=$HOME/maintainer-quality-tools/travis:$PATH - - travis_install_nightly - - pip install python-ldap - - printf '[options]\n\nrunning_env = dev' > /tmp/odoo.cfg + - git clone https://github.com/OCA/maintainer-quality-tools.git ${HOME}/maintainer-quality-tools + - export PATH=${HOME}/maintainer-quality-tools/travis:${PATH} + - travis_install_nightly ${VERSION} + - sudo pip install python-ldap + - printf '[options]\n\nrunning_env = dev' > ${HOME}/.openerp_serverrc script: - - travis_run_flake8 - - travis_run_tests /tmp/odoo.cfg + - travis_run_flake8 + - travis_run_tests ${VERSION} after_success: coveralls diff --git a/__unported__/auth_admin_passkey/model/res_users.py b/__unported__/auth_admin_passkey/model/res_users.py index 6d050108c..d0a5a8aa2 100644 --- a/__unported__/auth_admin_passkey/model/res_users.py +++ b/__unported__/auth_admin_passkey/model/res_users.py @@ -35,7 +35,7 @@ class res_users(Model): # Private Function section def _get_translation(self, cr, lang, text): - context = {'lang': lang} + context = {'lang': lang} # noqa: _() checks page for locals return _(text) def _send_email_passkey(self, cr, user_id, user_agent_env): diff --git a/__unported__/auth_from_http_basic/__init__.py b/__unported__/auth_from_http_basic/__init__.py index 34a0d9550..4458cc282 100644 --- a/__unported__/auth_from_http_basic/__init__.py +++ b/__unported__/auth_from_http_basic/__init__.py @@ -36,7 +36,8 @@ def init(self, params): base_location=self.httprequest.url_root.rstrip('/'), 
HTTP_HOST=self.httprequest.environ['HTTP_HOST'], REMOTE_ADDR=self.httprequest.environ['REMOTE_ADDR'] - )) + ) + ) WebRequest.init = init diff --git a/__unported__/base_external_dbsource/__openerp__.py b/__unported__/base_external_dbsource/__openerp__.py index 36bfc5fba..f5f7f72eb 100644 --- a/__unported__/base_external_dbsource/__openerp__.py +++ b/__unported__/base_external_dbsource/__openerp__.py @@ -57,7 +57,7 @@ Contributors 'base_external_dbsource_demo.xml', ], 'test': [ - 'dbsource_connect.yml', + 'test/dbsource_connect.yml', ], 'installable': False, 'active': False, diff --git a/__unported__/base_external_dbsource/base_external_dbsource.py b/__unported__/base_external_dbsource/base_external_dbsource.py index e9e4e787f..9beb69e9e 100644 --- a/__unported__/base_external_dbsource/base_external_dbsource.py +++ b/__unported__/base_external_dbsource/base_external_dbsource.py @@ -34,13 +34,15 @@ try: try: import pymssql CONNECTORS.append(('mssql', 'Microsoft SQL Server')) - except: + assert pymssql + except (ImportError, AssertionError): _logger.info('MS SQL Server not available. Please install "pymssql"\ python package.') try: import MySQLdb CONNECTORS.append(('mysql', 'MySQL')) - except: + assert MySQLdb + except (ImportError, AssertionError): _logger.info('MySQL not available. 
Please install "mysqldb"\ python package.') except: @@ -90,15 +92,15 @@ Sample connection strings: } def conn_open(self, cr, uid, id1): - #Get dbsource record + # Get dbsource record data = self.browse(cr, uid, id1) - #Build the full connection string + # Build the full connection string connStr = data.conn_string if data.password: if '%s' not in data.conn_string: connStr += ';PWD=%s' connStr = connStr % data.password - #Try to connect + # Try to connect if data.connector == 'cx_Oracle': os.environ['NLS_LANG'] = 'AMERICAN_AMERICA.UTF8' conn = cx_Oracle.connect(connStr) @@ -134,13 +136,13 @@ Sample connection strings: for obj in data: conn = self.conn_open(cr, uid, obj.id) if obj.connector in ["sqlite", "mysql", "mssql"]: - #using sqlalchemy + # using sqlalchemy cur = conn.execute(sqlquery, sqlparams) if metadata: cols = cur.keys() rows = [r for r in cur] else: - #using other db connectors + # using other db connectors cur = conn.cursor() cur.execute(sqlquery, sqlparams) if metadata: @@ -157,7 +159,7 @@ Sample connection strings: conn = False try: conn = self.conn_open(cr, uid, obj.id) - except Exception, e: + except Exception as e: raise orm.except_orm(_("Connection test failed!"), _("Here is what we got instead:\n %s") % tools.ustr(e)) @@ -168,8 +170,6 @@ Sample connection strings: except Exception: # ignored, just a consequence of the previous exception pass - #TODO: if OK a (wizard) message box should be displayed + # TODO: if OK a (wizard) message box should be displayed raise orm.except_orm(_("Connection test succeeded!"), _("Everything seems properly set up!")) - -#EOF diff --git a/__unported__/base_external_dbsource/test/dbsource_connect.yml b/__unported__/base_external_dbsource/test/dbsource_connect.yml index 1eabcf2b2..f1105697d 100644 --- a/__unported__/base_external_dbsource/test/dbsource_connect.yml +++ b/__unported__/base_external_dbsource/test/dbsource_connect.yml @@ -2,4 +2,8 @@ Connect to local Postgres. 
- !python {model: base.external.dbsource}: | - self.connection_test(cr, uid, [ref("demo_postgresql")] + from openerp.osv.orm import except_orm + try: + self.connection_test(cr, uid, [ref("demo_postgre")]) + except except_orm as e: + assert e.value == u'Everything seems properly set up!' diff --git a/__unported__/base_optional_quick_create/__openerp__.py b/__unported__/base_optional_quick_create/__openerp__.py index 24c08b0e7..81c33ab24 100644 --- a/__unported__/base_optional_quick_create/__openerp__.py +++ b/__unported__/base_optional_quick_create/__openerp__.py @@ -24,8 +24,11 @@ 'category': 'Tools', 'summary': "Avoid 'quick create' on m2o fields, on a 'by model' basis", 'description': """ -This module allows to avoid to 'quick create' new records, through many2one fields, for a specific model. -You can configure which models should allow 'quick create'. When specified, the 'quick create' option will always open the standard create form. +This module allows to avoid to 'quick create' new records, through many2one +fields, for a specific model. +You can configure which models should allow 'quick create'. +When specified, the 'quick create' option will always open the standard create +form. 
Got the idea from https://twitter.com/nbessi/status/337869826028605441 """, diff --git a/__unported__/base_optional_quick_create/model.py b/__unported__/base_optional_quick_create/model.py index fc15593a7..470659443 100644 --- a/__unported__/base_optional_quick_create/model.py +++ b/__unported__/base_optional_quick_create/model.py @@ -22,10 +22,11 @@ from openerp.osv import orm, fields from openerp import SUPERUSER_ID from openerp.tools.translate import _ + class ir_model(orm.Model): _inherit = 'ir.model' - + _columns = { 'avoid_quick_create': fields.boolean('Avoid quick create'), } diff --git a/__unported__/configuration_helper/config.py b/__unported__/configuration_helper/config.py index 187d683ab..73fd58e2f 100644 --- a/__unported__/configuration_helper/config.py +++ b/__unported__/configuration_helper/config.py @@ -40,7 +40,7 @@ class AbstractConfigSettings(orm.AbstractModel): super(AbstractConfigSettings, self).__init__(pool, cr) if self._companyObject: for field_key in self._companyObject._columns: - #allows to exclude some field + # allows to exclude some field if self._filter_field(field_key): args = ('company_id', field_key) kwargs = { diff --git a/__unported__/cron_run_manually/model/ir_cron.py b/__unported__/cron_run_manually/model/ir_cron.py index 9b35ab95e..3d7a30fa3 100644 --- a/__unported__/cron_run_manually/model/ir_cron.py +++ b/__unported__/cron_run_manually/model/ir_cron.py @@ -27,9 +27,10 @@ from openerp.tools import SUPERUSER_ID from openerp.tools.translate import _ from openerp.tools.safe_eval import safe_eval + class irCron(orm.Model): _inherit = 'ir.cron' - + def run_manually(self, cr, uid, ids, context=None): """ Run a job from the cron form view. 
@@ -51,7 +52,7 @@ class irCron(orm.Model): _('Error'), _('Only the admin user is allowed to ' 'execute inactive cron jobs manually')) - + try: # Try to grab an exclusive lock on the job row # until the end of the transaction @@ -67,9 +68,9 @@ class irCron(orm.Model): model = self.pool.get(job['model']) method = getattr(model, job['function']) args = safe_eval('tuple(%s)' % (job['args'] or '')) - method(cr, job['user_id'], *args) - - except psycopg2.OperationalError, e: + method(cr, job['user_id'], *args) + + except psycopg2.OperationalError as e: # User friendly error if the lock could not be claimed if e.pgcode == '55P03': raise orm.except_orm( diff --git a/__unported__/email_template_template/__openerp__.py b/__unported__/email_template_template/__openerp__.py index 5959795d9..ccf587699 100644 --- a/__unported__/email_template_template/__openerp__.py +++ b/__unported__/email_template_template/__openerp__.py @@ -60,7 +60,7 @@ Then in your template you write :: Dear ${object.partner_id.name}, - + Your order has been booked on date ${object.date} for a total amount of ${object.sum}. 
And it will be evaluated to @@ -77,13 +77,18 @@ And it will be evaluated to Example city Example Corp footer -Given the way evaluation works internally (body_text of the template template is evaluated two times, first with the instance of email.template of your own template, then with the object your template refers to), you can do some trickery if you know that a template template is always used with the same kind of model (that is, models that have the same field name): +Given the way evaluation works internally (body_text of the template template +is evaluated two times, first with the instance of email.template of your own +template, then with the object your template refers to), you can do some +trickery if you know that a template template is always used with the same +kind of model (that is, models that have the same field name): In your template template: :: - Dear ${'${object.name}'}, <-- gets evaluated to "${object.name}" in the first step, then to the content of object.name + Dear ${'${object.name}'}, <-- gets evaluated to "${object.name}" in the + first step, then to the content of object.name ${object.body_html} Best, Example Corp diff --git a/__unported__/email_template_template/model/email_template.py b/__unported__/email_template_template/model/email_template.py index 2d1b2091c..e2b67ab1a 100644 --- a/__unported__/email_template_template/model/email_template.py +++ b/__unported__/email_template_template/model/email_template.py @@ -28,7 +28,7 @@ class email_template(Model): def _get_is_template_template(self, cr, uid, ids, fields_name, arg, context=None): - cr.execute('''select + cr.execute('''select id, (select count(*) > 0 from email_template e where email_template_id=email_template.id) from email_template @@ -45,7 +45,7 @@ class email_template(Model): def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None): this = super(email_template, self).get_email_template( - cr, uid, template_id, record_id, context) + cr, uid, 
template_id, record_id, context) if this.email_template_id and not this.is_template_template: for field in ['body_html']: diff --git a/__unported__/fetchmail_attach_from_folder/match_algorithm/base.py b/__unported__/fetchmail_attach_from_folder/match_algorithm/base.py index 5116c929a..a3d9ba6b8 100644 --- a/__unported__/fetchmail_attach_from_folder/match_algorithm/base.py +++ b/__unported__/fetchmail_attach_from_folder/match_algorithm/base.py @@ -20,6 +20,7 @@ # ############################################################################## + class base(object): name = None '''Name shown to the user''' @@ -30,7 +31,6 @@ class base(object): readonly_fields = [] '''Fields on fetchmail_server folder that are readonly for this algorithm''' - def search_matches(self, cr, uid, conf, mail_message, mail_message_org): '''Returns ids found for model with mail_message''' return [] @@ -40,4 +40,4 @@ class base(object): mail_message, mail_message_org, msgid, context=None): '''Do whatever it takes to handle a match''' return folder.server_id.attach_mail(connection, object_id, folder, - mail_message, msgid) + mail_message, msgid) diff --git a/__unported__/fetchmail_attach_from_folder/match_algorithm/email_domain.py b/__unported__/fetchmail_attach_from_folder/match_algorithm/email_domain.py index 66ab66286..da2ec8ac8 100644 --- a/__unported__/fetchmail_attach_from_folder/match_algorithm/email_domain.py +++ b/__unported__/fetchmail_attach_from_folder/match_algorithm/email_domain.py @@ -22,6 +22,7 @@ from email_exact import email_exact + class email_domain(email_exact): '''Search objects by domain name of email address. 
Beware of match_first here, this is most likely to get it wrong (gmail)''' @@ -29,16 +30,16 @@ class email_domain(email_exact): def search_matches(self, cr, uid, conf, mail_message, mail_message_org): ids = super(email_domain, self).search_matches( - cr, uid, conf, mail_message, mail_message_org) + cr, uid, conf, mail_message, mail_message_org) if not ids: domains = [] for addr in self._get_mailaddresses(conf, mail_message): domains.append(addr.split('@')[-1]) ids = conf.pool.get(conf.model_id.model).search( - cr, uid, - self._get_mailaddress_search_domain( - conf, mail_message, - operator='like', - values=['%@'+domain for domain in set(domains)]), - order=conf.model_order) + cr, uid, + self._get_mailaddress_search_domain( + conf, mail_message, + operator='like', + values=['%@'+domain for domain in set(domains)]), + order=conf.model_order) return ids diff --git a/__unported__/fetchmail_attach_from_folder/match_algorithm/email_exact.py b/__unported__/fetchmail_attach_from_folder/match_algorithm/email_exact.py index 728c04461..b5686a183 100644 --- a/__unported__/fetchmail_attach_from_folder/match_algorithm/email_exact.py +++ b/__unported__/fetchmail_attach_from_folder/match_algorithm/email_exact.py @@ -24,6 +24,7 @@ from base import base from openerp.tools.safe_eval import safe_eval from openerp.tools.mail import email_split + class email_exact(base): '''Search for exactly the mailadress as noted in the email''' @@ -36,17 +37,17 @@ class email_exact(base): for field in fields: if field in mail_message: mailaddresses += email_split(mail_message[field]) - return [ addr.lower() for addr in mailaddresses ] + return [addr.lower() for addr in mailaddresses] def _get_mailaddress_search_domain( self, conf, mail_message, operator='=', values=None): mailaddresses = values or self._get_mailaddresses( - conf, mail_message) + conf, mail_message) if not mailaddresses: return [(0, '=', 1)] search_domain = ((['|'] * (len(mailaddresses) - 1)) + [ - (conf.model_field, operator, addr) 
for addr in mailaddresses] + - safe_eval(conf.domain or '[]')) + (conf.model_field, operator, addr) for addr in mailaddresses] + + safe_eval(conf.domain or '[]')) return search_domain def search_matches(self, cr, uid, conf, mail_message, mail_message_org): diff --git a/__unported__/fetchmail_attach_from_folder/match_algorithm/openerp_standard.py b/__unported__/fetchmail_attach_from_folder/match_algorithm/openerp_standard.py index 24a233d0d..2fee96c34 100644 --- a/__unported__/fetchmail_attach_from_folder/match_algorithm/openerp_standard.py +++ b/__unported__/fetchmail_attach_from_folder/match_algorithm/openerp_standard.py @@ -21,15 +21,21 @@ ############################################################################## from base import base -from openerp.tools.safe_eval import safe_eval + class openerp_standard(base): '''No search at all. Use OpenERP's standard mechanism to attach mails to mail.thread objects. Note that this algorithm always matches.''' name = 'OpenERP standard' - readonly_fields = ['model_field', 'mail_field', 'match_first', 'domain', - 'model_order', 'flag_nonmatching'] + readonly_fields = [ + 'model_field', + 'mail_field', + 'match_first', + 'domain', + 'model_order', + 'flag_nonmatching', + ] def search_matches(self, cr, uid, conf, mail_message, mail_message_org): '''Always match. 
Duplicates will be fished out by message_id''' @@ -39,11 +45,12 @@ class openerp_standard(base): self, cr, uid, connection, object_id, folder, mail_message, mail_message_org, msgid, context): result = folder.pool.get('mail.thread').message_process( - cr, uid, - folder.model_id.model, mail_message_org, - save_original=folder.server_id.original, - strip_attachments=(not folder.server_id.attach), - context=context) + cr, uid, + folder.model_id.model, mail_message_org, + save_original=folder.server_id.original, + strip_attachments=(not folder.server_id.attach), + context=context + ) if folder.delete_matching: connection.store(msgid, '+FLAGS', '\\DELETED') diff --git a/__unported__/fetchmail_attach_from_folder/model/fetchmail_server.py b/__unported__/fetchmail_attach_from_folder/model/fetchmail_server.py index 814da703c..c1673c382 100644 --- a/__unported__/fetchmail_attach_from_folder/model/fetchmail_server.py +++ b/__unported__/fetchmail_attach_from_folder/model/fetchmail_server.py @@ -23,12 +23,11 @@ import base64 import simplejson from lxml import etree -from openerp.osv.orm import Model, except_orm, browse_null +from openerp.osv.orm import Model, except_orm from openerp.tools.translate import _ from openerp.osv import fields from openerp.addons.fetchmail.fetchmail import _logger as logger from openerp.tools.misc import UnquoteEvalContext -from openerp.tools.safe_eval import safe_eval class fetchmail_server(Model): @@ -36,7 +35,7 @@ class fetchmail_server(Model): _columns = { 'folder_ids': fields.one2many( - 'fetchmail.server.folder', 'server_id', 'Folders'), + 'fetchmail.server.folder', 'server_id', 'Folders'), } _defaults = { @@ -95,20 +94,19 @@ class fetchmail_server(Model): if connection.select(folder.path)[0] != 'OK': logger.error( - 'Could not open mailbox %s on %s' % ( - folder.path, this.server)) + 'Could not open mailbox %s on %s' % (folder.path, this.server)) connection.select() continue result, msgids = this.get_msgids(connection) if result != 'OK': 
logger.error( 'Could not search mailbox %s on %s' % ( - folder.path, this.server)) + folder.path, this.server)) continue for msgid in msgids[0].split(): matched_object_ids += this.apply_matching( - connection, folder, msgid, match_algorithm) + connection, folder, msgid, match_algorithm) logger.info('finished checking for emails in %s server %s', folder.path, this.name) @@ -130,16 +128,16 @@ class fetchmail_server(Model): if result != 'OK': logger.error( - 'Could not fetch %s in %s on %s' % ( - msgid, folder.path, this.server)) + 'Could not fetch %s in %s on %s' % (msgid, folder.path, this.server)) continue mail_message = self.pool.get('mail.thread').message_parse( - cr, uid, msgdata[0][1], save_original=this.original, - context=context) + cr, uid, msgdata[0][1], save_original=this.original, + context=context) - if self.pool.get('mail.message').search(cr, uid, [ - ('message_id', '=', mail_message['message_id'])]): + if self.pool.get('mail.message').search( + cr, uid, [ + ('message_id', '=', mail_message['message_id'])]): continue found_ids = match_algorithm.search_matches( @@ -156,7 +154,7 @@ class fetchmail_server(Model): msgdata[0][1], msgid, context) cr.execute('release savepoint apply_matching') matched_object_ids += found_ids[:1] - except Exception, e: + except Exception: cr.execute('rollback to savepoint apply_matching') logger.exception( "Failed to fetch mail %s from %s", @@ -183,40 +181,40 @@ class fetchmail_server(Model): cr, uid, object_id, context ).partner_id.id - attachments=[] + attachments = [] if this.attach and mail_message.get('attachments'): for attachment in mail_message['attachments']: fname, fcontent = attachment if isinstance(fcontent, unicode): fcontent = fcontent.encode('utf-8') data_attach = { - 'name': fname, - 'datas': base64.b64encode(str(fcontent)), - 'datas_fname': fname, - 'description': _('Mail attachment'), - 'res_model': folder.model_id.model, - 'res_id': object_id, - } + 'name': fname, + 'datas': base64.b64encode(str(fcontent)), + 
'datas_fname': fname, + 'description': _('Mail attachment'), + 'res_model': folder.model_id.model, + 'res_id': object_id, + } attachments.append( self.pool.get('ir.attachment').create( cr, uid, data_attach, context=context)) mail_message_ids.append( - self.pool.get('mail.message').create( - cr, uid, - { - 'author_id': partner_id, - 'model': folder.model_id.model, - 'res_id': object_id, - 'type': 'email', - 'body': mail_message.get('body'), - 'subject': mail_message.get('subject'), - 'email_from': mail_message.get('from'), - 'date': mail_message.get('date'), - 'message_id': mail_message.get('message_id'), - 'attachment_ids': [(6, 0, attachments)], - }, - context)) + self.pool.get('mail.message').create( + cr, uid, + { + 'author_id': partner_id, + 'model': folder.model_id.model, + 'res_id': object_id, + 'type': 'email', + 'body': mail_message.get('body'), + 'subject': mail_message.get('subject'), + 'email_from': mail_message.get('from'), + 'date': mail_message.get('date'), + 'message_id': mail_message.get('message_id'), + 'attachment_ids': [(6, 0, attachments)], + }, + context)) if folder.delete_matching: connection.store(msgid, '+FLAGS', '\\DELETED') diff --git a/__unported__/fetchmail_attach_from_folder/model/fetchmail_server_folder.py b/__unported__/fetchmail_attach_from_folder/model/fetchmail_server_folder.py index ea0c07a7b..8021332e2 100644 --- a/__unported__/fetchmail_attach_from_folder/model/fetchmail_server_folder.py +++ b/__unported__/fetchmail_attach_from_folder/model/fetchmail_server_folder.py @@ -47,55 +47,66 @@ class fetchmail_server_folder(Model): _columns = { 'sequence': fields.integer('Sequence'), 'path': fields.char( - 'Path', size=256, help='The path to your mail ' - "folder. Typically would be something like 'INBOX.myfolder'", - required=True), + 'Path', size=256, help='The path to your mail ' + "folder. 
Typically would be something like 'INBOX.myfolder'", + required=True + ), 'model_id': fields.many2one( - 'ir.model', 'Model', required=True, - help='The model to attach emails to'), + 'ir.model', 'Model', required=True, + help='The model to attach emails to' + ), 'model_field': fields.char( - 'Field (model)', size=128, - help='The field in your model that contains the field to match ' - 'against.\n' - 'Examples:\n' - "'email' if your model is res.partner, or " - "'partner_id.email' if you're matching sale orders"), + 'Field (model)', size=128, + help='The field in your model that contains the field to match ' + 'against.\n' + 'Examples:\n' + "'email' if your model is res.partner, or " + "'partner_id.email' if you're matching sale orders" + ), 'model_order': fields.char( - 'Order (model)', size=128, - help='Fields to order by, this mostly useful in conjunction ' - "with 'Use 1st match'"), + 'Order (model)', size=128, + help='Fields to order by, this mostly useful in conjunction ' + "with 'Use 1st match'" + ), 'match_algorithm': fields.selection( - _get_match_algorithms_sel, - 'Match algorithm', required=True, translate=True, - help='The algorithm used to determine which object an email ' - 'matches.'), + _get_match_algorithms_sel, + 'Match algorithm', required=True, translate=True, + help='The algorithm used to determine which object an email ' + 'matches.' + ), 'mail_field': fields.char( - 'Field (email)', size=128, - help='The field in the email used for matching. Typically ' - "this is 'to' or 'from'"), + 'Field (email)', size=128, + help='The field in the email used for matching. 
Typically ' + "this is 'to' or 'from'" + ), 'server_id': fields.many2one('fetchmail.server', 'Server'), 'delete_matching': fields.boolean( - 'Delete matches', - help='Delete matched emails from server'), + 'Delete matches', + help='Delete matched emails from server' + ), 'flag_nonmatching': fields.boolean( - 'Flag nonmatching', - help="Flag emails in the server that don't match any object " - 'in OpenERP'), + 'Flag nonmatching', + help="Flag emails in the server that don't match any object " + 'in OpenERP' + ), 'match_first': fields.boolean( - 'Use 1st match', - help='If there are multiple matches, use the first one. If ' - 'not checked, multiple matches count as no match at all'), + 'Use 1st match', + help='If there are multiple matches, use the first one. If ' + 'not checked, multiple matches count as no match at all' + ), 'domain': fields.char( - 'Domain', size=128, help='Fill in a search ' - 'filter to narrow down objects to match'), + 'Domain', size=128, help='Fill in a search ' + 'filter to narrow down objects to match' + ), 'msg_state': fields.selection( - [ - ('sent', 'Sent'), - ('received', 'Received'), - ], - 'Message state', - help='The state messages fetched from this folder should be ' - 'assigned in OpenERP'), + [ + ('sent', 'Sent'), + ('received', 'Received'), + ], + 'Message state', + help='The state messages fetched from this folder should be ' + 'assigned in OpenERP' + ), } _defaults = { diff --git a/__unported__/fetchmail_attach_from_folder/wizard/attach_mail_manually.py b/__unported__/fetchmail_attach_from_folder/wizard/attach_mail_manually.py index d1a16fa1f..18b851be2 100644 --- a/__unported__/fetchmail_attach_from_folder/wizard/attach_mail_manually.py +++ b/__unported__/fetchmail_attach_from_folder/wizard/attach_mail_manually.py @@ -22,48 +22,54 @@ from openerp.osv import fields from openerp.osv.orm import TransientModel +import logging +logger = logging.getLogger(__name__) class attach_mail_manually(TransientModel): _name = 
'fetchmail.attach.mail.manually' _columns = { - 'folder_id': fields.many2one('fetchmail.server.folder', 'Folder', - readonly=True), - 'mail_ids': fields.one2many( - 'fetchmail.attach.mail.manually.mail', 'wizard_id', 'Emails'), - } + 'folder_id': fields.many2one('fetchmail.server.folder', 'Folder', + readonly=True), + 'mail_ids': fields.one2many( + 'fetchmail.attach.mail.manually.mail', 'wizard_id', 'Emails'), + } def default_get(self, cr, uid, fields_list, context=None): if context is None: context = {} - defaults = super(attach_mail_manually, self).default_get(cr, uid, - fields_list, context) + defaults = super(attach_mail_manually, self).default_get( + cr, uid, fields_list, context + ) - for folder in self.pool.get('fetchmail.server.folder').browse(cr, uid, + for folder in self.pool.get('fetchmail.server.folder').browse( + cr, uid, [context.get('default_folder_id')], context): - defaults['mail_ids']=[] + defaults['mail_ids'] = [] connection = folder.server_id.connect() connection.select(folder.path) - result, msgids = connection.search(None, - 'FLAGGED' if folder.flag_nonmatching else 'UNDELETED') + result, msgids = connection.search( + None, + 'FLAGGED' if folder.flag_nonmatching else 'UNDELETED') if result != 'OK': logger.error('Could not search mailbox %s on %s' % ( - folder.path, this.server)) + folder.path, folder.server_id.name)) continue - attach_mail_manually_mail._columns['object_id'].selection=[ - (folder.model_id.model, folder.model_id.name)] + attach_mail_manually_mail._columns['object_id'].selection = [ + (folder.model_id.model, folder.model_id.name)] for msgid in msgids[0].split(): result, msgdata = connection.fetch(msgid, '(RFC822)') if result != 'OK': logger.error('Could not fetch %s in %s on %s' % ( - msgid, folder.path, this.server)) + msgid, folder.path, folder.server_id.name)) continue mail_message = self.pool.get('mail.thread').message_parse( - cr, uid, msgdata[0][1], - save_original=folder.server_id.original, - context=context) + cr, uid, 
msgdata[0][1], + save_original=folder.server_id.original, + context=context + ) defaults['mail_ids'].append((0, 0, { 'msgid': msgid, 'subject': mail_message.get('subject', ''), @@ -79,36 +85,45 @@ class attach_mail_manually(TransientModel): for mail in this.mail_ids: connection = this.folder_id.server_id.connect() connection.select(this.folder_id.path) - result, msgdata = connection.fetch(mail.msgid, '(RFC822)') - if result != 'OK': - logger.error('Could not fetch %s in %s on %s' % ( - msgid, folder.path, this.server)) - continue - + result, msgdata = connection.fetch(mail.msgid, '(RFC822)') + if result != 'OK': + logger.error('Could not fetch %s in %s on %s' % ( + mail.msgid, this.folder_id.path, this.folder_id.server_id.name)) + continue + mail_message = self.pool.get('mail.thread').message_parse( cr, uid, msgdata[0][1], save_original=this.folder_id.server_id.original, context=context) - this.folder_id.server_id.attach_mail(connection, - mail.object_id.id, this.folder_id, mail_message, - mail.msgid) + this.folder_id.server_id.attach_mail( + connection, + mail.object_id.id, this.folder_id, mail_message, + mail.msgid + ) connection.close() return {'type': 'ir.actions.act_window_close'} + class attach_mail_manually_mail(TransientModel): _name = 'fetchmail.attach.mail.manually.mail' _columns = { - 'wizard_id': fields.many2one('fetchmail.attach.mail.manually', - readonly=True), - 'msgid': fields.char('Message id', size=16, readonly=True), - 'subject': fields.char('Subject', size=128, readonly=True), - 'date': fields.datetime('Date', readonly=True), - 'object_id': fields.reference('Object', - selection=lambda self, cr, uid, context: - [(m.model, m.name) for m in - self.pool.get('ir.model').browse(cr, uid, - self.pool.get('ir.model').search(cr, uid, []), - context)], size=128), - } + 'wizard_id': fields.many2one('fetchmail.attach.mail.manually', + readonly=True), + 'msgid': fields.char('Message id', size=16, readonly=True), + 'subject': fields.char('Subject', size=128, readonly=True), +
'date': fields.datetime('Date', readonly=True), + 'object_id': fields.reference( + 'Object', + selection=lambda self, cr, uid, context: [ + (m.model, m.name) + for m in self.pool.get('ir.model').browse( + cr, uid, + self.pool.get('ir.model').search(cr, uid, []), + context + ) + ], + size=128, + ), + } diff --git a/__unported__/import_odbc/__openerp__.py b/__unported__/import_odbc/__openerp__.py index f661d2955..dd3ee9f0a 100644 --- a/__unported__/import_odbc/__openerp__.py +++ b/__unported__/import_odbc/__openerp__.py @@ -29,11 +29,21 @@ Import data directly from other databases. Installed in the Administration module, menu Configuration -> Import from SQL. Features: - * Fetched data from the databases are used to build lines equivalent to regular import files. These are imported using the standard "import_data()" ORM method, benefiting from all its features, including xml_ids. - * Each table import is defined by an SQL statement, used to build the equivalent for an import file. Each column's name should match the column names you would use in an import file. The first column must provide an unique identifier for the record, and will be used to build its xml_id. - * SQL columns named "none" are ignored. This can be used for the first column of the SQL, so that it's used to build the XML Id but it's not imported to any OpenERP field. - * The last sync date is the last successfull execution can be used in the SQL using "%(sync)s", or ":sync" in the case of Oracle. - * When errors are found, only the record with the error fails import. The other correct records are commited. However, the "last sync date" will only be automaticaly updated when no errors are found. + * Fetched data from the databases are used to build lines equivalent to + regular import files. These are imported using the standard "import_data()" + ORM method, benefiting from all its features, including xml_ids.
+ * Each table import is defined by an SQL statement, used to build the + equivalent for an import file. Each column's name should match the column + names you would use in an import file. The first column must provide a + unique identifier for the record, and will be used to build its xml_id. + * SQL columns named "none" are ignored. This can be used for the first column + of the SQL, so that it's used to build the XML Id but it's not imported to + any OpenERP field. + * The last sync date is the last successful execution and can be used in the SQL + using "%(sync)s", or ":sync" in the case of Oracle. + * When errors are found, only the record with the error fails import. The + other correct records are committed. However, the "last sync date" will only + be automatically updated when no errors are found. * The import execution can be scheduled to run automatically. Examples: @@ -54,9 +64,12 @@ Examples: WHERE DATE_CHANGED >= %(sync)s Improvements ideas waiting for a contributor: - * Allow to import many2one fields (currently not supported). Done by adding a second SQL sentence to get child record list? - * Allow "import sets" that can be executed at different time intervals using different scheduler jobs. - * Allow to inactivate/delete OpenERP records when not present in an SQL result set. + * Allow to import many2one fields (currently not supported). Done by adding a + second SQL sentence to get child record list? + * Allow "import sets" that can be executed at different time intervals using + different scheduler jobs. + * Allow to inactivate/delete OpenERP records when not present in an SQL + result set.
Contributors ============ diff --git a/__unported__/import_odbc/import_odbc.py b/__unported__/import_odbc/import_odbc.py index 9a16346be..7d418bee0 100644 --- a/__unported__/import_odbc/import_odbc.py +++ b/__unported__/import_odbc/import_odbc.py @@ -90,23 +90,23 @@ class import_odbc_dbtable(orm.Model): except: errmsg = str(sys.exc_info()[1]) if errmsg and not table_obj.ignore_rel_errors: - #Fail + # Fail append_to_log(log, 'ERROR', data, errmsg) log['last_error_count'] += 1 return False if errmsg and table_obj.ignore_rel_errors: - #Warn and retry ignoring many2one fields... + # Warn and retry ignoring many2one fields... append_to_log(log, 'WARN', data, errmsg) log['last_warn_count'] += 1 - #Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first) + # Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first) i = find_m2o(cols) if i >= 0: - #Try again without the [i] column + # Try again without the [i] column del cols[i] del data[i] self._import_data(cr, uid, cols, data, model_obj, table_obj, log) else: - #Fail + # Fail append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.') log['last_error_count'] += 1 return False @@ -117,7 +117,7 @@ class import_odbc_dbtable(orm.Model): actions = self.read(cr, uid, ids, ['id', 'exec_order']) actions.sort(key=lambda x: (x['exec_order'], x['id'])) - #Consider each dbtable: + # Consider each dbtable: for action_ref in actions: obj = self.browse(cr, uid, action_ref['id']) if not obj.enabled: @@ -126,9 +126,9 @@ class import_odbc_dbtable(orm.Model): _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl) _logger.debug('Importing %s...' 
% obj.name) - #now() microseconds are stripped to avoid problem with SQL smalldate - #TODO: convert UTC Now to local timezone - #http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime + # now() microseconds are stripped to avoid problem with SQL smalldate + # TODO: convert UTC Now to local timezone + # http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime model_name = obj.model_target.model model_obj = self.pool.get(model_name) xml_prefix = model_name.replace('.', '_') + "_id_" @@ -140,7 +140,7 @@ class import_odbc_dbtable(orm.Model): 'last_log': list()} self.write(cr, uid, [obj.id], log) - #Prepare SQL sentence; replace "%s" with the last_sync date + # Prepare SQL sentence; replace "%s" with the last_sync date if obj.last_sync: sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S") else: @@ -149,30 +149,30 @@ class import_odbc_dbtable(orm.Model): res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True) - #Exclude columns titled "None"; add (xml_)"id" column + # Exclude columns titled "None"; add (xml_)"id" column cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id'] - #Import each row: + # Import each row: for row in res['rows']: - #Build data row; import only columns present in the "cols" list + # Build data row; import only columns present in the "cols" list data = list() for i in cidx: - #TODO: Handle imported datetimes properly - convert from localtime to UTC! + # TODO: Handle imported datetimes properly - convert from localtime to UTC! 
v = row[i] if isinstance(v, str): v = v.strip() data.append(v) data.append(xml_prefix + str(row[0]).strip()) - #Import the row; on error, write line to the log + # Import the row; on error, write line to the log log['last_record_count'] += 1 self._import_data(cr, uid, cols, data, model_obj, obj, log) if log['last_record_count'] % 500 == 0: _logger.info('...%s rows processed...' % (log['last_record_count'])) - #Finished importing all rows - #If no errors, write new sync date + # Finished importing all rows + # If no errors, write new sync date if not (log['last_error_count'] or log['last_warn_count']): log['last_sync'] = log['start_run'] level = logging.DEBUG @@ -183,14 +183,14 @@ class import_odbc_dbtable(orm.Model): _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % ( model_name, log['last_record_count'], log['last_error_count'], log['last_warn_count'])) - #Write run log, either if the table import is active or inactive + # Write run log, either if the table import is active or inactive if log['last_log']: log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==') log.update({'last_log': '\n'.join(log['last_log'])}) log.update({'last_run': datetime.now().replace(microsecond=0)}) self.write(cr, uid, [obj.id], log) - #Finished + # Finished _logger.debug('Import job FINISHED.') return True @@ -214,5 +214,3 @@ class import_odbc_dbtable(orm.Model): 'res_id': new_create_id, 'type': 'ir.actions.act_window', } - -#EOF diff --git a/__unported__/mail_environment/__openerp__.py b/__unported__/mail_environment/__openerp__.py index 5da871afb..45313fe7c 100644 --- a/__unported__/mail_environment/__openerp__.py +++ b/__unported__/mail_environment/__openerp__.py @@ -26,15 +26,17 @@ 'description': """ Extend mail and fetch mail with server environment module. -In config files, sections outgoint_mail and incoming_mails are default values for all Outgoing Mail Servers and Fetchmail Servers. 
-For each server, you can (re)define values with a section named "outgoing_mail.resource_name" where resource_name is the name of your server. +In config files, sections outgoint_mail and incoming_mails are default values +for all Outgoing Mail Servers and Fetchmail Servers. +For each server, you can (re)define values with a section named +"outgoing_mail.resource_name" where resource_name is the name of your server. Exemple of config file : [outgoing_mail] smtp_host = smtp.myserver.com smtp_port = 587 -smtp_user = +smtp_user = smtp_pass = smtp_encryption = ssl @@ -64,4 +66,3 @@ password = openerp 'installable': False, 'active': False, } -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/__unported__/mail_environment/env_mail.py b/__unported__/mail_environment/env_mail.py index e5c8bb997..252703d2a 100644 --- a/__unported__/mail_environment/env_mail.py +++ b/__unported__/mail_environment/env_mail.py @@ -27,7 +27,7 @@ from server_environment import serv_config class IrMail(osv.osv): _inherit = "ir.mail_server" - + def _get_smtp_conf(self, cursor, uid, ids, name, args, context=None): """ Return configuration @@ -52,44 +52,49 @@ class IrMail(osv.osv): return res _columns = { - 'smtp_host': fields.function(_get_smtp_conf, - method=True, - string='SMTP Server', - type="char", - multi='outgoing_mail_config', - size=128), - 'smtp_port': fields.function(_get_smtp_conf, - method=True, - string='SMTP Port', - type="integer", - multi='outgoing_mail_config', - help="SMTP Port. 
Usually 465 for SSL, and 25 or 587 for other cases.", - size=5), - 'smtp_user': fields.function(_get_smtp_conf, - method=True, - string='Username', - type="char", - multi='outgoing_mail_config', - help="Optional username for SMTP authentication", - size=64), - 'smtp_pass': fields.function(_get_smtp_conf, - method=True, - string='Password', - type="char", - multi='outgoing_mail_config', - help="Optional password for SMTP authentication", - size=64), - 'smtp_encryption' :fields.function(_get_smtp_conf, - method=True, - string='smtp_encryption', - type="char", - multi='outgoing_mail_config', - help="Choose the connection encryption scheme:\n" - "- none: SMTP sessions are done in cleartext.\n" - "- starttls: TLS encryption is requested at start of SMTP session (Recommended)\n" - "- ssl: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)", - size=64)} - + 'smtp_host': fields.function( + _get_smtp_conf, + method=True, + string='SMTP Server', + type="char", + multi='outgoing_mail_config', + size=128), + 'smtp_port': fields.function( + _get_smtp_conf, + method=True, + string='SMTP Port', + type="integer", + multi='outgoing_mail_config', + help="SMTP Port. 
Usually 465 for SSL, and 25 or 587 for other cases.", + size=5), + 'smtp_user': fields.function( + _get_smtp_conf, + method=True, + string='Username', + type="char", + multi='outgoing_mail_config', + help="Optional username for SMTP authentication", + size=64), + 'smtp_pass': fields.function( + _get_smtp_conf, + method=True, + string='Password', + type="char", + multi='outgoing_mail_config', + help="Optional password for SMTP authentication", + size=64), + 'smtp_encryption': fields.function( + _get_smtp_conf, + method=True, + string='smtp_encryption', + type="char", + multi='outgoing_mail_config', + help="Choose the connection encryption scheme:\n" + "- none: SMTP sessions are done in cleartext.\n" + "- starttls: TLS encryption is requested at start of SMTP session (Recommended)\n" + "- ssl: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)", + size=64)} + IrMail() @@ -108,13 +113,15 @@ class FetchmailServer(osv.osv): key_types = {'port': int, 'is_ssl': lambda a: bool(int(a)), 'attach': lambda a: bool(int(a)), - 'original': lambda a: bool(int(a)),} + 'original': lambda a: bool(int(a)), + } # default vals config_vals = {'port': 993, 'is_ssl': 0, 'attach': 0, - 'original': 0} + 'original': 0, + } if serv_config.has_section(global_section_name): config_vals.update(serv_config.items(global_section_name)) @@ -132,7 +139,7 @@ class FetchmailServer(osv.osv): result_ids = [] # read all incomming servers values all_ids = self.search(cr, uid, [], context=context) - results = self.read(cr, uid, all_ids, ['id','type'], context=context) + results = self.read(cr, uid, all_ids, ['id', 'type'], context=context) args = args[:] i = 0 while i < len(args): @@ -145,64 +152,74 @@ class FetchmailServer(osv.osv): for search_vals in args[i][2]: for res in results: if (res['type'] == search_vals) and (res['id'] not in result_ids): - result_ids.append(res['id']) + result_ids.append(res['id']) else: continue i += 1 return [('id', 'in', result_ids)] _columns = { 
- 'server': fields.function(_get_incom_conf, - method=True, - string='Server', - type="char", - multi='income_mail_config', - size=256, help="Hostname or IP of the mail server"), - 'port': fields.function(_get_incom_conf, - method=True, - string='Port', - type="integer", - multi='income_mail_config', - help="Hostname or IP of the mail server"), - 'type': fields.function(_get_incom_conf, - method=True, - string='Type', - type="char", - multi='income_mail_config', - fnct_search=_type_search, - size=64, - help="pop, imap, local"), - 'is_ssl': fields.function(_get_incom_conf, - method=True, - string='Is SSL', - type="boolean", - multi='income_mail_config', - help='Connections are encrypted with SSL/TLS through' - ' a dedicated port (default: IMAPS=993, POP3S=995)'), - 'attach': fields.function(_get_incom_conf, - method=True, - string='Keep Attachments', - type="boolean", - multi='income_mail_config', - help="Whether attachments should be downloaded. " - "If not enabled, incoming emails will be stripped of any attachments before being processed"), - 'original': fields.function(_get_incom_conf, - method=True, - string='Keep Original', - type="boolean", - multi='income_mail_config', - help="Whether a full original copy of each email should be kept for reference" - "and attached to each processed message. 
This will usually double the size of your message database."), - 'user': fields.function(_get_incom_conf, - method=True, - string='Username', - type="char", - multi='income_mail_config', - size=64), - 'password': fields.function(_get_incom_conf, - method=True, - string='password', - type="char", - multi='income_mail_config', - size=64)} -FetchmailServer() + 'server': fields.function( + _get_incom_conf, + method=True, + string='Server', + type="char", + multi='income_mail_config', + size=256, help="Hostname or IP of the mail server"), + 'port': fields.function( + _get_incom_conf, + method=True, + string='Port', + type="integer", + multi='income_mail_config', + help="Hostname or IP of the mail server"), + 'type': fields.function( + _get_incom_conf, + method=True, + string='Type', + type="char", + multi='income_mail_config', + fnct_search=_type_search, + size=64, + help="pop, imap, local"), + 'is_ssl': fields.function( + _get_incom_conf, + method=True, + string='Is SSL', + type="boolean", + multi='income_mail_config', + help='Connections are encrypted with SSL/TLS through' + ' a dedicated port (default: IMAPS=993, POP3S=995)'), + 'attach': fields.function( + _get_incom_conf, + method=True, + string='Keep Attachments', + type="boolean", + multi='income_mail_config', + help="Whether attachments should be downloaded. " + "If not enabled, incoming emails will be stripped of any " + "attachments before being processed"), + 'original': fields.function( + _get_incom_conf, + method=True, + string='Keep Original', + type="boolean", + multi='income_mail_config', + help="Whether a full original copy of each email should be kept " + "for reference and attached to each processed message. 
This " + "will usually double the size of your message database."), + 'user': fields.function( + _get_incom_conf, + method=True, + string='Username', + type="char", + multi='income_mail_config', + size=64), + 'password': fields.function( + _get_incom_conf, + method=True, + string='password', + type="char", + multi='income_mail_config', + size=64)} +FetchmailServer() diff --git a/__unported__/scheduler_error_mailer/ir_cron.py b/__unported__/scheduler_error_mailer/ir_cron.py index 80b427c31..8971da486 100644 --- a/__unported__/scheduler_error_mailer/ir_cron.py +++ b/__unported__/scheduler_error_mailer/ir_cron.py @@ -1,5 +1,5 @@ # -*- encoding: utf-8 -*- -################################################################################# +############################################################################## # # Scheduler Error Mailer module for OpenERP # Copyright (C) 2012-2013 Akretion (http://www.akretion.com/) @@ -28,20 +28,21 @@ import logging logger = logging.getLogger(__name__) + class ir_cron(orm.Model): _inherit = "ir.cron" _columns = { - 'email_template': fields.many2one('email.template', + 'email_template': fields.many2one( + 'email.template', 'Error E-mail Template', help="Select the email template that will be sent when this scheduler fails."), } - def _handle_callback_exception(self, cr, uid, model_name, method_name, args, job_id, job_exception): - res = super(ir_cron, self)._handle_callback_exception(cr, uid, - model_name, method_name, args, job_id, job_exception) + res = super(ir_cron, self)._handle_callback_exception( + cr, uid, model_name, method_name, args, job_id, job_exception) my_cron = self.browse(cr, uid, job_id) @@ -54,9 +55,9 @@ class ir_cron(orm.Model): } logger.debug("Sending scheduler error email with context=%s" % context) - self.pool['email.template'].send_mail(cr, uid, - my_cron.email_template.id, my_cron.id, force_send=True, - context=context) + self.pool['email.template'].send_mail( + cr, uid, my_cron.email_template.id, 
my_cron.id, + force_send=True, context=context) return res @@ -67,4 +68,3 @@ class res_users(orm.Model): def test_scheduler_failure(self, cr, uid, context=None): """This function is used to test and debug this module""" raise orm.except_orm(_('Error :'), _("Task failure with UID = %d." % uid)) - diff --git a/__unported__/server_environment/serv_config.py b/__unported__/server_environment/serv_config.py index 033cc91fe..ccac4f007 100644 --- a/__unported__/server_environment/serv_config.py +++ b/__unported__/server_environment/serv_config.py @@ -93,7 +93,7 @@ def _load_config(): config_p.optionxform = str try: config_p.read(conf_files) - except Exception, e: + except Exception as e: raise Exception('Cannot read config files "%s": %s' % (conf_files, e)) return config_p diff --git a/__unported__/server_environment/system_info.py b/__unported__/server_environment/system_info.py index 5c5875eb0..e6d522d9f 100644 --- a/__unported__/server_environment/system_info.py +++ b/__unported__/server_environment/system_info.py @@ -38,7 +38,7 @@ def get_server_environment(): # inspired by server/bin/service/web_services.py try: rev_id = _get_output('bzr revision-info') - except Exception, e: + except Exception as e: rev_id = 'Exception: %s' % (e,) os_lang = '.'.join([x for x in locale.getdefaultlocale() if x]) diff --git a/__unported__/server_environment_files/__init__.py b/__unported__/server_environment_files/__init__.py index 31fea2953..98a6c9725 100644 --- a/__unported__/server_environment_files/__init__.py +++ b/__unported__/server_environment_files/__init__.py @@ -17,4 +17,4 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# -############################################################################## \ No newline at end of file +############################################################################## diff --git a/__unported__/super_calendar/__init__.py b/__unported__/super_calendar/__init__.py index 6fe2100cc..90402ead4 100644 --- a/__unported__/super_calendar/__init__.py +++ b/__unported__/super_calendar/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- ############################################################################## -# +# # Copyright (C) 2012 Agile Business Group sagl () # Copyright (C) 2012 Domsense srl () # diff --git a/__unported__/super_calendar/__openerp__.py b/__unported__/super_calendar/__openerp__.py index 0ada28c30..2fc85eaa5 100644 --- a/__unported__/super_calendar/__openerp__.py +++ b/__unported__/super_calendar/__openerp__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- ############################################################################## -# +# # Copyright (C) 2012 Agile Business Group sagl () # Copyright (C) 2012 Domsense srl () # @@ -26,9 +26,13 @@ 'description': """ This module allows to create configurable calendars. -Through the 'calendar configurator' object, you can specify which models have to be merged in the super calendar. For each model, you have to define the 'description' and 'date_start' fields at least. Then you can define 'duration' and the 'user_id' fields. +Through the 'calendar configurator' object, you can specify which models have +to be merged in the super calendar. For each model, you have to define the +'description' and 'date_start' fields at least. Then you can define 'duration' +and the 'user_id' fields. -The 'super.calendar' object contains the the merged calendars. The 'super.calendar' can be updated by 'ir.cron' or manually. +The 'super.calendar' object contains the the merged calendars. The +'super.calendar' can be updated by 'ir.cron' or manually. 
Configuration ============= @@ -37,7 +41,8 @@ After installing the module you can go to Super calendar → Configuration → Configurators -and create a new configurator. For instance, if you want to see meetings and phone calls, you can create the following lines +and create a new configurator. For instance, if you want to see meetings and +phone calls, you can create the following lines .. image:: http://planet.domsense.com/wp-content/uploads/2012/04/meetings.png :width: 400 px @@ -45,7 +50,8 @@ and create a new configurator. For instance, if you want to see meetings and pho .. image:: http://planet.domsense.com/wp-content/uploads/2012/04/phone_calls.png :width: 400 px -Then, you can use the ‘Generate Calendar’ button or wait for the scheduled action (‘Generate Calendar Records’) to be run. +Then, you can use the ‘Generate Calendar’ button or wait for the scheduled +action (‘Generate Calendar Records’) to be run. When the calendar is generated, you can visualize it by the ‘super calendar’ main menu. @@ -59,13 +65,17 @@ And here is the weekly one: .. image:: http://planet.domsense.com/wp-content/uploads/2012/04/week_calendar.png :width: 400 px -As you can see, several filters are available. A typical usage consists in filtering by ‘Configurator’ (if you have several configurators, ‘Scheduled calls and meetings’ can be one of them) and by your user. Once you filtered, you can save the filter as ‘Advanced filter’ or even add it to a dashboard. +As you can see, several filters are available. A typical usage consists in +filtering by ‘Configurator’ (if you have several configurators, +‘Scheduled calls and meetings’ can be one of them) and by your user. +Once you filtered, you can save the filter as ‘Advanced filter’ or even +add it to a dashboard. 
""", 'author': 'Agile Business Group', 'website': 'http://www.agilebg.com', 'license': 'AGPL-3', - 'depends' : ['base'], - "data" : [ + 'depends': ['base'], + "data": [ 'super_calendar_view.xml', 'cron_data.xml', 'security/ir.model.access.csv', diff --git a/__unported__/super_calendar/super_calendar.py b/__unported__/super_calendar/super_calendar.py index 83cc9bd1e..be1520252 100644 --- a/__unported__/super_calendar/super_calendar.py +++ b/__unported__/super_calendar/super_calendar.py @@ -19,7 +19,7 @@ # ############################################################################## -from openerp.osv import fields, osv, orm +from openerp.osv import fields, orm from openerp.tools.translate import _ import logging from mako.template import Template @@ -27,19 +27,21 @@ from datetime import datetime from openerp import tools from openerp.tools.safe_eval import safe_eval + def _models_get(self, cr, uid, context=None): obj = self.pool.get('ir.model') ids = obj.search(cr, uid, []) res = obj.read(cr, uid, ids, ['model', 'name'], context) return [(r['model'], r['name']) for r in res] + class super_calendar_configurator(orm.Model): _logger = logging.getLogger('super.calendar') _name = 'super.calendar.configurator' - _columns = { + _columns = { 'name': fields.char('Name', size=64, required=True), 'line_ids': fields.one2many('super.calendar.configurator.line', 'configurator_id', 'Lines'), - } + } def generate_calendar_records(self, cr, uid, ids, context=None): configurator_ids = self.search(cr, uid, []) @@ -59,20 +61,30 @@ class super_calendar_configurator(orm.Model): context=context) for current_record_id in current_record_ids: - current_record = current_pool.browse(cr, uid, current_record_id, context=context) - if line.user_field_id and \ - current_record[line.user_field_id.name] and current_record[line.user_field_id.name]._table_name != 'res.users': - raise osv.except_osv(_('Error'), + current_record = current_pool.browse(cr, uid, current_record_id, context=context) + if 
(line.user_field_id and + current_record[line.user_field_id.name] and + current_record[line.user_field_id.name]._table_name != 'res.users'): + raise orm.except_orm( + _('Error'), _("The 'User' field of record %s (%s) does not refer to res.users") % (current_record[line.description_field_id.name], line.name.model)) - if (((line.description_field_id - and current_record[line.description_field_id.name]) - or line.description_code) - and current_record[line.date_start_field_id.name]): + if (((line.description_field_id and current_record[line.description_field_id.name]) or + line.description_code) and + current_record[line.date_start_field_id.name]): duration = False - if not line.duration_field_id and line.date_stop_field_id and current_record[line.date_start_field_id.name] and current_record[line.date_stop_field_id.name]: - date_start= datetime.strptime(current_record[line.date_start_field_id.name], tools.DEFAULT_SERVER_DATETIME_FORMAT) - date_stop= datetime.strptime(current_record[line.date_stop_field_id.name], tools.DEFAULT_SERVER_DATETIME_FORMAT) + if (not line.duration_field_id and + line.date_stop_field_id and + current_record[line.date_start_field_id.name] and + current_record[line.date_stop_field_id.name]): + date_start = datetime.strptime( + current_record[line.date_start_field_id.name], + tools.DEFAULT_SERVER_DATETIME_FORMAT + ) + date_stop = datetime.strptime( + current_record[line.date_stop_field_id.name], + tools.DEFAULT_SERVER_DATETIME_FORMAT + ) duration = (date_stop - date_start).total_seconds() / 3600 elif line.duration_field_id: duration = current_record[line.duration_field_id.name] @@ -81,13 +93,18 @@ class super_calendar_configurator(orm.Model): else: parse_dict = {'o': current_record} mytemplate = Template(line.description_code) - name= mytemplate.render(**parse_dict) + name = mytemplate.render(**parse_dict) super_calendar_values = { 'name': name, 'model_description': line.description, 'date_start': current_record[line.date_start_field_id.name], 
'duration': duration, - 'user_id': line.user_field_id and current_record[line.user_field_id.name] and current_record[line.user_field_id.name].id or False, + 'user_id': ( + line.user_field_id and + current_record[line.user_field_id.name] and + current_record[line.user_field_id.name].id or + False + ), 'configurator_id': configurator.id, 'res_id': line.name.model+','+str(current_record['id']), 'model_id': line.name.id, @@ -99,7 +116,7 @@ class super_calendar_configurator(orm.Model): class super_calendar_configurator_line(orm.Model): _name = 'super.calendar.configurator.line' - _columns = { + _columns = { 'name': fields.many2one('ir.model', 'Model', required=True), 'description': fields.char('Description', size=128, required=True), 'domain': fields.char('Domain', size=512), @@ -108,30 +125,39 @@ class super_calendar_configurator_line(orm.Model): ('field', 'Field'), ('code', 'Code'), ], string="Description Type"), - 'description_field_id': fields.many2one('ir.model.fields', 'Description field', + 'description_field_id': fields.many2one( + 'ir.model.fields', 'Description field', domain="[('model_id', '=', name),('ttype', '=', 'char')]"), - 'description_code': fields.text('Description field', help="Use '${o}' to refer to the involved object. E.g.: '${o.project_id.name}'"), - 'date_start_field_id': fields.many2one('ir.model.fields', 'Start date field', + 'description_code': fields.text( + 'Description field', + help="Use '${o}' to refer to the involved object. 
E.g.: '${o.project_id.name}'" + ), + 'date_start_field_id': fields.many2one( + 'ir.model.fields', 'Start date field', domain="['&','|',('ttype', '=', 'datetime'),('ttype', '=', 'date'),('model_id', '=', name)]", required=True), - 'date_stop_field_id': fields.many2one('ir.model.fields', 'End date field', - domain="['&',('ttype', '=', 'datetime'),('model_id', '=', name)]"), - 'duration_field_id': fields.many2one('ir.model.fields', 'Duration field', + 'date_stop_field_id': fields.many2one( + 'ir.model.fields', 'End date field', + domain="['&',('ttype', '=', 'datetime'),('model_id', '=', name)]" + ), + 'duration_field_id': fields.many2one( + 'ir.model.fields', 'Duration field', domain="['&',('ttype', '=', 'float'),('model_id', '=', name)]"), - 'user_field_id': fields.many2one('ir.model.fields', 'User field', + 'user_field_id': fields.many2one( + 'ir.model.fields', 'User field', domain="['&',('ttype', '=', 'many2one'),('model_id', '=', name)]"), - } + } class super_calendar(orm.Model): _name = 'super.calendar' - _columns = { + _columns = { 'name': fields.char('Description', size=512, required=True), 'model_description': fields.char('Model Description', size=128, required=True), - 'date_start':fields.datetime('Start date', required=True), - 'duration':fields.float('Duration'), + 'date_start': fields.datetime('Start date', required=True), + 'duration': fields.float('Duration'), 'user_id': fields.many2one('res.users', 'User'), 'configurator_id': fields.many2one('super.calendar.configurator', 'Configurator'), 'res_id': fields.reference('Resource', selection=_models_get, size=128), 'model_id': fields.many2one('ir.model', 'Model'), - } + } diff --git a/__unported__/users_ldap_groups/__openerp__.py b/__unported__/users_ldap_groups/__openerp__.py index 98acddbab..044301a31 100644 --- a/__unported__/users_ldap_groups/__openerp__.py +++ b/__unported__/users_ldap_groups/__openerp__.py @@ -20,11 +20,11 @@ 
############################################################################## { -"name" : "Groups assignment", -"version" : "1.2", -"depends" : ["auth_ldap"], -"author" : "Therp BV", -"description": """ + "name": "Groups assignment", + "version": "1.2", + "depends": ["auth_ldap"], + "author": "Therp BV", + "description": """ Adds user accounts to groups based on rules defined by the administrator. Usage: @@ -35,7 +35,7 @@ ldap server]. Decide whether you want only groups mapped from ldap (Only ldap groups=y) or a mix of manually set groups and ldap groups (Only ldap groups=n). Setting this to 'no' will result in users never losing privileges when you remove them from a -ldap group, so that's a potential security issue. It is still the default to +ldap group, so that's a potential security issue. It is still the default to prevent losing group information by accident. For active directory, use LDAP attribute 'memberOf' and operator 'contains'. @@ -46,17 +46,16 @@ For posix accounts, use operator 'query' and a value like (&(cn=bzr)(objectClass=posixGroup)(memberUid=$uid)) The operator query matches if the filter in value returns something, and value -can contain $[attribute] which will be replaced by the first value of the +can contain $[attribute] which will be replaced by the first value of the user's ldap record's attribute named [attribute]. 
""", -"category" : "Tools", -"data" : [ - 'users_ldap_groups.xml', - 'security/ir.model.access.csv', -], -"installable": True, -"external_dependencies" : { - 'python' : ['ldap'], -}, + "category": "Tools", + "data": [ + 'users_ldap_groups.xml', + 'security/ir.model.access.csv', + ], + "installable": True, + "external_dependencies": { + 'python': ['ldap'], + }, } -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/__unported__/users_ldap_groups/users_ldap_groups.py b/__unported__/users_ldap_groups/users_ldap_groups.py index 71db80338..9aaa1e71b 100644 --- a/__unported__/users_ldap_groups/users_ldap_groups.py +++ b/__unported__/users_ldap_groups/users_ldap_groups.py @@ -23,76 +23,85 @@ from openerp.osv import fields, orm import logging import users_ldap_groups_operators import inspect -import sys + class CompanyLDAPGroupMapping(orm.Model): - _name='res.company.ldap.group_mapping' - _rec_name='ldap_attribute' - _order='ldap_attribute' + _name = 'res.company.ldap.group_mapping' + _rec_name = 'ldap_attribute' + _order = 'ldap_attribute' - def _get_operators(self, cr, uid, context=None): - operators=[] - for name, operator in inspect.getmembers(users_ldap_groups_operators, - lambda cls: inspect.isclass(cls) - and cls!=users_ldap_groups_operators.LDAPOperator): - operators.append((name, name)) - return tuple(operators) + def _get_operators(self, cr, uid, context=None): + operators = [] + members = inspect.getmembers( + users_ldap_groups_operators, + lambda cls: inspect.isclass(cls) + and cls != users_ldap_groups_operators.LDAPOperator) + for name, operator in members: + operators.append((name, name)) + return tuple(operators) - _columns={ - 'ldap_id': fields.many2one('res.company.ldap', 'LDAP server', - required=True), - 'ldap_attribute': fields.char('LDAP attribute', size=64, + _columns = { + 'ldap_id': fields.many2one('res.company.ldap', 'LDAP server', required=True), + 'ldap_attribute': fields.char( + 'LDAP attribute', size=64, help='The 
LDAP attribute to check.\n' - 'For active directory, use memberOf.'), - 'operator': fields.selection(_get_operators, 'Operator', + 'For active directory, use memberOf.'), + 'operator': fields.selection( + _get_operators, 'Operator', help='The operator to check the attribute against the value\n' 'For active directory, use \'contains\'', required=True), - 'value': fields.char('Value', size=1024, + 'value': fields.char( + 'Value', size=1024, help='The value to check the attribute against.\n' - 'For active directory, use the dn of the desired group', + 'For active directory, use the dn of the desired group', required=True), - 'group': fields.many2one('res.groups', 'OpenERP group', + 'group': fields.many2one( + 'res.groups', 'OpenERP group', help='The OpenERP group to assign', required=True), } + class CompanyLDAP(orm.Model): - _inherit='res.company.ldap' + _inherit = 'res.company.ldap' - _columns={ - 'group_mappings': fields.one2many('res.company.ldap.group_mapping', - 'ldap_id', 'Group mappings', - help='Define how OpenERP groups are assigned to ldap users'), - 'only_ldap_groups': fields.boolean('Only ldap groups', - help='If this is checked, manual changes to group membership are ' - 'undone on every login (so OpenERP groups are always synchronous ' - 'with LDAP groups). If not, manually added groups are preserved.') - } + _columns = { + 'group_mappings': fields.one2many( + 'res.company.ldap.group_mapping', + 'ldap_id', 'Group mappings', + help='Define how OpenERP groups are assigned to ldap users'), + 'only_ldap_groups': fields.boolean( + 'Only ldap groups', + help='If this is checked, manual changes to group membership are ' + 'undone on every login (so OpenERP groups are always synchronous ' + 'with LDAP groups). 
If not, manually added groups are preserved.') + } - _default={ - 'only_ldap_groups': False - } + _default = { + 'only_ldap_groups': False, + } - def get_or_create_user(self, cr, uid, conf, login, ldap_entry, context=None): - user_id=super(CompanyLDAP, self).get_or_create_user(cr, uid, conf, login, - ldap_entry, context) - if not user_id: - return user_id - logger=logging.getLogger('users_ldap_groups') - mappingobj=self.pool.get('res.company.ldap.group_mapping') - userobj=self.pool.get('res.users') - conf_all=self.read(cr, uid, conf['id'], ['only_ldap_groups']) - if(conf_all['only_ldap_groups']): - logger.debug('deleting all groups from user %d' % user_id) - userobj.write(cr, uid, [user_id], {'groups_id': [(5, )]}) + def get_or_create_user(self, cr, uid, conf, login, ldap_entry, context=None): + user_id = super(CompanyLDAP, self).get_or_create_user(cr, uid, conf, login, + ldap_entry, context) + if not user_id: + return user_id + logger = logging.getLogger('users_ldap_groups') + mappingobj = self.pool.get('res.company.ldap.group_mapping') + userobj = self.pool.get('res.users') + conf_all = self.read(cr, uid, conf['id'], ['only_ldap_groups']) + if(conf_all['only_ldap_groups']): + logger.debug('deleting all groups from user %d' % user_id) + userobj.write(cr, uid, [user_id], {'groups_id': [(5, )]}, context=context) - for mapping in mappingobj.read(cr, uid, mappingobj.search(cr, uid, - [('ldap_id', '=', conf['id'])]), []): - operator=getattr(users_ldap_groups_operators, mapping['operator'])() - logger.debug('checking mapping %s' % mapping) - if operator.check_value(ldap_entry, mapping['ldap_attribute'], - mapping['value'], conf, self, logger): - logger.debug('adding user %d to group %s' % - (user_id, mapping['group'][1])) - userobj.write(cr, uid, [user_id], - {'groups_id': [(4, mapping['group'][0])]}) - return user_id + for mapping in mappingobj.read(cr, uid, mappingobj.search( + cr, uid, [('ldap_id', '=', conf['id'])]), []): + operator = 
getattr(users_ldap_groups_operators, mapping['operator'])() + logger.debug('checking mapping %s' % mapping) + if operator.check_value(ldap_entry, mapping['ldap_attribute'], + mapping['value'], conf, self, logger): + logger.debug('adding user %d to group %s' % + (user_id, mapping['group'][1])) + userobj.write(cr, uid, [user_id], + {'groups_id': [(4, mapping['group'][0])]}, + context=context) + return user_id diff --git a/__unported__/users_ldap_groups/users_ldap_groups_operators.py b/__unported__/users_ldap_groups/users_ldap_groups_operators.py index 9706459e9..4de745fbf 100644 --- a/__unported__/users_ldap_groups/users_ldap_groups_operators.py +++ b/__unported__/users_ldap_groups/users_ldap_groups_operators.py @@ -20,25 +20,28 @@ ############################################################################## from string import Template + class LDAPOperator: - pass + pass + class contains(LDAPOperator): - def check_value(self, ldap_entry, attribute, value, ldap_config, company, - logger): - return (attribute in ldap_entry[1]) and (value in ldap_entry[1][attribute]) + def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger): + return (attribute in ldap_entry[1]) and (value in ldap_entry[1][attribute]) + class equals(LDAPOperator): - def check_value(self, ldap_entry, attribute, value, ldap_config, company, - logger): - return (attribute in ldap_entry[1]) and (str(value)==str(ldap_entry[1][attribute])) + def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger): + return attribute in ldap_entry[1] and unicode(value) == unicode(ldap_entry[1][attribute]) + class query(LDAPOperator): - def check_value(self, ldap_entry, attribute, value, ldap_config, company, - logger): - query_string=Template(value).safe_substitute(dict([(attribute, - ldap_entry[1][attribute][0]) for attribute in ldap_entry[1]])) - logger.debug('evaluating query group mapping, filter: %s'%query_string) - results=company.query(ldap_config, query_string) - 
logger.debug(results) - return bool(results) + def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger): + query_string = Template(value).safe_substitute(dict( + [(attr, ldap_entry[1][attribute][0]) for attr in ldap_entry[1]] + ) + ) + logger.debug('evaluating query group mapping, filter: %s' % query_string) + results = company.query(ldap_config, query_string) + logger.debug(results) + return bool(results) diff --git a/__unported__/users_ldap_mail/__openerp__.py b/__unported__/users_ldap_mail/__openerp__.py index 789bbd583..25d332066 100644 --- a/__unported__/users_ldap_mail/__openerp__.py +++ b/__unported__/users_ldap_mail/__openerp__.py @@ -20,21 +20,20 @@ ############################################################################## { -'name': "LDAP mapping for user name and e-mail", -'version': "1.0", -'depends': ["auth_ldap"], -'author': "Daniel Reis (https://launchpad.com/~dreis-pt)", -'description': """\ + 'name': "LDAP mapping for user name and e-mail", + 'version': "1.0", + 'depends': ["auth_ldap"], + 'author': "Daniel Reis (https://launchpad.com/~dreis-pt)", + 'description': """\ Allows to define the LDAP attributes to use to retrieve user name and e-mail address. The default attribute used for the name is "cn". For Active Directory, you might prefer to use "displayName" instead. AD also supports the "mail" attribute, so it can be mapped into OpenERP. 
""", -'category': "Tools", -'data': [ - 'users_ldap_view.xml', -], -'installable': False, + 'category': "Tools", + 'data': [ + 'users_ldap_view.xml', + ], + 'installable': True, } -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/__unported__/users_ldap_mail/users_ldap_model.py b/__unported__/users_ldap_mail/users_ldap_model.py index 0bb72a2de..e4a8bd815 100644 --- a/__unported__/users_ldap_mail/users_ldap_model.py +++ b/__unported__/users_ldap_mail/users_ldap_model.py @@ -24,13 +24,16 @@ from openerp.osv import fields, orm import logging _log = logging.getLogger(__name__) + class CompanyLDAP(orm.Model): _inherit = 'res.company.ldap' _columns = { - 'name_attribute': fields.char('Name Attribute', size=64, + 'name_attribute': fields.char( + 'Name Attribute', size=64, help="By default 'cn' is used. " "For ActiveDirectory you might use 'displayName' instead."), - 'mail_attribute': fields.char('E-mail attribute', size=64, + 'mail_attribute': fields.char( + 'E-mail attribute', size=64, help="LDAP attribute to use to retrieve em-mail address."), } _defaults = { @@ -39,7 +42,7 @@ class CompanyLDAP(orm.Model): } def get_ldap_dicts(self, cr, ids=None): - """ + """ Copy of auth_ldap's funtion, changing only the SQL, so that it returns all fields in the table. 
""" @@ -58,7 +61,7 @@ class CompanyLDAP(orm.Model): def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry): values = super(CompanyLDAP, self).map_ldap_attributes(cr, uid, conf, - login, ldap_entry) + login, ldap_entry) mapping = [ ('name', 'name_attribute'), ('email', 'mail_attribute'), @@ -71,4 +74,3 @@ class CompanyLDAP(orm.Model): _log.warning('No LDAP attribute "%s" found for login "%s"' % ( conf.get(conf_name), values.get('login'))) return values - diff --git a/__unported__/users_ldap_populate/model/populate_wizard.py b/__unported__/users_ldap_populate/model/populate_wizard.py index 6677e2245..ea1ea4ac9 100644 --- a/__unported__/users_ldap_populate/model/populate_wizard.py +++ b/__unported__/users_ldap_populate/model/populate_wizard.py @@ -19,9 +19,10 @@ # ############################################################################## -from osv import osv, fields +from osv import orm, fields -class CompanyLDAPPopulateWizard(osv.TransientModel): + +class CompanyLDAPPopulateWizard(orm.TransientModel): _name = 'res.company.ldap.populate_wizard' _description = 'Populate users from LDAP' _columns = { @@ -34,7 +35,6 @@ class CompanyLDAPPopulateWizard(osv.TransientModel): def create(self, cr, uid, vals, context=None): ldap_pool = self.pool.get('res.company.ldap') - users_pool = self.pool.get('res.users') if 'ldap_id' in vals: vals['users_created'] = ldap_pool.action_populate( cr, uid, vals['ldap_id'], context=context) diff --git a/__unported__/users_ldap_populate/model/users_ldap.py b/__unported__/users_ldap_populate/model/users_ldap.py index f58388e8a..5b5f47fbb 100644 --- a/__unported__/users_ldap_populate/model/users_ldap.py +++ b/__unported__/users_ldap_populate/model/users_ldap.py @@ -21,18 +21,18 @@ import re from ldap.filter import filter_format -from openerp.osv import orm, fields -import openerp.exceptions +from openerp.osv import orm import logging + class CompanyLDAP(orm.Model): _inherit = 'res.company.ldap' - + def action_populate(self, cr, uid, 
ids, context=None): """ Prepopulate the user table from one or more LDAP resources. - - Obviously, the option to create users must be toggled in + + Obviously, the option to create users must be toggled in the LDAP configuration. Return the number of users created (as far as we can tell). @@ -54,7 +54,7 @@ class CompanyLDAP(orm.Model): if attribute_match: login_attr = attribute_match.group(1) else: - raise osv.except_osv( + raise orm.except_orm( "No login attribute found", "Could not extract login attribute from filter %s" % conf['ldap_filter']) diff --git a/__unported__/web_context_tunnel/__openerp__.py b/__unported__/web_context_tunnel/__openerp__.py index 2bed77732..ba2616bea 100644 --- a/__unported__/web_context_tunnel/__openerp__.py +++ b/__unported__/web_context_tunnel/__openerp__.py @@ -3,7 +3,7 @@ 'category': 'Hidden', 'author': 'Akretion', 'license': 'AGPL-3', - 'description':""" + 'description': """ Web Context Tunnel. =================== @@ -15,7 +15,7 @@ arguments. This is annoying as modules often need to pass extra arguments that are not present in the base on_change signatures. As soon as two modules try to alter this signature to add their extra arguments, they are incompatible between them unless some extra glue module make them compatible again by -taking all extra arguments into account. But this leads to a combinatorial +taking all extra arguments into account. But this leads to a combinatorial explosion to make modules compatible again. The solution