diff --git a/base_optional_quick_create/__openerp__.py b/base_optional_quick_create/__openerp__.py index 24c08b0e7..81c33ab24 100644 --- a/base_optional_quick_create/__openerp__.py +++ b/base_optional_quick_create/__openerp__.py @@ -24,8 +24,11 @@ 'category': 'Tools', 'summary': "Avoid 'quick create' on m2o fields, on a 'by model' basis", 'description': """ -This module allows to avoid to 'quick create' new records, through many2one fields, for a specific model. -You can configure which models should allow 'quick create'. When specified, the 'quick create' option will always open the standard create form. +This module allows you to avoid 'quick create' of new records through
+many2one fields, for a specific model.
+You can configure which models should allow 'quick create'.
+When quick creation is disabled for a model, the 'quick create' option will
+always open the standard create form.
 
 Got the idea from https://twitter.com/nbessi/status/337869826028605441
 """,
diff --git a/base_optional_quick_create/model.py b/base_optional_quick_create/model.py
index fc15593a7..470659443 100644
--- a/base_optional_quick_create/model.py
+++ b/base_optional_quick_create/model.py
@@ -22,10 +22,11 @@ from openerp.osv import orm, fields
 from openerp import SUPERUSER_ID
 from openerp.tools.translate import _
 
+
 class ir_model(orm.Model):
 
     _inherit = 'ir.model'
-    
+
     _columns = {
         'avoid_quick_create': fields.boolean('Avoid quick create'),
     }
diff --git a/cron_run_manually/model/ir_cron.py b/cron_run_manually/model/ir_cron.py
index 9b35ab95e..22c7f0720 100644
--- a/cron_run_manually/model/ir_cron.py
+++ b/cron_run_manually/model/ir_cron.py
@@ -27,9 +27,10 @@ from openerp.tools import SUPERUSER_ID
 from openerp.tools.translate import _
 from openerp.tools.safe_eval import safe_eval
 
+
 class irCron(orm.Model):
     _inherit = 'ir.cron'
-    
+
     def run_manually(self, cr, uid, ids, context=None):
         """
         Run a job from the cron form view.
@@ -51,7 +52,7 @@ class irCron(orm.Model):
                     _('Error'),
                     _('Only the admin user is allowed to '
                       'execute inactive cron jobs manually'))
-            
+
             try:
                 # Try to grab an exclusive lock on the job row
                 # until the end of the transaction
@@ -67,8 +68,8 @@ class irCron(orm.Model):
             model = self.pool.get(job['model'])
             method = getattr(model, job['function'])
             args = safe_eval('tuple(%s)' % (job['args'] or ''))
-            method(cr, job['user_id'], *args)
-            
+            method(cr, job['user_id'], *args)
+
         except psycopg2.OperationalError, e:
             # User friendly error if the lock could not be claimed
             if e.pgcode == '55P03':
diff --git a/dbfilter_from_header/__init__.py b/dbfilter_from_header/__init__.py
index 00ccc56d5..b7e2fd62c 100644
--- a/dbfilter_from_header/__init__.py
+++ b/dbfilter_from_header/__init__.py
@@ -23,6 +23,7 @@ from openerp.addons.web.controllers import main as web_main
 
 db_list_org = web_main.db_list
 
+
 def db_list(req, force=False):
     db_filter = req.httprequest.environ.get('HTTP_X_OPENERP_DBFILTER', '.*')
     dbs = db_list_org(req, force=force)
diff --git a/import_odbc/__openerp__.py b/import_odbc/__openerp__.py
index ad0e1239c..903f81c24 100644
--- a/import_odbc/__openerp__.py
+++ b/import_odbc/__openerp__.py
@@ -29,11 +29,21 @@ Import data directly from other databases.
 Installed in the Administration module, menu Configuration -> Import from SQL.
 
 Features:
- * Fetched data from the databases are used to build lines equivalent to regular import files. These are imported using the standard "import_data()" ORM method, benefiting from all its features, including xml_ids.
- * Each table import is defined by an SQL statement, used to build the equivalent for an import file. Each column's name should match the column names you would use in an import file. The first column must provide an unique identifier for the record, and will be used to build its xml_id.
- * SQL columns named "none" are ignored. This can be used for the first column of the SQL, so that it's used to build the XML Id but it's not imported to any OpenERP field.
- * The last sync date is the last successfull execution can be used in the SQL using "%(sync)s", or ":sync" in the case of Oracle.
- * When errors are found, only the record with the error fails import. The other correct records are commited. However, the "last sync date" will only be automaticaly updated when no errors are found.
+ * Fetched data from the databases are used to build lines equivalent to
+   regular import files. These are imported using the standard "import_data()"
+   ORM method, benefiting from all its features, including xml_ids.
+ * Each table import is defined by an SQL statement, used to build the
+   equivalent for an import file. Each column's name should match the column
+   names you would use in an import file. The first column must provide a
+   unique identifier for the record, and will be used to build its xml_id.
+ * SQL columns named "none" are ignored. This can be used for the first column
+   of the SQL, so that it's used to build the XML Id but it's not imported to
+   any OpenERP field.
+ * The last sync date is the date of the last successful execution and can
+   be used in the SQL using "%(sync)s", or ":sync" in the case of Oracle.
+ * When errors are found, only the record with the error fails to import.
+   The other correct records are committed. However, the "last sync date"
+   will only be automatically updated when no errors are found.
  * The import execution can be scheduled to run automatically.
 
 Examples:
@@ -54,9 +64,12 @@ Examples:
         WHERE DATE_CHANGED >= %(sync)s
 
 Improvements ideas waiting for a contributor:
- * Allow to import many2one fields (currently not supported). Done by adding a second SQL sentence to get child record list?
- * Allow "import sets" that can be executed at different time intervals using different scheduler jobs.
- * Allow to inactivate/delete OpenERP records when not present in an SQL result set.
+ * Allow importing many2one fields (currently not supported). Done by adding
+   a second SQL sentence to get the child record list?
+ * Allow "import sets" that can be executed at different time intervals using
+   different scheduler jobs.
+ * Allow inactivating/deleting OpenERP records when not present in an SQL
+   result set.
 
 Contributors
 ============
diff --git a/import_odbc/import_odbc.py b/import_odbc/import_odbc.py
index 9a16346be..7d418bee0 100644
--- a/import_odbc/import_odbc.py
+++ b/import_odbc/import_odbc.py
@@ -90,23 +90,23 @@ class import_odbc_dbtable(orm.Model):
             except:
                 errmsg = str(sys.exc_info()[1])
             if errmsg and not table_obj.ignore_rel_errors:
-                #Fail
+                # Fail
                 append_to_log(log, 'ERROR', data, errmsg)
                 log['last_error_count'] += 1
                 return False
             if errmsg and table_obj.ignore_rel_errors:
-                #Warn and retry ignoring many2one fields...
+                # Warn and retry ignoring many2one fields...
append_to_log(log, 'WARN', data, errmsg) log['last_warn_count'] += 1 - #Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first) + # Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first) i = find_m2o(cols) if i >= 0: - #Try again without the [i] column + # Try again without the [i] column del cols[i] del data[i] self._import_data(cr, uid, cols, data, model_obj, table_obj, log) else: - #Fail + # Fail append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.') log['last_error_count'] += 1 return False @@ -117,7 +117,7 @@ class import_odbc_dbtable(orm.Model): actions = self.read(cr, uid, ids, ['id', 'exec_order']) actions.sort(key=lambda x: (x['exec_order'], x['id'])) - #Consider each dbtable: + # Consider each dbtable: for action_ref in actions: obj = self.browse(cr, uid, action_ref['id']) if not obj.enabled: @@ -126,9 +126,9 @@ class import_odbc_dbtable(orm.Model): _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl) _logger.debug('Importing %s...' % obj.name) - #now() microseconds are stripped to avoid problem with SQL smalldate - #TODO: convert UTC Now to local timezone - #http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime + # now() microseconds are stripped to avoid problem with SQL smalldate + # TODO: convert UTC Now to local timezone + # http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime model_name = obj.model_target.model model_obj = self.pool.get(model_name) xml_prefix = model_name.replace('.', '_') + "_id_" @@ -140,7 +140,7 @@ class import_odbc_dbtable(orm.Model): 'last_log': list()} self.write(cr, uid, [obj.id], log) - #Prepare SQL sentence; replace "%s" with the last_sync date + # Prepare SQL sentence; replace "%s" with the last_sync date if obj.last_sync: sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S") else: @@ -149,30 +149,30 @@ class import_odbc_dbtable(orm.Model): res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True) - #Exclude columns titled "None"; add (xml_)"id" column + # Exclude columns titled "None"; add (xml_)"id" column cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id'] - #Import each row: + # Import each row: for row in res['rows']: - #Build data row; import only columns present in the "cols" list + # Build data row; import only columns present in the "cols" list data = list() for i in cidx: - #TODO: Handle imported datetimes properly - convert from localtime to UTC! + # TODO: Handle imported datetimes properly - convert from localtime to UTC! v = row[i] if isinstance(v, str): v = v.strip() data.append(v) data.append(xml_prefix + str(row[0]).strip()) - #Import the row; on error, write line to the log + # Import the row; on error, write line to the log log['last_record_count'] += 1 self._import_data(cr, uid, cols, data, model_obj, obj, log) if log['last_record_count'] % 500 == 0: _logger.info('...%s rows processed...' % (log['last_record_count'])) - #Finished importing all rows - #If no errors, write new sync date + # Finished importing all rows + # If no errors, write new sync date if not (log['last_error_count'] or log['last_warn_count']): log['last_sync'] = log['start_run'] level = logging.DEBUG @@ -183,14 +183,14 @@ class import_odbc_dbtable(orm.Model): _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' 
% ( model_name, log['last_record_count'], log['last_error_count'], log['last_warn_count'])) - #Write run log, either if the table import is active or inactive + # Write run log, either if the table import is active or inactive if log['last_log']: log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==') log.update({'last_log': '\n'.join(log['last_log'])}) log.update({'last_run': datetime.now().replace(microsecond=0)}) self.write(cr, uid, [obj.id], log) - #Finished + # Finished _logger.debug('Import job FINISHED.') return True @@ -214,5 +214,3 @@ class import_odbc_dbtable(orm.Model): 'res_id': new_create_id, 'type': 'ir.actions.act_window', } - -#EOF diff --git a/mail_environment/__openerp__.py b/mail_environment/__openerp__.py index fce396f8d..29970701f 100644 --- a/mail_environment/__openerp__.py +++ b/mail_environment/__openerp__.py @@ -26,15 +26,17 @@ 'description': """ Extend mail and fetch mail with server environment module. -In config files, sections outgoint_mail and incoming_mails are default values for all Outgoing Mail Servers and Fetchmail Servers. -For each server, you can (re)define values with a section named "outgoing_mail.resource_name" where resource_name is the name of your server. +In config files, sections outgoint_mail and incoming_mails are default values +for all Outgoing Mail Servers and Fetchmail Servers. +For each server, you can (re)define values with a section named +"outgoing_mail.resource_name" where resource_name is the name of your server. Exemple of config file : [outgoing_mail] smtp_host = smtp.myserver.com smtp_port = 587 -smtp_user = +smtp_user = smtp_pass = smtp_encryption = ssl @@ -64,4 +66,3 @@ password = openerp 'installable': True, 'active': False, } -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: diff --git a/mail_environment/env_mail.py b/mail_environment/env_mail.py index e5c8bb997..252703d2a 100644 --- a/mail_environment/env_mail.py +++ b/mail_environment/env_mail.py @@ -27,7 +27,7 @@ from server_environment import serv_config class IrMail(osv.osv): _inherit = "ir.mail_server" - + def _get_smtp_conf(self, cursor, uid, ids, name, args, context=None): """ Return configuration @@ -52,44 +52,49 @@ class IrMail(osv.osv): return res _columns = { - 'smtp_host': fields.function(_get_smtp_conf, - method=True, - string='SMTP Server', - type="char", - multi='outgoing_mail_config', - size=128), - 'smtp_port': fields.function(_get_smtp_conf, - method=True, - string='SMTP Port', - type="integer", - multi='outgoing_mail_config', - help="SMTP Port. 
Usually 465 for SSL, and 25 or 587 for other cases.", - size=5), - 'smtp_user': fields.function(_get_smtp_conf, - method=True, - string='Username', - type="char", - multi='outgoing_mail_config', - help="Optional username for SMTP authentication", - size=64), - 'smtp_pass': fields.function(_get_smtp_conf, - method=True, - string='Password', - type="char", - multi='outgoing_mail_config', - help="Optional password for SMTP authentication", - size=64), - 'smtp_encryption' :fields.function(_get_smtp_conf, - method=True, - string='smtp_encryption', - type="char", - multi='outgoing_mail_config', - help="Choose the connection encryption scheme:\n" - "- none: SMTP sessions are done in cleartext.\n" - "- starttls: TLS encryption is requested at start of SMTP session (Recommended)\n" - "- ssl: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)", - size=64)} - + 'smtp_host': fields.function( + _get_smtp_conf, + method=True, + string='SMTP Server', + type="char", + multi='outgoing_mail_config', + size=128), + 'smtp_port': fields.function( + _get_smtp_conf, + method=True, + string='SMTP Port', + type="integer", + multi='outgoing_mail_config', + help="SMTP Port. Usually 465 for SSL, and 25 or 587 for other cases.", + size=5), + 'smtp_user': fields.function( + _get_smtp_conf, + method=True, + string='Username', + type="char", + multi='outgoing_mail_config', + help="Optional username for SMTP authentication", + size=64), + 'smtp_pass': fields.function( + _get_smtp_conf, + method=True, + string='Password', + type="char", + multi='outgoing_mail_config', + help="Optional password for SMTP authentication", + size=64), + 'smtp_encryption': fields.function( + _get_smtp_conf, + method=True, + string='smtp_encryption', + type="char", + multi='outgoing_mail_config', + help="Choose the connection encryption scheme:\n" + "- none: SMTP sessions are done in cleartext.\n" + "- starttls: TLS encryption is requested at start of SMTP session (Recommended)\n" + "- ssl: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)", + size=64)} + IrMail() @@ -108,13 +113,15 @@ class FetchmailServer(osv.osv): key_types = {'port': int, 'is_ssl': lambda a: bool(int(a)), 'attach': lambda a: bool(int(a)), - 'original': lambda a: bool(int(a)),} + 'original': lambda a: bool(int(a)), + } # default vals config_vals = {'port': 993, 'is_ssl': 0, 'attach': 0, - 'original': 0} + 'original': 0, + } if serv_config.has_section(global_section_name): config_vals.update(serv_config.items(global_section_name)) @@ -132,7 +139,7 @@ class FetchmailServer(osv.osv): result_ids = [] # read all incomming servers values all_ids = self.search(cr, uid, [], context=context) - results = self.read(cr, uid, all_ids, ['id','type'], context=context) + results = self.read(cr, uid, all_ids, ['id', 'type'], context=context) args = args[:] i = 0 while i < len(args): @@ -145,64 +152,74 @@ class FetchmailServer(osv.osv): for search_vals in args[i][2]: for res in results: if (res['type'] == search_vals) and (res['id'] not in result_ids): - result_ids.append(res['id']) + result_ids.append(res['id']) else: continue i += 1 return [('id', 'in', result_ids)] _columns = { - 'server': fields.function(_get_incom_conf, - method=True, - string='Server', - type="char", - multi='income_mail_config', - size=256, help="Hostname or IP of the mail server"), - 'port': fields.function(_get_incom_conf, - method=True, - string='Port', - type="integer", - multi='income_mail_config', - help="Hostname or IP of the mail server"), - 'type': 
fields.function(_get_incom_conf, - method=True, - string='Type', - type="char", - multi='income_mail_config', - fnct_search=_type_search, - size=64, - help="pop, imap, local"), - 'is_ssl': fields.function(_get_incom_conf, - method=True, - string='Is SSL', - type="boolean", - multi='income_mail_config', - help='Connections are encrypted with SSL/TLS through' - ' a dedicated port (default: IMAPS=993, POP3S=995)'), - 'attach': fields.function(_get_incom_conf, - method=True, - string='Keep Attachments', - type="boolean", - multi='income_mail_config', - help="Whether attachments should be downloaded. " - "If not enabled, incoming emails will be stripped of any attachments before being processed"), - 'original': fields.function(_get_incom_conf, - method=True, - string='Keep Original', - type="boolean", - multi='income_mail_config', - help="Whether a full original copy of each email should be kept for reference" - "and attached to each processed message. This will usually double the size of your message database."), - 'user': fields.function(_get_incom_conf, - method=True, - string='Username', - type="char", - multi='income_mail_config', - size=64), - 'password': fields.function(_get_incom_conf, - method=True, - string='password', - type="char", - multi='income_mail_config', - size=64)} -FetchmailServer() + 'server': fields.function( + _get_incom_conf, + method=True, + string='Server', + type="char", + multi='income_mail_config', + size=256, help="Hostname or IP of the mail server"), + 'port': fields.function( + _get_incom_conf, + method=True, + string='Port', + type="integer", + multi='income_mail_config', + help="Hostname or IP of the mail server"), + 'type': fields.function( + _get_incom_conf, + method=True, + string='Type', + type="char", + multi='income_mail_config', + fnct_search=_type_search, + size=64, + help="pop, imap, local"), + 'is_ssl': fields.function( + _get_incom_conf, + method=True, + string='Is SSL', + type="boolean", + multi='income_mail_config', + help='Connections are encrypted with SSL/TLS through' + ' a dedicated port (default: IMAPS=993, POP3S=995)'), + 'attach': fields.function( + _get_incom_conf, + method=True, + string='Keep Attachments', + type="boolean", + multi='income_mail_config', + help="Whether attachments should be downloaded. " + "If not enabled, incoming emails will be stripped of any " + "attachments before being processed"), + 'original': fields.function( + _get_incom_conf, + method=True, + string='Keep Original', + type="boolean", + multi='income_mail_config', + help="Whether a full original copy of each email should be kept " + "for reference and attached to each processed message. This " + "will usually double the size of your message database."), + 'user': fields.function( + _get_incom_conf, + method=True, + string='Username', + type="char", + multi='income_mail_config', + size=64), + 'password': fields.function( + _get_incom_conf, + method=True, + string='password', + type="char", + multi='income_mail_config', + size=64)} +FetchmailServer() diff --git a/mass_editing/__init__.py b/mass_editing/__init__.py index f203ff27f..92e380a8f 100644 --- a/mass_editing/__init__.py +++ b/mass_editing/__init__.py @@ -18,9 +18,5 @@ # along with this program. 
If not, see # ############################################################################## - import mass_editing import wizard - -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: - diff --git a/mass_editing/wizard/__init__.py b/mass_editing/wizard/__init__.py index dc5322f17..b27ae9c52 100644 --- a/mass_editing/wizard/__init__.py +++ b/mass_editing/wizard/__init__.py @@ -18,7 +18,4 @@ # along with this program. If not, see # ############################################################################## - import mass_editing_wizard - -# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: \ No newline at end of file diff --git a/scheduler_error_mailer/ir_cron.py b/scheduler_error_mailer/ir_cron.py index 80b427c31..8971da486 100644 --- a/scheduler_error_mailer/ir_cron.py +++ b/scheduler_error_mailer/ir_cron.py @@ -1,5 +1,5 @@ # -*- encoding: utf-8 -*- -################################################################################# +############################################################################## # # Scheduler Error Mailer module for OpenERP # Copyright (C) 2012-2013 Akretion (http://www.akretion.com/) @@ -28,20 +28,21 @@ import logging logger = logging.getLogger(__name__) + class ir_cron(orm.Model): _inherit = "ir.cron" _columns = { - 'email_template': fields.many2one('email.template', + 'email_template': fields.many2one( + 'email.template', 'Error E-mail Template', help="Select the email template that will be sent when this scheduler fails."), } - def _handle_callback_exception(self, cr, uid, model_name, method_name, args, job_id, job_exception): - res = super(ir_cron, self)._handle_callback_exception(cr, uid, - model_name, method_name, args, job_id, job_exception) + res = super(ir_cron, self)._handle_callback_exception( + cr, uid, model_name, method_name, args, job_id, job_exception) my_cron = self.browse(cr, uid, job_id) @@ -54,9 +55,9 @@ class ir_cron(orm.Model): } logger.debug("Sending scheduler error email with context=%s" % context) - self.pool['email.template'].send_mail(cr, uid, - my_cron.email_template.id, my_cron.id, force_send=True, - context=context) + self.pool['email.template'].send_mail( + cr, uid, my_cron.email_template.id, my_cron.id, + force_send=True, context=context) return res @@ -67,4 +68,3 @@ class res_users(orm.Model): def test_scheduler_failure(self, cr, uid, context=None): """This function is used to test and debug this module""" raise orm.except_orm(_('Error :'), _("Task failure with UID = %d." % uid)) - diff --git a/super_calendar/__init__.py b/super_calendar/__init__.py index 6fe2100cc..90402ead4 100644 --- a/super_calendar/__init__.py +++ b/super_calendar/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- ############################################################################## -# +# # Copyright (C) 2012 Agile Business Group sagl () # Copyright (C) 2012 Domsense srl () #