From 131cf140f37c28e264818247404133328e3443ca Mon Sep 17 00:00:00 2001
From: lauris
Date: Thu, 21 Aug 2014 11:39:13 +0300
Subject: [PATCH] PEP8 fix for /import_odbc/import_odbc.py

---
 import_odbc/import_odbc.py | 69 +++++++++++++++++++++++++-------------
 1 file changed, 45 insertions(+), 24 deletions(-)

diff --git a/import_odbc/import_odbc.py b/import_odbc/import_odbc.py
index 7d418bee0..92ec37f6b 100644
--- a/import_odbc/import_odbc.py
+++ b/import_odbc/import_odbc.py
@@ -35,14 +35,20 @@ class import_odbc_dbtable(orm.Model):
     _columns = {
         'name': fields.char('Datasource name', required=True, size=64),
         'enabled': fields.boolean('Execution enabled'),
-        'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
-        'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
+        'dbsource_id': fields.many2one('base.external.dbsource',
+                                       'Database source',required=True),
+        'sql_source': fields.text('SQL', required=True,
+                                  help='Column names must be valid "import_data" columns.'),
         'model_target': fields.many2one('ir.model', 'Target object'),
-        'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
-        'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
+        'noupdate': fields.boolean('No updates',
+                                   help="Only create new records;\
+                                   disable updates to existing records."),
+        'exec_order': fields.integer('Execution order',
+                                     help="Defines the order to perform the import"),
         'last_sync': fields.datetime('Last sync date',
-                                     help="Datetime for the last succesfull sync."
-                                     "\nLater changes on the source may not be replicated on the destination"),
+                                     help="Datetime for the last succesfull sync.\
+                                     \nLater changes on the source may not be replicated on \
+                                     the destination"),
         'start_run': fields.datetime('Time started', readonly=True),
         'last_run': fields.datetime('Time ended', readonly=True),
         'last_record_count': fields.integer('Last record count', readonly=True),
@@ -50,10 +56,10 @@ class import_odbc_dbtable(orm.Model):
         'last_warn_count': fields.integer('Last warning count', readonly=True),
         'last_log': fields.text('Last run log', readonly=True),
         'ignore_rel_errors': fields.boolean('Ignore relationship errors',
-            help="On error try to reimport rows ignoring relationships."),
+                                            help="On error try to reimport rows ignoring relationships."),
         'raise_import_errors': fields.boolean('Raise import errors',
-            help="Import errors not handled, intended for debugging purposes."
-            "\nAlso forces debug messages to be written to the server log."),
+                                              help="Import errors not handled, intended for debugging purposes.\
+                                              \nAlso forces debug messages to be written to the server log."),
     }
     _defaults = {
         'enabled': True,
@@ -72,21 +78,26 @@ class import_odbc_dbtable(orm.Model):
 
         def append_to_log(log, level, obj_id='', msg='', rel_id=''):
             if '_id_' in obj_id:
-                obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
+                obj_id = ('.'.join(obj_id.split('_')[:-2]) + ': ' +
+                          obj_id.split('_')[-1])
             if ': .' in msg and not rel_id:
                 rel_id = msg[msg.find(': .')+3:]
                 if '_id_' in rel_id:
-                    rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
+                    rel_id = ('.'.join(rel_id.split('_')[:-2]) +
+                              ': ' + rel_id.split('_')[-1])
                 msg = msg[:msg.find(': .')]
-            log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
+            log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5),
+                                                        obj_id, rel_id, msg))
         _logger.debug(data)
         cols = list(flds)  # copy to avoid side effects
         errmsg = str()
         if table_obj.raise_import_errors:
-            model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
+            model_obj.import_data(cr, uid, cols, [data],
+                                  noupdate=table_obj.noupdate)
         else:
             try:
-                model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
+                model_obj.import_data(cr, uid, cols, [data],
+                                      noupdate=table_obj.noupdate)
             except:
                 errmsg = str(sys.exc_info()[1])
         if errmsg and not table_obj.ignore_rel_errors:
@@ -98,16 +109,19 @@ class import_odbc_dbtable(orm.Model):
             # Warn and retry ignoring many2one fields...
             append_to_log(log, 'WARN', data, errmsg)
             log['last_warn_count'] += 1
-            # Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
+            # Try ignoring each many2one
+            #(tip: in the SQL sentence select more problematic FKs first)
             i = find_m2o(cols)
             if i >= 0:
                 # Try again without the [i] column
                 del cols[i]
                 del data[i]
-                self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
+                self._import_data(cr, uid, cols, data, model_obj,
+                                  table_obj, log)
             else:
                 # Fail
-                append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
+                append_to_log(log, 'ERROR', data,
+                              'Removed all m2o keys and still fails.')
                 log['last_error_count'] += 1
                 return False
         return True
@@ -123,10 +137,12 @@ class import_odbc_dbtable(orm.Model):
             if not obj.enabled:
                 continue  # skip
 
-            _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
+            _logger.setLevel(obj.raise_import_errors and
+                             logging.DEBUG or _loglvl)
             _logger.debug('Importing %s...' % obj.name)
 
-            # now() microseconds are stripped to avoid problem with SQL smalldate
+            # now() microseconds are stripped
+            # to avoid problem with SQL smalldate
             # TODO: convert UTC Now to local timezone
             # http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
             model_name = obj.model_target.model
@@ -151,14 +167,16 @@ class import_odbc_dbtable(orm.Model):
 
             # Exclude columns titled "None"; add (xml_)"id" column
             cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
-            cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']
+            cols = ([x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] +
+                    ['id'])
 
             # Import each row:
             for row in res['rows']:
                 # Build data row; import only columns present in the "cols" list
                 data = list()
                 for i in cidx:
-                    # TODO: Handle imported datetimes properly - convert from localtime to UTC!
+                    # TODO: Handle imported datetimes properly
+                    # convert from localtime to UTC!
                     v = row[i]
                     if isinstance(v, str):
                         v = v.strip()
@@ -169,7 +187,8 @@ class import_odbc_dbtable(orm.Model):
                 log['last_record_count'] += 1
                 self._import_data(cr, uid, cols, data, model_obj, obj, log)
                 if log['last_record_count'] % 500 == 0:
-                    _logger.info('...%s rows processed...' % (log['last_record_count']))
+                    _logger.info('...%s rows processed...'
+                                 % (log['last_record_count']))
 
             # Finished importing all rows
             # If no errors, write new sync date
@@ -180,12 +199,14 @@ class import_odbc_dbtable(orm.Model):
                 level = logging.WARN
             if log['last_error_count']:
                 level = logging.ERROR
-            _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
+            _logger.log(level,
+                        'Imported %s , %d rows, %d errors, %d warnings.' % (
                 model_name, log['last_record_count'], log['last_error_count'],
                 log['last_warn_count']))
             # Write run log, either if the table import is active or inactive
             if log['last_log']:
-                log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
+                log['last_log'].insert(0,
+                    'LEVEL|== Line == |== Relationship ==|== Message ==')
             log.update({'last_log': '\n'.join(log['last_log'])})
             log.update({'last_run': datetime.now().replace(microsecond=0)})
             self.write(cr, uid, [obj.id], log)
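
Note on the patch above (reviewer remark, not part of the commit): the rewrapped help texts for 'noupdate', 'last_sync' and 'raise_import_errors' use a backslash line continuation inside the string literal. In Python that splice keeps the continuation line's leading indentation inside the string value, so the help text ends up with a long run of embedded spaces, whereas wrapping with adjacent string literals (as the removed lines already did) keeps the value unchanged. A minimal standalone sketch of the difference; the variable names are illustrative only:

# Sketch only: compares the two wrapping styles used in the patch above.
wrapped_with_backslash = "Only create new records;\
                          disable updates to existing records."

# Implicit concatenation of adjacent string literals adds no extra whitespace.
wrapped_with_concat = ("Only create new records; "
                       "disable updates to existing records.")

print(repr(wrapped_with_backslash))  # continuation-line indentation is embedded after ';'
print(repr(wrapped_with_concat))     # 'Only create new records; disable updates to existing records.'

A PEP8 checker would likely also still flag the missing space after the comma in 'Database source',required=True and the "#(tip: ...)" block comment introduced above.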