@@ -35,14 +35,20 @@ class import_odbc_dbtable(orm.Model):
     _columns = {
         'name': fields.char('Datasource name', required=True, size=64),
         'enabled': fields.boolean('Execution enabled'),
-        'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
-        'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
+        'dbsource_id': fields.many2one('base.external.dbsource',
+                                       'Database source', required=True),
+        'sql_source': fields.text('SQL', required=True,
+                                  help='Column names must be valid "import_data" columns.'),
         'model_target': fields.many2one('ir.model', 'Target object'),
-        'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
-        'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
+        'noupdate': fields.boolean('No updates',
+                                   help="Only create new records;\
+ disable updates to existing records."),
+        'exec_order': fields.integer('Execution order',
+                                     help="Defines the order to perform the import"),
         'last_sync': fields.datetime('Last sync date',
-                                     help="Datetime for the last succesfull sync."
-                                     "\nLater changes on the source may not be replicated on the destination"),
+                                     help="Datetime for the last succesfull sync.\
+\nLater changes on the source may not be replicated on \
+the destination"),
         'start_run': fields.datetime('Time started', readonly=True),
         'last_run': fields.datetime('Time ended', readonly=True),
         'last_record_count': fields.integer('Last record count', readonly=True),
@@ -52,8 +58,8 @@ class import_odbc_dbtable(orm.Model):
         'ignore_rel_errors': fields.boolean('Ignore relationship errors',
                                             help="On error try to reimport rows ignoring relationships."),
         'raise_import_errors': fields.boolean('Raise import errors',
-                                              help="Import errors not handled, intended for debugging purposes."
-                                              "\nAlso forces debug messages to be written to the server log."),
+                                              help="Import errors not handled, intended for debugging purposes.\
+\nAlso forces debug messages to be written to the server log."),
     }
     _defaults = {
         'enabled': True,
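Note on the wrapped help strings in the two hunks above: a trailing backslash continues the string literal onto the next physical line, and any leading whitespace on that line becomes part of the help text, so the continuation lines have to stay flush left. A minimal sketch of the two wrapping styles, using throwaway strings rather than the module's fields:

# Backslash continuation: the next line is spliced into the string,
# including its leading whitespace (here a single space).
wrapped = "Only create new records;\
 disable updates to existing records."

# Implicit concatenation of adjacent literals avoids that pitfall.
concatenated = ("Only create new records; "
                "disable updates to existing records.")

assert wrapped == concatenated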
@@ -72,21 +78,26 @@ class import_odbc_dbtable(orm.Model):
 
         def append_to_log(log, level, obj_id='', msg='', rel_id=''):
             if '_id_' in obj_id:
-                obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
+                obj_id = ('.'.join(obj_id.split('_')[:-2]) + ': ' +
+                          obj_id.split('_')[-1])
             if ': .' in msg and not rel_id:
                 rel_id = msg[msg.find(': .')+3:]
                 if '_id_' in rel_id:
-                    rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
+                    rel_id = ('.'.join(rel_id.split('_')[:-2]) +
+                              ': ' + rel_id.split('_')[-1])
                 msg = msg[:msg.find(': .')]
-            log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
+            log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5),
+                                                        obj_id, rel_id, msg))
         _logger.debug(data)
         cols = list(flds)  # copy to avoid side effects
         errmsg = str()
         if table_obj.raise_import_errors:
-            model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
+            model_obj.import_data(cr, uid, cols, [data],
+                                  noupdate=table_obj.noupdate)
         else:
             try:
-                model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
+                model_obj.import_data(cr, uid, cols, [data],
+                                      noupdate=table_obj.noupdate)
             except:
                 errmsg = str(sys.exc_info()[1])
         if errmsg and not table_obj.ignore_rel_errors:
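Note on the block above (only re-wrapped by this diff): with raise_import_errors unset, the bare except: swallows every exception, including KeyboardInterrupt and SystemExit, and only its text is kept for the log. A standalone sketch of the same pattern with a narrower handler; guarded_import and its arguments are illustrative names, not part of the module:

def guarded_import(import_row, raise_errors=False):
    # Either let errors propagate (debug mode) or capture the message
    # and let the caller decide whether to retry without some columns.
    errmsg = ''
    if raise_errors:
        import_row()
    else:
        try:
            import_row()
        except Exception as exc:  # narrower than a bare except:
            errmsg = str(exc)     # same effect as str(sys.exc_info()[1])
    return errmsg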
@@ -98,16 +109,19 @@ class import_odbc_dbtable(orm.Model):
             # Warn and retry ignoring many2one fields...
             append_to_log(log, 'WARN', data, errmsg)
             log['last_warn_count'] += 1
-            # Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
+            # Try ignoring each many2one
+            # (tip: in the SQL sentence select more problematic FKs first)
             i = find_m2o(cols)
             if i >= 0:
                 # Try again without the [i] column
                 del cols[i]
                 del data[i]
-                self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
+                self._import_data(cr, uid, cols, data, model_obj,
+                                  table_obj, log)
             else:
                 # Fail
-                append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
+                append_to_log(log, 'ERROR', data,
+                              'Removed all m2o keys and still fails.')
                 log['last_error_count'] += 1
                 return False
         return True
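Note: find_m2o is defined elsewhere in the module and does not appear in this diff; the retry path above only relies on it returning the index of the next relationship-style column, or -1 when none is left. A hypothetical stand-in with that contract, just to make the recursion readable:

def find_m2o(field_list):
    # Hypothetical helper (not the module's actual code): report the first
    # column whose name looks like an external-id relationship, e.g.
    # "partner_id/id" or "partner_id:id", so the caller can drop it and retry.
    for i, name in enumerate(field_list):
        if len(name) > 3 and (name.endswith('/id') or name.endswith(':id')):
            return i
    return -1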
@@ -123,10 +137,12 @@ class import_odbc_dbtable(orm.Model):
             if not obj.enabled:
                 continue  # skip
 
-            _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
+            _logger.setLevel(obj.raise_import_errors and
+                             logging.DEBUG or _loglvl)
             _logger.debug('Importing %s...' % obj.name)
 
-            # now() microseconds are stripped to avoid problem with SQL smalldate
+            # now() microseconds are stripped
+            # to avoid problem with SQL smalldate
             # TODO: convert UTC Now to local timezone
             # http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
             model_name = obj.model_target.model
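Note on the re-wrapped setLevel call: the "and ... or ..." construct is the old conditional idiom and only gives the intended result here because logging.DEBUG (10) is truthy; the conditional expression is the explicit equivalent. A small check, with stand-in values for obj.raise_import_errors and the module-level _loglvl:

import logging

raise_import_errors = True   # stand-in for obj.raise_import_errors
_loglvl = logging.INFO       # stand-in for the module-level _loglvl

level = logging.DEBUG if raise_import_errors else _loglvl
assert level == (raise_import_errors and logging.DEBUG or _loglvl)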
@@ -151,14 +167,16 @@ class import_odbc_dbtable(orm.Model):
 
             # Exclude columns titled "None"; add (xml_)"id" column
             cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
-            cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']
+            cols = ([x for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
+                    + ['id'])
 
             # Import each row:
             for row in res['rows']:
                 # Build data row; import only columns present in the "cols" list
                 data = list()
                 for i in cidx:
-                    # TODO: Handle imported datetimes properly - convert from localtime to UTC!
+                    # TODO: Handle imported datetimes properly
+                    # convert from localtime to UTC!
                     v = row[i]
                     if isinstance(v, str):
                         v = v.strip()
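Note: the two comprehensions above drive the row building that follows; cidx keeps the positions of the usable source columns, and cols keeps their names plus a trailing 'id' column, which import_data treats as the row's external id. A toy run with a made-up res dict, just to show the shapes:

res = {'cols': ['name', 'None', 'city'],        # invented cursor result
       'rows': [('Acme', 42, 'Lisbon')]}

cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
cols = ([x for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
        + ['id'])

assert cidx == [0, 2]
assert cols == ['name', 'city', 'id']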
@@ -169,7 +187,8 @@ class import_odbc_dbtable(orm.Model):
                 log['last_record_count'] += 1
                 self._import_data(cr, uid, cols, data, model_obj, obj, log)
                 if log['last_record_count'] % 500 == 0:
-                    _logger.info('...%s rows processed...' % (log['last_record_count']))
+                    _logger.info('...%s rows processed...'
+                                 % (log['last_record_count']))
 
             # Finished importing all rows
             # If no errors, write new sync date
@@ -180,12 +199,14 @@ class import_odbc_dbtable(orm.Model):
                 level = logging.WARN
             if log['last_error_count']:
                 level = logging.ERROR
-            _logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
+            _logger.log(level,
+                        'Imported %s , %d rows, %d errors, %d warnings.' % (
                 model_name, log['last_record_count'], log['last_error_count'],
                 log['last_warn_count']))
             # Write run log, either if the table import is active or inactive
             if log['last_log']:
-                log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
+                log['last_log'].insert(0,
+                                       'LEVEL|== Line == |== Relationship ==|== Message ==')
                 log.update({'last_log': '\n'.join(log['last_log'])})
             log.update({'last_run': datetime.now().replace(microsecond=0)})
             self.write(cr, uid, [obj.id], log)
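Note: the log dict written above is the accumulator filled in during the run and maps onto the readonly columns declared in the first hunk (last_record_count, last_error_count, last_warn_count, last_log, last_run). A minimal sketch of the shape self.write receives, with invented values:

from datetime import datetime

log = {
    # values invented for illustration only
    'last_run': datetime.now().replace(microsecond=0),
    'last_record_count': 1500,
    'last_error_count': 0,
    'last_warn_count': 3,
    'last_log': '\n'.join([
        'LEVEL|== Line == |== Relationship ==|== Message ==',
        'WARN |res.partner: 17\t|\t|missing relation',
    ]),
}
# self.write(cr, uid, [obj.id], log) persists this on the import table record.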