You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

231 lines
10 KiB

  1. # -*- coding: utf-8 -*-
  2. # Copyright (C) 2011 - TODAY Daniel Reis
  3. # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
  4. import sys
  5. from datetime import datetime
  6. from openerp.osv import orm, fields
  7. import logging
  8. _logger = logging.getLogger(__name__)
  9. _loglvl = _logger.getEffectiveLevel()
  10. SEP = '|'
  11. class import_odbc_dbtable(orm.Model):
  12. _name = "import.odbc.dbtable"
  13. _description = 'Import Table Data'
  14. _order = 'exec_order'
  15. _columns = {
  16. 'name': fields.char('Datasource name', required=True, size=64),
  17. 'enabled': fields.boolean('Execution enabled'),
  18. 'dbsource_id': fields.many2one('base.external.dbsource',
  19. 'Database source', required=True),
  20. 'sql_source': fields.text('SQL', required=True,
  21. help='Column names must be valid \
  22. "import_data" columns.'),
  23. 'model_target': fields.many2one('ir.model', 'Target object'),
  24. 'noupdate': fields.boolean('No updates',
  25. help="Only create new records;\
  26. disable updates to existing records."),
  27. 'exec_order': fields.integer('Execution order',
  28. help="Defines the order to perform \
  29. the import"),
  30. 'last_sync': fields.datetime('Last sync date',
  31. help="Datetime for the last succesfull \
  32. sync. \nLater changes on the source may \
  33. not be replicated on the destination"),
  34. 'start_run': fields.datetime('Time started', readonly=True),
  35. 'last_run': fields.datetime('Time ended', readonly=True),
  36. 'last_record_count': fields.integer('Last record count',
  37. readonly=True),
  38. 'last_error_count': fields.integer('Last error count', readonly=True),
  39. 'last_warn_count': fields.integer('Last warning count', readonly=True),
  40. 'last_log': fields.text('Last run log', readonly=True),
  41. 'ignore_rel_errors': fields.boolean('Ignore relationship errors',
  42. help="On error try to reimport \
  43. rows ignoring relationships."),
  44. 'raise_import_errors': fields.boolean('Raise import errors',
  45. help="Import errors not \
  46. handled, intended for \
  47. debugging purposes. \nAlso \
  48. forces debug messages to be \
  49. written to the server log."),
  50. }
  51. _defaults = {
  52. 'enabled': True,
  53. 'exec_order': 10,
  54. }
  55. def _import_data(self, cr, uid, flds, data, model_obj, table_obj, log):
  56. """Import data and returns error msg or empty string"""
  57. def find_m2o(field_list):
  58. """"Find index of first column with a one2many field"""
  59. for i, x in enumerate(field_list):
  60. if len(x) > 3 and x[-3:] == ':id' or x[-3:] == '/id':
  61. return i
  62. return -1
  63. def append_to_log(log, level, obj_id='', msg='', rel_id=''):
  64. if '_id_' in obj_id:
  65. obj_id = ('.'.join(obj_id.split('_')[:-2]) + ': ' +
  66. obj_id.split('_')[-1])
  67. if ': .' in msg and not rel_id:
  68. rel_id = msg[msg.find(': .')+3:]
  69. if '_id_' in rel_id:
  70. rel_id = ('.'.join(rel_id.split('_')[:-2]) +
  71. ': ' + rel_id.split('_')[-1])
  72. msg = msg[:msg.find(': .')]
  73. log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5),
  74. obj_id, rel_id, msg))
  75. _logger.debug(data)
  76. cols = list(flds) # copy to avoid side effects
  77. errmsg = str()
  78. if table_obj.raise_import_errors:
  79. model_obj.import_data(cr, uid, cols, [data],
  80. noupdate=table_obj.noupdate)
  81. else:
  82. try:
  83. model_obj.import_data(cr, uid, cols, [data],
  84. noupdate=table_obj.noupdate)
  85. except:
  86. errmsg = str(sys.exc_info()[1])
  87. if errmsg and not table_obj.ignore_rel_errors:
  88. # Fail
  89. append_to_log(log, 'ERROR', data, errmsg)
  90. log['last_error_count'] += 1
  91. return False
  92. if errmsg and table_obj.ignore_rel_errors:
  93. # Warn and retry ignoring many2one fields...
  94. append_to_log(log, 'WARN', data, errmsg)
  95. log['last_warn_count'] += 1
  96. # Try ignoring each many2one
  97. # (tip: in the SQL sentence select more problematic FKs first)
  98. i = find_m2o(cols)
  99. if i >= 0:
  100. # Try again without the [i] column
  101. del cols[i]
  102. del data[i]
  103. self._import_data(cr, uid, cols, data, model_obj,
  104. table_obj, log)
  105. else:
  106. # Fail
  107. append_to_log(log, 'ERROR', data,
  108. 'Removed all m2o keys and still fails.')
  109. log['last_error_count'] += 1
  110. return False
  111. return True
  112. def import_run(self, cr, uid, ids=None, context=None):
  113. db_model = self.pool.get('base.external.dbsource')
  114. actions = self.read(cr, uid, ids, ['id', 'exec_order'])
  115. actions.sort(key=lambda x: (x['exec_order'], x['id']))
  116. # Consider each dbtable:
  117. for action_ref in actions:
  118. obj = self.browse(cr, uid, action_ref['id'])
  119. if not obj.enabled:
  120. continue # skip
  121. _logger.setLevel(obj.raise_import_errors and
  122. logging.DEBUG or _loglvl)
  123. _logger.debug('Importing %s...' % obj.name)
  124. # now() microseconds are stripped
  125. # to avoid problem with SQL smalldate
  126. # TODO: convert UTC Now to local timezone
  127. # http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
  128. model_name = obj.model_target.model
  129. model_obj = self.pool.get(model_name)
  130. xml_prefix = model_name.replace('.', '_') + "_id_"
  131. log = {'start_run': datetime.now().replace(microsecond=0),
  132. 'last_run': None,
  133. 'last_record_count': 0,
  134. 'last_error_count': 0,
  135. 'last_warn_count': 0,
  136. 'last_log': list()}
  137. self.write(cr, uid, [obj.id], log)
  138. # Prepare SQL sentence; replace "%s" with the last_sync date
  139. if obj.last_sync:
  140. sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
  141. else:
  142. sync = datetime(1900, 1, 1, 0, 0, 0)
  143. params = {'sync': sync}
  144. res = db_model.execute(cr, uid, [obj.dbsource_id.id],
  145. obj.sql_source, params, metadata=True)
  146. # Exclude columns titled "None"; add (xml_)"id" column
  147. cidx = ([i for i, x in enumerate(res['cols'])
  148. if x.upper() != 'NONE'])
  149. cols = ([x for i, x in enumerate(res['cols'])
  150. if x.upper() != 'NONE'] + ['id'])
  151. # Import each row:
  152. for row in res['rows']:
  153. # Build data row;
  154. # import only columns present in the "cols" list
  155. data = list()
  156. for i in cidx:
  157. # TODO: Handle imported datetimes properly
  158. # convert from localtime to UTC!
  159. v = row[i]
  160. if isinstance(v, str):
  161. v = v.strip()
  162. data.append(v)
  163. data.append(xml_prefix + str(row[0]).strip())
  164. # Import the row; on error, write line to the log
  165. log['last_record_count'] += 1
  166. self._import_data(cr, uid, cols, data, model_obj, obj, log)
  167. if log['last_record_count'] % 500 == 0:
  168. _logger.info('...%s rows processed...'
  169. % (log['last_record_count']))
  170. # Finished importing all rows
  171. # If no errors, write new sync date
  172. if not (log['last_error_count'] or log['last_warn_count']):
  173. log['last_sync'] = log['start_run']
  174. level = logging.DEBUG
  175. if log['last_warn_count']:
  176. level = logging.WARN
  177. if log['last_error_count']:
  178. level = logging.ERROR
  179. _logger.log(level,
  180. 'Imported %s , %d rows, %d errors, %d warnings.' %
  181. (model_name, log['last_record_count'],
  182. log['last_error_count'],
  183. log['last_warn_count']))
  184. # Write run log, either if the table import is active or inactive
  185. if log['last_log']:
  186. log['last_log'].insert(0,
  187. 'LEVEL|== Line == |== Relationship \
  188. ==|== Message ==')
  189. log.update({'last_log': '\n'.join(log['last_log'])})
  190. log.update({'last_run': datetime.now().replace(microsecond=0)})
  191. self.write(cr, uid, [obj.id], log)
  192. # Finished
  193. _logger.debug('Import job FINISHED.')
  194. return True
  195. def import_schedule(self, cr, uid, ids, context=None):
  196. cron_obj = self.pool.get('ir.cron')
  197. new_create_id = cron_obj.create(cr, uid, {
  198. 'name': 'Import ODBC tables',
  199. 'interval_type': 'hours',
  200. 'interval_number': 1,
  201. 'numbercall': -1,
  202. 'model': 'import.odbc.dbtable',
  203. 'function': 'import_run',
  204. 'doall': False,
  205. 'active': True
  206. })
  207. return {
  208. 'name': 'Import ODBC tables',
  209. 'view_type': 'form',
  210. 'view_mode': 'form,tree',
  211. 'res_model': 'ir.cron',
  212. 'res_id': new_create_id,
  213. 'type': 'ir.actions.act_window',
  214. }