You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

265 lines
12 KiB

  1. # -*- coding: utf-8 -*-
  2. ##############################################################################
  3. #
  4. # Daniel Reis
  5. # 2011
  6. #
  7. # This program is free software: you can redistribute it and/or modify
  8. # it under the terms of the GNU Affero General Public License as
  9. # published by the Free Software Foundation, either version 3 of the
  10. # License, or (at your option) any later version.
  11. #
  12. # This program is distributed in the hope that it will be useful,
  13. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. # GNU Affero General Public License for more details.
  16. #
  17. # You should have received a copy of the GNU Affero General Public License
  18. # along with this program. If not, see <http://www.gnu.org/licenses/>.
  19. #
  20. ##############################################################################
  21. import sys
  22. from datetime import datetime
  23. from openerp.osv import orm, fields
  24. import logging
  25. _logger = logging.getLogger(__name__)
  26. _loglvl = _logger.getEffectiveLevel()
  27. SEP = '|'
  28. class import_odbc_dbtable(orm.Model):
  29. _name = "import.odbc.dbtable"
  30. _description = 'Import Table Data'
  31. _order = 'exec_order'
  32. _columns = {
  33. 'name': fields.char('Datasource name', required=True, size=64),
  34. 'enabled': fields.boolean('Execution enabled'),
  35. 'dbsource_id': fields.many2one('base.external.dbsource',
  36. 'Database source',
  37. required=True),
  38. 'sql_source': fields.text('SQL',
  39. required=True,
  40. help='Column names must be valid '
  41. '"import_data" columns.'),
  42. 'model_target': fields.many2one('ir.model', 'Target object'),
  43. 'noupdate': fields.boolean('No updates',
  44. help="Only create new records; disable "
  45. "updates to existing records."),
  46. 'exec_order': fields.integer('Execution order',
  47. help="Defines the order to perform "
  48. "the import"),
  49. 'last_sync': fields.datetime(
  50. 'Last sync date',
  51. help="Datetime for the last succesfull sync.\n"
  52. "Later changes on the source may not be replicated "
  53. "on the destination"),
  54. 'start_run': fields.datetime('Time started',
  55. readonly=True),
  56. 'last_run': fields.datetime('Time ended',
  57. readonly=True),
  58. 'last_record_count': fields.integer('Last record count',
  59. readonly=True),
  60. 'last_error_count': fields.integer('Last error count',
  61. readonly=True),
  62. 'last_warn_count': fields.integer('Last warning count',
  63. readonly=True),
  64. 'last_log': fields.text('Last run log', readonly=True),
  65. 'ignore_rel_errors': fields.boolean(
  66. 'Ignore relationship errors',
  67. help="On error try to reimport rows ignoring relationships."
  68. ),
  69. 'raise_import_errors': fields.boolean(
  70. 'Raise import errors',
  71. help="Import errors not handled, intended for debugging purposes."
  72. "\nAlso forces debug messages to be written to the server log."),
  73. }
  74. _defaults = {
  75. 'enabled': True,
  76. 'exec_order': 10,
  77. }
  78. def _import_data(self, cr, uid, flds, data, model_obj, table_obj, log):
  79. """Import data and returns error msg or empty string"""
  80. def find_m2o(field_list):
  81. """Find index of first column with a one2many field"""
  82. for i, x in enumerate(field_list):
  83. if len(x) > 3 and x[-3:] == ':id' or x[-3:] == '/id':
  84. return i
  85. return -1
  86. def append_to_log(log, level, obj_id='', msg='', rel_id=''):
  87. if '_id_' in obj_id:
  88. obj_id = ('.'.join(obj_id.split('_')[:-2])
  89. + ': '
  90. + obj_id.split('_')[-1])
  91. if ': .' in msg and not rel_id:
  92. rel_id = msg[msg.find(': .')+3:]
  93. if '_id_' in rel_id:
  94. rel_id = ('.'.join(rel_id.split('_')[:-2])
  95. + ': '
  96. + rel_id.split('_')[-1])
  97. msg = msg[:msg.find(': .')]
  98. log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5),
  99. obj_id,
  100. rel_id,
  101. msg))
  102. _logger.debug(data)
  103. cols = list(flds) # copy to avoid side effects
  104. errmsg = str()
  105. if table_obj.raise_import_errors:
  106. model_obj.import_data(cr, uid, cols, [data],
  107. noupdate=table_obj.noupdate)
  108. else:
  109. try:
  110. model_obj.import_data(cr, uid, cols, [data],
  111. noupdate=table_obj.noupdate)
  112. except:
  113. errmsg = str(sys.exc_info()[1])
  114. if errmsg and not table_obj.ignore_rel_errors:
  115. # Fail
  116. append_to_log(log, 'ERROR', data, errmsg)
  117. log['last_error_count'] += 1
  118. return False
  119. if errmsg and table_obj.ignore_rel_errors:
  120. # Warn and retry ignoring many2one fields...
  121. append_to_log(log, 'WARN', data, errmsg)
  122. log['last_warn_count'] += 1
  123. # Try ignoring each many2one (tip: in the SQL sentence select more
  124. # problematic FKs first)
  125. i = find_m2o(cols)
  126. if i >= 0:
  127. # Try again without the [i] column
  128. del cols[i]
  129. del data[i]
  130. self._import_data(cr, uid, cols,
  131. data,
  132. model_obj,
  133. table_obj,
  134. log)
  135. else:
  136. # Fail
  137. append_to_log(log, 'ERROR', data,
  138. 'Removed all m2o keys and still fails.')
  139. log['last_error_count'] += 1
  140. return False
  141. return True
  142. def import_run(self, cr, uid, ids=None, context=None):
  143. db_model = self.pool.get('base.external.dbsource')
  144. actions = self.read(cr, uid, ids, ['id', 'exec_order'])
  145. actions.sort(key=lambda x: (x['exec_order'], x['id']))
  146. # Consider each dbtable:
  147. for action_ref in actions:
  148. obj = self.browse(cr, uid, action_ref['id'])
  149. if not obj.enabled:
  150. continue # skip
  151. _logger.setLevel(obj.raise_import_errors and
  152. logging.DEBUG or
  153. _loglvl)
  154. _logger.debug('Importing %s...', obj.name)
  155. # now() microseconds are stripped to avoid problem with SQL
  156. # smalldate
  157. # TODO: convert UTC Now to local timezone
  158. # http://stackoverflow.com/questions/4770297
  159. model_name = obj.model_target.model
  160. model_obj = self.pool.get(model_name)
  161. xml_prefix = model_name.replace('.', '_') + "_id_"
  162. log = {'start_run': datetime.now().replace(microsecond=0),
  163. 'last_run': None,
  164. 'last_record_count': 0,
  165. 'last_error_count': 0,
  166. 'last_warn_count': 0,
  167. 'last_log': list()}
  168. self.write(cr, uid, [obj.id], log)
  169. # Prepare SQL sentence; replace "%s" with the last_sync date
  170. if obj.last_sync:
  171. sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
  172. else:
  173. sync = datetime.datetime(1900, 1, 1, 0, 0, 0)
  174. params = {'sync': sync}
  175. res = db_model.execute(cr, uid, [obj.dbsource_id.id],
  176. obj.sql_source, params, metadata=True)
  177. # Exclude columns titled "None"; add (xml_)"id" column
  178. cidx = [i for i, x in enumerate(res['cols'])
  179. if x.upper() != 'NONE']
  180. cols = [x for i, x in enumerate(res['cols'])
  181. if x.upper() != 'NONE'] + ['id']
  182. # Import each row:
  183. for row in res['rows']:
  184. # Build data row; import only columns present in the "cols"
  185. # list
  186. data = list()
  187. for i in cidx:
  188. # TODO: Handle imported datetimes properly - convert from
  189. # localtime to UTC!
  190. v = row[i]
  191. if isinstance(v, str):
  192. v = v.strip()
  193. data.append(v)
  194. data.append(xml_prefix + str(row[0]).strip())
  195. # Import the row; on error, write line to the log
  196. log['last_record_count'] += 1
  197. self._import_data(cr, uid, cols, data, model_obj, obj, log)
  198. if log['last_record_count'] % 500 == 0:
  199. _logger.info('...%s rows processed...',
  200. (log['last_record_count']))
  201. # Finished importing all rows
  202. # If no errors, write new sync date
  203. if not (log['last_error_count'] or log['last_warn_count']):
  204. log['last_sync'] = log['start_run']
  205. level = logging.DEBUG
  206. if log['last_warn_count']:
  207. level = logging.WARN
  208. if log['last_error_count']:
  209. level = logging.ERROR
  210. _logger.log(level,
  211. 'Imported %s , %d rows, %d errors, %d warnings.',
  212. model_name,
  213. log['last_record_count'],
  214. log['last_error_count'],
  215. log['last_warn_count'])
  216. # Write run log, either if the table import is active or inactive
  217. if log['last_log']:
  218. log['last_log'].insert(0,
  219. 'LEVEL|'
  220. '== Line == |'
  221. '== Relationship ==|'
  222. '== Message ==')
  223. log.update({'last_log': '\n'.join(log['last_log'])})
  224. log.update({'last_run': datetime.now().replace(microsecond=0)})
  225. self.write(cr, uid, [obj.id], log)
  226. # Finished
  227. _logger.debug('Import job FINISHED.')
  228. return True
  229. def import_schedule(self, cr, uid, ids, context=None):
  230. cron_obj = self.pool.get('ir.cron')
  231. new_create_id = cron_obj.create(cr, uid, {
  232. 'name': 'Import ODBC tables',
  233. 'interval_type': 'hours',
  234. 'interval_number': 1,
  235. 'numbercall': -1,
  236. 'model': 'import.odbc.dbtable',
  237. 'function': 'import_run',
  238. 'doall': False,
  239. 'active': True
  240. })
  241. return {
  242. 'name': 'Import ODBC tables',
  243. 'view_type': 'form',
  244. 'view_mode': 'form,tree',
  245. 'res_model': 'ir.cron',
  246. 'res_id': new_create_id,
  247. 'type': 'ir.actions.act_window',
  248. }