@@ -21,14 +21,15 @@
 import sys
 from datetime import datetime
-from osv import fields, osv
+from openerp.osv import orm, fields
 import logging
 _logger = logging.getLogger(__name__)
 _loglvl = _logger.getEffectiveLevel()
 SEP = '|'
 
-class import_odbc_dbtable(osv.osv):
-    _name = "import.odbc.dbtable"
+class import_odbc_dbtable(orm.Model):
+    _name = "import.odbc.dbtable"
     _description = 'Import Table Data'
     _order = 'exec_order'
     _columns = {
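This first hunk ports the module to the OpenERP 7.0 API: imports move to `openerp.osv` and the model subclasses `orm.Model` instead of `osv.osv`. A minimal sketch of the 7.0 pattern (hypothetical model name):

```python
from openerp.osv import orm, fields

class my_table(orm.Model):
    _name = 'my.table'  # hypothetical model, for illustration only
    _columns = {
        'name': fields.char('Name', size=64),
    }
```

Under 7.0 the class no longer needs to be instantiated at the end of the file, which is why the trailing `import_odbc_dbtable()` call is dropped in the last hunk.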
@@ -36,7 +37,7 @@ class import_odbc_dbtable(osv.osv):
         'enabled': fields.boolean('Execution enabled'),
         'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
         'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
         'model_target': fields.many2one('ir.model', 'Target object'),
         'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
         'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
         'last_sync': fields.datetime('Last sync date', help="Datetime for the last successful sync. Later changes on the source may not be replicated on the destination"),
@@ -47,10 +48,10 @@ class import_odbc_dbtable(osv.osv):
         'last_warn_count': fields.integer('Last warning count', readonly=True),
         'last_log': fields.text('Last run log', readonly=True),
         'ignore_rel_errors': fields.boolean('Ignore relationship errors',
                                             help="On error try to reimport rows ignoring relationships."),
         'raise_import_errors': fields.boolean('Raise import errors',
-                                              help="Import errors not handled, intended for debugging purposes."
-                                                   + "\nAlso forces debug messages to be written to the server log."),
+                                              help="Import errors not handled, intended for debugging purposes."
+                                                   "\nAlso forces debug messages to be written to the server log."),
         }
     _defaults = {
         'enabled': True,
@@ -63,11 +64,11 @@ class import_odbc_dbtable(osv.osv):
         def find_m2o(field_list):
             """Find index of the first column with a many2one field."""
             for i, x in enumerate(field_list):
                 if len(x) > 3 and (x[-3:] == ':id' or x[-3:] == '/id'):
                     return i
             return -1
 
         def append_to_log(log, level, obj_id='', msg='', rel_id=''):
             if '_id_' in obj_id:
                 obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
             if ': .' in msg and not rel_id:
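`find_m2o` relies on the `import_data` column-naming convention: a column ending in `:id` or `/id` holds an external (XML) ID pointing to a many2one record. A standalone sketch of that check:

```python
# Index of the first column that references a many2one through an
# external ID, mirroring find_m2o above; -1 when there is none.
def first_external_id_column(field_list):
    for i, name in enumerate(field_list):
        if len(name) > 3 and (name.endswith(':id') or name.endswith('/id')):
            return i
    return -1

assert first_external_id_column(['name', 'partner_id/id', 'date']) == 1
assert first_external_id_column(['name', 'date']) == -1
```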
@@ -76,10 +77,8 @@ class import_odbc_dbtable(osv.osv):
                     rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
                 msg = msg[:msg.find(': .')]
             log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
 
         _logger.debug(data)
-        cols = list(flds)   #copy to avoid side effects
+        cols = list(flds)  # copy to avoid side effects
         errmsg = str()
         if table_obj.raise_import_errors:
             model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
@@ -88,15 +87,14 @@
                 model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
             except:
                 errmsg = str(sys.exc_info()[1])
         if errmsg and not table_obj.ignore_rel_errors:
             #Fail
             append_to_log(log, 'ERROR', data, errmsg)
             log['last_error_count'] += 1
             return False
         if errmsg and table_obj.ignore_rel_errors:
             #Warn and retry ignoring many2one fields...
             append_to_log(log, 'WARN', data, errmsg)
             log['last_warn_count'] += 1
             #Try ignoring each many2one (tip: in the SQL statement, select the most problematic FKs first)
             i = find_m2o(cols)
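When `ignore_rel_errors` is set, the method above logs a warning and recurses with the first many2one column removed, dropping one failing foreign key per attempt (hence the tip to select the most problematic FKs first). A simplified, loop-based sketch of the same fallback, with a hypothetical `do_import` callable standing in for `model_obj.import_data`:

```python
def import_with_m2o_fallback(cols, row, do_import):
    """Retry an import, dropping one external-ID column per failure."""
    cols, row = list(cols), list(row)  # copies; mutated below
    while True:
        try:
            do_import(cols, row)
            return True
        except Exception:
            # first many2one (external-ID) column, as find_m2o computes it
            i = next((k for k, c in enumerate(cols)
                      if c.endswith(':id') or c.endswith('/id')), -1)
            if i < 0:
                return False  # removed all m2o keys and it still fails
            del cols[i]
            del row[i]
```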
@@ -107,30 +105,28 @@
                 self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
             else:
                 #Fail
                 append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
                 log['last_error_count'] += 1
                 return False
         return True
 
     def import_run(self, cr, uid, ids=None, context=None):
         db_model = self.pool.get('base.external.dbsource')
         actions = self.read(cr, uid, ids, ['id', 'exec_order'])
         actions.sort(key=lambda x: (x['exec_order'], x['id']))
         #Consider each dbtable:
         for action_ref in actions:
             obj = self.browse(cr, uid, action_ref['id'])
-            if not obj.enabled: continue #skip
+            if not obj.enabled:
+                continue  # skip
             _logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
             _logger.debug('Importing %s...' % obj.name)
             #now() microseconds are stripped to avoid problems with SQL smalldate
-            #TODO: convert UTC Now to local timezone (http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime)
+            #TODO: convert UTC Now to local timezone
+            #http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
             model_name = obj.model_target.model
             model_obj = self.pool.get(model_name)
             xml_prefix = model_name.replace('.', '_') + "_id_"
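`xml_prefix` combined with the first source column gives every row a deterministic external ID, which is what makes re-runs update existing records instead of duplicating them (unless `noupdate` is set). For example:

```python
# External ID built for a source row, as above; hypothetical target
# model 'res.partner' and source primary key 42:
model_name = 'res.partner'
xml_prefix = model_name.replace('.', '_') + "_id_"
print(xml_prefix + str(42).strip())  # prints: res_partner_id_42
```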
@@ -143,10 +139,13 @@
             self.write(cr, uid, [obj.id], log)
             #Prepare SQL statement; replace "%(sync)s" with the last_sync date
-            if obj.last_sync: sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
-            else: sync = datetime(1900, 1, 1, 0, 0, 0)
+            if obj.last_sync:
+                sync = datetime.strptime(obj.last_sync, "%Y-%m-%d %H:%M:%S")
+            else:
+                sync = datetime(1900, 1, 1, 0, 0, 0)
             params = {'sync': sync}
-            res = db_model.execute(cr, uid, [obj.dbsource_id.id], obj.sql_source, params, metadata=True)
+            res = db_model.execute(cr, uid, [obj.dbsource_id.id],
+                                   obj.sql_source, params, metadata=True)
             #Exclude columns titled "None"; add (xml_)"id" column
             cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
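The source SQL can filter on the last sync date through the named `%(sync)s` placeholder, which `execute()` fills from `params`. Column aliases must be valid `import_data` names, the first column doubles as the row's external-ID suffix, and any column named `None` is excluded. A hypothetical `sql_source` (table and column names invented; the quoted alias is PostgreSQL-style):

```python
sql_source = """
    SELECT p.code     AS ref,              -- row[0]: also seeds the external ID
           p.descr    AS name,
           c.xml_code AS "category_id/id", -- many2one via external ID
           p.legacy_flag AS "None"         -- dropped by the import
    FROM products p
    JOIN categories c ON c.id = p.cat_id
    WHERE p.modified_on > %(sync)s
"""
```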
@@ -159,37 +158,40 @@
                 for i in cidx:
                     #TODO: Handle imported datetimes properly - convert from localtime to UTC!
                     v = row[i]
-                    if isinstance(v, str): v = v.strip()
+                    if isinstance(v, str):
+                        v = v.strip()
                     data.append(v)
                 data.append(xml_prefix + str(row[0]).strip())
                 #Import the row; on error, write line to the log
                 log['last_record_count'] += 1
                 self._import_data(cr, uid, cols, data, model_obj, obj, log)
                 if log['last_record_count'] % 500 == 0:
                     _logger.info('...%s rows processed...' % (log['last_record_count']))
             #Finished importing all rows
             #If no errors, write new sync date
             if not (log['last_error_count'] or log['last_warn_count']):
                 log['last_sync'] = log['start_run']
             level = logging.DEBUG
-            if log['last_warn_count']: level = logging.WARN
-            if log['last_error_count']: level = logging.ERROR
+            if log['last_warn_count']:
+                level = logging.WARN
+            if log['last_error_count']:
+                level = logging.ERROR
             _logger.log(level, 'Imported %s, %d rows, %d errors, %d warnings.' % (
                 model_name, log['last_record_count'], log['last_error_count'],
                 log['last_warn_count']))
             #Write run log, whether the table import is active or inactive
             if log['last_log']:
                 log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
             log.update({'last_log': '\n'.join(log['last_log'])})
-            log.update({'last_run': datetime.now().replace(microsecond=0)})  #second=0,
+            log.update({'last_run': datetime.now().replace(microsecond=0)})
             self.write(cr, uid, [obj.id], log)
         #Finished
         _logger.debug('Import job FINISHED.')
         return True
 
     def import_schedule(self, cr, uid, ids, context=None):
         cron_obj = self.pool.get('ir.cron')
         new_create_id = cron_obj.create(cr, uid, {
@@ -211,4 +213,4 @@ class import_odbc_dbtable(osv.osv):
             'type': 'ir.actions.act_window',
         }
-import_odbc_dbtable()
+#EOF
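`import_schedule` (partially shown above) creates an `ir.cron` record that calls `import_run` periodically. The `create()` values are elided from the hunk; a typical payload might look like this, using standard `ir.cron` fields (the actual values in the patch are not shown):

```python
# Hypothetical ir.cron values for scheduling import_run; the field names
# are the standard ir.cron columns, the values are only illustrative.
cron_vals = {
    'name': 'Import ODBC tables',
    'model': 'import.odbc.dbtable',
    'function': 'import_run',
    'args': '()',             # arguments tuple passed to import_run
    'interval_number': 1,
    'interval_type': 'days',  # minutes / hours / days / weeks / months
    'numbercall': -1,         # repeat indefinitely
    'active': False,          # review before enabling
}
```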