
7.0: fix flake8 errors

pull/38/head
Alexandre Fayolle 10 years ago
parent commit d970d99f42
24 changed files:
  1. base_optional_quick_create/model.py (8)
  2. configuration_helper/__openerp__.py (15)
  3. configuration_helper/config.py (22)
  4. email_template_template/__openerp__.py (24)
  5. fetchmail_attach_from_folder/match_algorithm/base.py (4)
  6. fetchmail_attach_from_folder/model/fetchmail_server.py (17)
  7. fetchmail_attach_from_folder/model/fetchmail_server_folder.py (12)
  8. import_odbc/import_odbc.py (127)
  9. mail_environment/__openerp__.py (6)
  10. mail_environment/env_mail.py (25)
  11. scheduler_error_mailer/__openerp__.py (3)
  12. scheduler_error_mailer/ir_cron.py (16)
  13. super_calendar/__openerp__.py (11)
  14. super_calendar/data/meetings.png (BIN)
  15. super_calendar/data/month_calendar.png (BIN)
  16. super_calendar/data/phone_calls.png (BIN)
  17. super_calendar/data/week_calendar.png (BIN)
  18. super_calendar/super_calendar.py (158)
  19. users_ldap_groups/__openerp__.py (10)
  20. users_ldap_groups/users_ldap_groups.py (47)
  21. users_ldap_groups/users_ldap_groups_operators.py (31)
  22. users_ldap_mail/__openerp__.py (9)
  23. users_ldap_mail/users_ldap_model.py (10)
  24. users_ldap_populate/__openerp__.py (7)

base_optional_quick_create/model.py (8)

@@ -33,7 +33,9 @@ class ir_model(orm.Model):
def _wrap_name_create(self, old_create, model):
def wrapper(cr, uid, name, context=None):
raise orm.except_orm(_('Error'), _("Can't create quickly. Opening create form"))
raise orm.except_orm(_('Error'),
_("Can't create quickly. "
"Opening create form"))
return wrapper
def _register_hook(self, cr, ids=None):
@@ -44,7 +46,9 @@ class ir_model(orm.Model):
model_name = model.model
model_obj = self.pool.get(model_name)
if not hasattr(model_obj, 'check_quick_create'):
model_obj.name_create = self._wrap_name_create(model_obj.name_create, model_name)
model_obj.name_create = self._wrap_name_create(
model_obj.name_create,
model_name)
model_obj.check_quick_create = True
return True
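
The two hunks above only rewrap an existing monkey-patching trick to satisfy the line-length check. A minimal stand-alone sketch of that trick (simplified names, a plain ValueError instead of orm.except_orm, not the addon's exact code):

    def _wrap_name_create(old_create, model):
        # old_create and model are kept for signature parity; the wrapper
        # unconditionally refuses the quick-create path
        def wrapper(cr, uid, name, context=None):
            raise ValueError("Can't create quickly. Opening create form")
        return wrapper

    class _Model(object):
        def name_create(self, cr, uid, name, context=None):
            return (1, name)

    model_obj = _Model()
    if not hasattr(model_obj, 'check_quick_create'):
        model_obj.name_create = _wrap_name_create(model_obj.name_create,
                                                  'res.partner')
        model_obj.check_quick_create = True  # guard against double patching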

configuration_helper/__openerp__.py (15)

@@ -35,8 +35,8 @@ Configuration Helper
This module :
* create automatically related fields in 'whatiwant.config.settings'
using those defined in 'res.company' : it avoid duplicated field definitions.
* create automatically related fields in 'whatiwant.config.settings' using
those defined in 'res.company' : it avoid duplicated field definitions.
* company_id field with default value is created
* onchange_company_id is defined to update all related fields
* supported fields: char, text, integer, float, datetime, date, boolean, m2o
@@ -59,8 +59,11 @@ How to use
Roadmap
-------
* support (or check support) for these field types : o2m, m2m, reference, property, selection
* automatically generate a default view for 'whatiwant.config.settings' (in --debug ?)
* support (or check support) for these field types : o2m, m2m, reference,
property, selection
* automatically generate a default view for 'whatiwant.config.settings'
(in --debug?)
Contributors
@@ -68,8 +71,8 @@ Contributors
* David BEAL <david.beal@akretion.com>
* Sébastien BEAU <sebastien.beau@akretion.com>
* Yannick Vaucher, Camptocamp, (code refactoring from his module 'delivery_carrier_label_postlogistics')
* Yannick Vaucher, Camptocamp, (code refactoring from his module
'delivery_carrier_label_postlogistics')
""",
'website': 'http://www.akretion.com/',
'data': [

22
configuration_helper/config.py

@@ -39,22 +39,23 @@ class AbstractConfigSettings(orm.AbstractModel):
def __init__(self, pool, cr):
super(AbstractConfigSettings, self).__init__(pool, cr)
if self._companyObject:
for field_key in self._companyObject._columns:
company_cols = self._companyObject._columns
for field_key in company_cols:
# allows to exclude some field
if self._filter_field(field_key):
args = ('company_id', field_key)
kwargs = {
'string': self._companyObject._columns[field_key].string,
'help': self._companyObject._columns[field_key].help,
'type': self._companyObject._columns[field_key]._type,
'string': company_cols[field_key].string,
'help': company_cols[field_key].help,
'type': company_cols[field_key]._type,
}
if '_obj' in self._companyObject._columns[field_key].__dict__.keys():
if '_obj' in company_cols[field_key].__dict__:
kwargs['relation'] = \
self._companyObject._columns[field_key]._obj
company_cols[field_key]._obj
if '_domain' in \
self._companyObject._columns[field_key].__dict__.keys():
company_cols[field_key].__dict__:
kwargs['domain'] = \
self._companyObject._columns[field_key]._domain
company_cols[field_key]._domain
field_key = re.sub('^' + self._prefix, '', field_key)
self._columns[field_key] = \
fields.related(*args, **kwargs)
@@ -74,7 +75,10 @@ class AbstractConfigSettings(orm.AbstractModel):
'company_id': _default_company,
}
def field_to_populate_as_related(self, cr, uid, field, company_cols, context=None):
def field_to_populate_as_related(self, cr, uid,
field,
company_cols,
context=None):
"""Only fields which comes from company with the right prefix
must be defined as related"""
if self._prefix + field in company_cols:
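
The first hunk in this file is a pure readability refactor: the repeated attribute chain self._companyObject._columns is hoisted into the local company_cols, which also shortens every line that uses it. The same move in a tiny self-contained sketch (class and field names invented):

    class _Field(object):
        def __init__(self, string, help):
            self.string = string
            self.help = help

    class _Company(object):
        _columns = {'phone': _Field('Phone', 'Main phone number')}

    company = _Company()
    company_cols = company._columns  # hoisted once, used everywhere below
    for field_key in company_cols:
        kwargs = {
            'string': company_cols[field_key].string,
            'help': company_cols[field_key].help,
        }
        assert kwargs['string'] == 'Phone'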

email_template_template/__openerp__.py (24)

@@ -26,11 +26,12 @@
'complexity': "expert",
"description": """If an organisation's email layout is a bit more
complicated, changes can be tedious when having to do that across several email
templates. So this addon allows to define templates for mails that is referenced
by other mail templates.
This way we can put the layout parts into the template template and only content
in the other templates. Changing the layout is then only a matter of changing
the template template.
templates. So this addon allows to define templates for mails that is
referenced by other mail templates.
This way we can put the layout parts into the template template and only
content in the other templates. Changing the layout is then only a matter of
changing the template template.
-----
Usage
@@ -49,7 +50,8 @@ For example, create a template template
Example Corp logo
Example Corp header
${object.body_text} <- this gets evaluated to the body_text of a template using this template template
${object.body_text} <- this gets evaluated to the body_text of a
template using this template template
Example Corp
Example street 42
Example city
@@ -61,7 +63,8 @@ Then in your template you write
Dear ${object.partner_id.name},
Your order has been booked on date ${object.date} for a total amount of ${object.sum}.
Your order has been booked on date ${object.date}
for a total amount of ${object.sum}.
And it will be evaluated to
@@ -71,7 +74,8 @@ And it will be evaluated to
Example Corp header
Dear Jane Doe,
Your order has been booked on date 04/17/2013 for a total amount of 42.
Your order has been booked on date 04/17/2013
for a total amount of 42.
Example Corp
Example street 42
Example city
@@ -80,8 +84,8 @@ And it will be evaluated to
Given the way evaluation works internally (body_text of the template template
is evaluated two times, first with the instance of email.template of your own
template, then with the object your template refers to), you can do some
trickery if you know that a template template is always used with the same
kind of model (that is, models that have the same field name):
trickery if you know that a template template is always used with the same kind
of model (that is, models that have the same field name):
In your template template:
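
The two-pass evaluation this description talks about can be sketched outside OpenERP with Mako, whose ${...} syntax matches the examples above (the addon's real rendering goes through OpenERP's own template engine; NS, the field names, and the literal texts here are invented):

    from mako.template import Template

    class NS(object):
        """Tiny attribute bag standing in for browse records."""
        def __init__(self, **kw):
            self.__dict__.update(kw)

    layout = ("Example Corp header\n"
              "${object.body_text}\n"
              "Example Corp footer")
    my_template = NS(body_text="Dear ${object.partner_id.name},")
    record = NS(partner_id=NS(name="Jane Doe"))

    # pass 1: expand the layout with the email.template instance
    first = Template(layout).render(object=my_template)
    # pass 2: expand the result with the record the template refers to
    final = Template(first).render(object=record)
    assert "Dear Jane Doe," in final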

fetchmail_attach_from_folder/match_algorithm/base.py (4)

@@ -26,10 +26,10 @@ class base(object):
'''Name shown to the user'''
required_fields = []
'''Fields on fetchmail_server folder that are required for this algorithm'''
'''Fields on fetchmail_server folder required for this algorithm'''
readonly_fields = []
'''Fields on fetchmail_server folder that are readonly for this algorithm'''
'''Fields on fetchmail_server folder readonly for this algorithm'''
def search_matches(self, cr, uid, conf, mail_message, mail_message_org):
'''Returns ids found for model with mail_message'''

fetchmail_attach_from_folder/model/fetchmail_server.py (17)

@@ -93,15 +93,16 @@ class fetchmail_server(Model):
match_algorithm = folder.get_algorithm()
if connection.select(folder.path)[0] != 'OK':
logger.error(
'Could not open mailbox %s on %s' % (folder.path, this.server))
logger.error('Could not open mailbox %s on %s',
folder.path,
this.server)
connection.select()
continue
result, msgids = this.get_msgids(connection)
if result != 'OK':
logger.error(
'Could not search mailbox %s on %s' % (
folder.path, this.server))
logger.error('Could not search mailbox %s on %s',
folder.path,
this.server)
continue
for msgid in msgids[0].split():
@@ -127,8 +128,10 @@
result, msgdata = connection.fetch(msgid, '(RFC822)')
if result != 'OK':
logger.error(
'Could not fetch %s in %s on %s' % (msgid, folder.path, this.server))
logger.error('Could not fetch %s in %s on %s',
msgid,
folder.path,
this.server)
continue
mail_message = self.pool.get('mail.thread').message_parse(
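
Several hunks in this commit make the same change to logging calls: the message is no longer %-formatted at the call site, the arguments are handed to the logger instead. A general stdlib illustration (values invented):

    import logging

    logger = logging.getLogger(__name__)
    path, server = 'INBOX/jobs', 'imap.example.com'

    # eager: the message is built even if ERROR records are filtered out
    logger.error('Could not open mailbox %s on %s' % (path, server))
    # lazy: formatting is deferred until a handler actually emits the record,
    # and tools that group by message template see the unformatted string
    logger.error('Could not open mailbox %s on %s', path, server)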

fetchmail_attach_from_folder/model/fetchmail_server_folder.py (12)

@@ -31,11 +31,13 @@ class fetchmail_server_folder(Model):
def _get_match_algorithms(self):
def get_all_subclasses(cls):
return cls.__subclasses__() + [subsub
for sub in cls.__subclasses__()
for subsub in get_all_subclasses(sub)]
return dict([(cls.__name__, cls) for cls in get_all_subclasses(
match_algorithm.base.base)])
return (cls.__subclasses__() +
[subsub
for sub in cls.__subclasses__()
for subsub in get_all_subclasses(sub)])
return dict([(cls.__name__, cls)
for cls in get_all_subclasses(
match_algorithm.base.base)])
def _get_match_algorithms_sel(self, cr, uid, context=None):
algorithms = []
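
The reflowed comprehension above is a recursive subclass walk: it collects every direct and indirect subclass of a base class. A self-contained sketch, with class names loosely modeled on the addon's match algorithms (illustrative, not its exact hierarchy):

    def get_all_subclasses(cls):
        return (cls.__subclasses__() +
                [subsub
                 for sub in cls.__subclasses__()
                 for subsub in get_all_subclasses(sub)])

    class base(object):
        pass

    class email_exact(base):
        pass

    class email_domain(email_exact):
        pass

    algorithms = dict((c.__name__, c) for c in get_all_subclasses(base))
    assert sorted(algorithms) == ['email_domain', 'email_exact']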

import_odbc/import_odbc.py (127)

@@ -35,25 +35,44 @@ class import_odbc_dbtable(orm.Model):
_columns = {
'name': fields.char('Datasource name', required=True, size=64),
'enabled': fields.boolean('Execution enabled'),
'dbsource_id': fields.many2one('base.external.dbsource', 'Database source', required=True),
'sql_source': fields.text('SQL', required=True, help='Column names must be valid "import_data" columns.'),
'dbsource_id': fields.many2one('base.external.dbsource',
'Database source',
required=True),
'sql_source': fields.text('SQL',
required=True,
help='Column names must be valid '
'"import_data" columns.'),
'model_target': fields.many2one('ir.model', 'Target object'),
'noupdate': fields.boolean('No updates', help="Only create new records; disable updates to existing records."),
'exec_order': fields.integer('Execution order', help="Defines the order to perform the import"),
'last_sync': fields.datetime('Last sync date',
help="Datetime for the last succesfull sync."
"\nLater changes on the source may not be replicated on the destination"),
'start_run': fields.datetime('Time started', readonly=True),
'last_run': fields.datetime('Time ended', readonly=True),
'last_record_count': fields.integer('Last record count', readonly=True),
'last_error_count': fields.integer('Last error count', readonly=True),
'last_warn_count': fields.integer('Last warning count', readonly=True),
'noupdate': fields.boolean('No updates',
help="Only create new records; disable "
"updates to existing records."),
'exec_order': fields.integer('Execution order',
help="Defines the order to perform "
"the import"),
'last_sync': fields.datetime(
'Last sync date',
help="Datetime for the last succesfull sync.\n"
"Later changes on the source may not be replicated "
"on the destination"),
'start_run': fields.datetime('Time started',
readonly=True),
'last_run': fields.datetime('Time ended',
readonly=True),
'last_record_count': fields.integer('Last record count',
readonly=True),
'last_error_count': fields.integer('Last error count',
readonly=True),
'last_warn_count': fields.integer('Last warning count',
readonly=True),
'last_log': fields.text('Last run log', readonly=True),
'ignore_rel_errors': fields.boolean('Ignore relationship errors',
help="On error try to reimport rows ignoring relationships."),
'raise_import_errors': fields.boolean('Raise import errors',
help="Import errors not handled, intended for debugging purposes."
"\nAlso forces debug messages to be written to the server log."),
'ignore_rel_errors': fields.boolean(
'Ignore relationship errors',
help="On error try to reimport rows ignoring relationships."
),
'raise_import_errors': fields.boolean(
'Raise import errors',
help="Import errors not handled, intended for debugging purposes."
"\nAlso forces debug messages to be written to the server log."),
}
_defaults = {
'enabled': True,
@@ -64,7 +83,7 @@ class import_odbc_dbtable(orm.Model):
"""Import data and returns error msg or empty string"""
def find_m2o(field_list):
""""Find index of first column with a one2many field"""
"""Find index of first column with a one2many field"""
for i, x in enumerate(field_list):
if len(x) > 3 and x[-3:] == ':id' or x[-3:] == '/id':
return i
@@ -72,21 +91,30 @@ class import_odbc_dbtable(orm.Model):
def append_to_log(log, level, obj_id='', msg='', rel_id=''):
if '_id_' in obj_id:
obj_id = '.'.join(obj_id.split('_')[:-2]) + ': ' + obj_id.split('_')[-1]
obj_id = ('.'.join(obj_id.split('_')[:-2])
+ ': '
+ obj_id.split('_')[-1])
if ': .' in msg and not rel_id:
rel_id = msg[msg.find(': .')+3:]
if '_id_' in rel_id:
rel_id = '.'.join(rel_id.split('_')[:-2]) + ': ' + rel_id.split('_')[-1]
rel_id = ('.'.join(rel_id.split('_')[:-2])
+ ': '
+ rel_id.split('_')[-1])
msg = msg[:msg.find(': .')]
log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5), obj_id, rel_id, msg))
log['last_log'].append('%s|%s\t|%s\t|%s' % (level.ljust(5),
obj_id,
rel_id,
msg))
_logger.debug(data)
cols = list(flds) # copy to avoid side effects
errmsg = str()
if table_obj.raise_import_errors:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
model_obj.import_data(cr, uid, cols, [data],
noupdate=table_obj.noupdate)
else:
try:
model_obj.import_data(cr, uid, cols, [data], noupdate=table_obj.noupdate)
model_obj.import_data(cr, uid, cols, [data],
noupdate=table_obj.noupdate)
except:
errmsg = str(sys.exc_info()[1])
if errmsg and not table_obj.ignore_rel_errors:
@@ -98,16 +126,22 @@ class import_odbc_dbtable(orm.Model):
# Warn and retry ignoring many2one fields...
append_to_log(log, 'WARN', data, errmsg)
log['last_warn_count'] += 1
# Try ignoring each many2one (tip: in the SQL sentence select more problematic FKs first)
# Try ignoring each many2one (tip: in the SQL sentence select more
# problematic FKs first)
i = find_m2o(cols)
if i >= 0:
# Try again without the [i] column
del cols[i]
del data[i]
self._import_data(cr, uid, cols, data, model_obj, table_obj, log)
self._import_data(cr, uid, cols,
data,
model_obj,
table_obj,
log)
else:
# Fail
append_to_log(log, 'ERROR', data, 'Removed all m2o keys and still fails.')
append_to_log(log, 'ERROR', data,
'Removed all m2o keys and still fails.')
log['last_error_count'] += 1
return False
return True
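
The hunk above rewraps the retry strategy of _import_data: when a row fails and relationship errors are to be ignored, the first external-id column (':id' or '/id' suffix) is dropped and the row is imported again, until nothing m2o-like is left. A simplified stand-alone sketch (endswith condenses the addon's suffix check; do_import stands in for import_data):

    def find_m2o(field_list):
        """Index of the first external-id column, or -1."""
        for i, x in enumerate(field_list):
            if x.endswith(':id') or x.endswith('/id'):
                return i
        return -1

    def import_row(cols, data, do_import):
        try:
            do_import(cols, data)
            return True
        except Exception:
            i = find_m2o(cols)
            if i < 0:
                return False  # removed all m2o keys and it still fails
            # retry without column i
            return import_row(cols[:i] + cols[i + 1:],
                              data[:i] + data[i + 1:], do_import)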
@@ -123,12 +157,15 @@ class import_odbc_dbtable(orm.Model):
if not obj.enabled:
continue # skip
_logger.setLevel(obj.raise_import_errors and logging.DEBUG or _loglvl)
_logger.debug('Importing %s...' % obj.name)
_logger.setLevel(obj.raise_import_errors and
logging.DEBUG or
_loglvl)
_logger.debug('Importing %s...', obj.name)
# now() microseconds are stripped to avoid problem with SQL smalldate
# now() microseconds are stripped to avoid problem with SQL
# smalldate
# TODO: convert UTC Now to local timezone
# http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
# http://stackoverflow.com/questions/4770297
model_name = obj.model_target.model
model_obj = self.pool.get(model_name)
xml_prefix = model_name.replace('.', '_') + "_id_"
@@ -150,15 +187,19 @@ class import_odbc_dbtable(orm.Model):
obj.sql_source, params, metadata=True)
# Exclude columns titled "None"; add (xml_)"id" column
cidx = [i for i, x in enumerate(res['cols']) if x.upper() != 'NONE']
cols = [x for i, x in enumerate(res['cols']) if x.upper() != 'NONE'] + ['id']
cidx = [i for i, x in enumerate(res['cols'])
if x.upper() != 'NONE']
cols = [x for i, x in enumerate(res['cols'])
if x.upper() != 'NONE'] + ['id']
# Import each row:
for row in res['rows']:
# Build data row; import only columns present in the "cols" list
# Build data row; import only columns present in the "cols"
# list
data = list()
for i in cidx:
# TODO: Handle imported datetimes properly - convert from localtime to UTC!
# TODO: Handle imported datetimes properly - convert from
# localtime to UTC!
v = row[i]
if isinstance(v, str):
v = v.strip()
@@ -169,7 +210,8 @@ class import_odbc_dbtable(orm.Model):
log['last_record_count'] += 1
self._import_data(cr, uid, cols, data, model_obj, obj, log)
if log['last_record_count'] % 500 == 0:
_logger.info('...%s rows processed...' % (log['last_record_count']))
_logger.info('...%s rows processed...',
(log['last_record_count']))
# Finished importing all rows
# If no errors, write new sync date
@@ -180,12 +222,19 @@ class import_odbc_dbtable(orm.Model):
level = logging.WARN
if log['last_error_count']:
level = logging.ERROR
_logger.log(level, 'Imported %s , %d rows, %d errors, %d warnings.' % (
model_name, log['last_record_count'], log['last_error_count'],
log['last_warn_count']))
_logger.log(level,
'Imported %s , %d rows, %d errors, %d warnings.',
model_name,
log['last_record_count'],
log['last_error_count'],
log['last_warn_count'])
# Write run log, either if the table import is active or inactive
if log['last_log']:
log['last_log'].insert(0, 'LEVEL|== Line == |== Relationship ==|== Message ==')
log['last_log'].insert(0,
'LEVEL|'
'== Line == |'
'== Relationship ==|'
'== Message ==')
log.update({'last_log': '\n'.join(log['last_log'])})
log.update({'last_run': datetime.now().replace(microsecond=0)})
self.write(cr, uid, [obj.id], log)
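
Most of this file's 127 changed lines come down to one mechanical E501 fix used throughout the commit: long string literals are split with Python's implicit adjacent-literal concatenation, which joins them at compile time so the value is unchanged and only the source line length shrinks:

    help_text = ("Import errors not handled, intended for debugging purposes."
                 "\nAlso forces debug messages to be written to the server log.")
    assert help_text.count('\n') == 1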

mail_environment/__openerp__.py (6)

@@ -59,7 +59,11 @@ password = openerp
'author': 'Camptocamp',
'license': 'AGPL-3',
'website': 'http://openerp.camptocamp.com',
'depends': ['mail', 'fetchmail', 'server_environment', 'server_environment_files', 'crm'],
'depends': ['mail',
'fetchmail',
'server_environment',
'server_environment_files',
'crm'],
'init_xml': [],
'update_xml': ['mail_view.xml'],
'demo_xml': [],

mail_environment/env_mail.py (25)

@@ -41,7 +41,8 @@ class IrMail(osv.osv):
if serv_config.has_section(global_section_name):
config_vals.update((serv_config.items(global_section_name)))
custom_section_name = '.'.join((global_section_name, mail_server.name))
custom_section_name = '.'.join((global_section_name,
mail_server.name))
if serv_config.has_section(custom_section_name):
config_vals.update(serv_config.items(custom_section_name))
@@ -65,7 +66,8 @@ class IrMail(osv.osv):
string='SMTP Port',
type="integer",
multi='outgoing_mail_config',
help="SMTP Port. Usually 465 for SSL, and 25 or 587 for other cases.",
help=("SMTP Port. Usually 465 for SSL, "
"and 25 or 587 for other cases."),
size=5),
'smtp_user': fields.function(
_get_smtp_conf,
@@ -89,10 +91,12 @@ class IrMail(osv.osv):
string='smtp_encryption',
type="char",
multi='outgoing_mail_config',
help="Choose the connection encryption scheme:\n"
"- none: SMTP sessions are done in cleartext.\n"
"- starttls: TLS encryption is requested at start of SMTP session (Recommended)\n"
"- ssl: SMTP sessions are encrypted with SSL/TLS through a dedicated port (default: 465)",
help=("Choose the connection encryption scheme:\n"
"- none: SMTP sessions are done in cleartext.\n"
"- starttls: TLS encryption is requested at start "
"of SMTP session (Recommended)\n"
"- ssl: SMTP sessions are encrypted with SSL/TLS "
"through a dedicated port (default: 465)"),
size=64)}
IrMail()
@@ -125,7 +129,8 @@ class FetchmailServer(osv.osv):
if serv_config.has_section(global_section_name):
config_vals.update(serv_config.items(global_section_name))
custom_section_name = '.'.join((global_section_name, fetchmail.name))
custom_section_name = '.'.join((global_section_name,
fetchmail.name))
if serv_config.has_section(custom_section_name):
config_vals.update(serv_config.items(custom_section_name))
@@ -146,12 +151,14 @@ class FetchmailServer(osv.osv):
operator = args[i][1]
if operator == '=':
for res in results:
if (res['type'] == args[i][2]) and (res['id'] not in result_ids):
if (res['type'] == args[i][2]) and \
(res['id'] not in result_ids):
result_ids.append(res['id'])
elif operator == 'in':
for search_vals in args[i][2]:
for res in results:
if (res['type'] == search_vals) and (res['id'] not in result_ids):
if (res['type'] == search_vals) and \
(res['id'] not in result_ids):
result_ids.append(res['id'])
else:
continue
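
Both rewrapped lookups in this file follow the same layering pattern: a global config section is read first, then a section named '<global>.<record name>' overrides it. A sketch with invented section names, using Python 2's ConfigParser (server_environment exposes its configuration through a compatible interface):

    from ConfigParser import SafeConfigParser  # Python 2, OpenERP 7 era

    def get_config_vals(serv_config, global_section, name):
        """Global section first, then '<global>.<name>' overrides it."""
        config_vals = {}
        if serv_config.has_section(global_section):
            config_vals.update(serv_config.items(global_section))
        custom_section = '.'.join((global_section, name))
        if serv_config.has_section(custom_section):
            config_vals.update(serv_config.items(custom_section))
        return config_vals

    conf = SafeConfigParser()
    conf.add_section('outgoing_mail')
    conf.set('outgoing_mail', 'smtp_port', '25')
    conf.add_section('outgoing_mail.mycompany')  # hypothetical server name
    conf.set('outgoing_mail.mycompany', 'smtp_port', '587')
    assert get_config_vals(conf, 'outgoing_mail', 'mycompany')['smtp_port'] == '587'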

scheduler_error_mailer/__openerp__.py (3)

@@ -33,7 +33,8 @@
Scheduler Error Mailer
======================
This module adds the possibility to send an e-mail when a scheduler raises an error.""",
This module adds the possibility to send an e-mail when a scheduler raises an
error.""",
'author': 'Akretion',
'website': 'http://www.akretion.com/',
'depends': ['email_template'],

scheduler_error_mailer/ir_cron.py (16)

@@ -36,10 +36,16 @@ class ir_cron(orm.Model):
'email_template': fields.many2one(
'email.template',
'Error E-mail Template',
help="Select the email template that will be sent when this scheduler fails."),
help="Select the email template that will be sent "
"when this scheduler fails."),
}
def _handle_callback_exception(self, cr, uid, model_name, method_name, args, job_id, job_exception):
def _handle_callback_exception(self, cr, uid,
model_name,
method_name,
args,
job_id,
job_exception):
res = super(ir_cron, self)._handle_callback_exception(
cr, uid, model_name, method_name, args, job_id, job_exception)
@@ -54,7 +60,8 @@ class ir_cron(orm.Model):
'dbname': cr.dbname,
}
logger.debug("Sending scheduler error email with context=%s" % context)
logger.debug("Sending scheduler error email with context=%s",
context)
self.pool['email.template'].send_mail(
cr, uid, my_cron.email_template.id, my_cron.id,
force_send=True, context=context)
@@ -67,4 +74,5 @@ class res_users(orm.Model):
def test_scheduler_failure(self, cr, uid, context=None):
"""This function is used to test and debug this module"""
raise orm.except_orm(_('Error :'), _("Task failure with UID = %d." % uid))
raise orm.except_orm(_('Error :'),
_("Task failure with UID = %d.") % uid)

super_calendar/__openerp__.py (11)

@@ -44,25 +44,26 @@ Super calendar → Configuration → Configurators
and create a new configurator. For instance, if you want to see meetings and
phone calls, you can create the following lines
.. image:: http://planet.domsense.com/wp-content/uploads/2012/04/meetings.png
.. image:: data/meetings.png
:width: 400 px
.. image:: http://planet.domsense.com/wp-content/uploads/2012/04/phone_calls.png
.. image:: data/phone_calls.png
:width: 400 px
Then, you can use the Generate Calendar button or wait for the scheduled
action (Generate Calendar Records) to be run.
When the calendar is generated, you can visualize it by the super calendar main menu.
When the calendar is generated, you can visualize it by the super calendar
main menu.
Here is a sample monthly calendar:
.. image:: http://planet.domsense.com/wp-content/uploads/2012/04/month_calendar.png
.. image:: data/month_calendar.png
:width: 400 px
And here is the weekly one:
.. image:: http://planet.domsense.com/wp-content/uploads/2012/04/week_calendar.png
.. image:: data/week_calendar.png
:width: 400 px
As you can see, several filters are available. A typical usage consists in

super_calendar/data/meetings.png (BIN; after: 962 × 374 px, 25 KiB)

super_calendar/data/month_calendar.png (BIN; after: 1064 × 536 px, 70 KiB)

super_calendar/data/phone_calls.png (BIN; after: 960 × 322 px, 26 KiB)

super_calendar/data/week_calendar.png (BIN; after: 1062 × 566 px, 78 KiB)

super_calendar/super_calendar.py (158)

@@ -40,7 +40,8 @@ class super_calendar_configurator(orm.Model):
_name = 'super.calendar.configurator'
_columns = {
'name': fields.char('Name', size=64, required=True),
'line_ids': fields.one2many('super.calendar.configurator.line', 'configurator_id', 'Lines'),
'line_ids': fields.one2many('super.calendar.configurator.line',
'configurator_id', 'Lines'),
}
def generate_calendar_records(self, cr, uid, ids, context=None):
@@ -48,71 +49,87 @@ class super_calendar_configurator(orm.Model):
super_calendar_pool = self.pool.get('super.calendar')
# removing old records
super_calendar_ids = super_calendar_pool.search(cr, uid, [], context=context)
super_calendar_pool.unlink(cr, uid, super_calendar_ids, context=context)
super_calendar_ids = super_calendar_pool.search(cr, uid, [],
context=context)
super_calendar_pool.unlink(cr, uid,
super_calendar_ids,
context=context)
for configurator in self.browse(cr, uid, configurator_ids, context):
for line in configurator.line_ids:
current_pool = self.pool.get(line.name.model)
current_record_ids = current_pool.search(
cr,
uid,
line.domain and safe_eval(line.domain) or [],
context=context)
for current_record_id in current_record_ids:
current_record = current_pool.browse(cr, uid, current_record_id, context=context)
if (line.user_field_id and
current_record[line.user_field_id.name] and
current_record[line.user_field_id.name]._table_name != 'res.users'):
raise orm.except_orm(
_('Error'),
_("The 'User' field of record %s (%s) does not refer to res.users")
% (current_record[line.description_field_id.name], line.name.model))
if (((line.description_field_id and current_record[line.description_field_id.name]) or
line.description_code) and
current_record[line.date_start_field_id.name]):
duration = False
if (not line.duration_field_id and
line.date_stop_field_id and
current_record[line.date_start_field_id.name] and
current_record[line.date_stop_field_id.name]):
date_start = datetime.strptime(
current_record[line.date_start_field_id.name],
tools.DEFAULT_SERVER_DATETIME_FORMAT
)
date_stop = datetime.strptime(
current_record[line.date_stop_field_id.name],
tools.DEFAULT_SERVER_DATETIME_FORMAT
)
duration = (date_stop - date_start).total_seconds() / 3600
elif line.duration_field_id:
duration = current_record[line.duration_field_id.name]
if line.description_type != 'code':
name = current_record[line.description_field_id.name]
else:
parse_dict = {'o': current_record}
mytemplate = Template(line.description_code)
name = mytemplate.render(**parse_dict)
super_calendar_values = {
'name': name,
'model_description': line.description,
'date_start': current_record[line.date_start_field_id.name],
'duration': duration,
'user_id': (
line.user_field_id and
current_record[line.user_field_id.name] and
current_record[line.user_field_id.name].id or
False
),
'configurator_id': configurator.id,
'res_id': line.name.model+','+str(current_record['id']),
'model_id': line.name.id,
}
super_calendar_pool.create(cr, uid, super_calendar_values, context=context)
values = self._generate_record_from_line(cr, uid,
configurator,
line,
context)
super_calendar_pool.create(cr, uid, values, context=context)
self._logger.info('Calendar generated')
return True
def _generate_record_from_line(self, cr, uid, configurator, line, context):
current_pool = self.pool.get(line.name.model)
current_record_ids = current_pool.search(
cr,
uid,
line.domain and safe_eval(line.domain) or [],
context=context)
for current_record_id in current_record_ids:
record = current_pool.browse(cr, uid,
current_record_id,
context=context)
if line.user_field_id and \
record[line.user_field_id.name] and \
record[line.user_field_id.name]._table_name != 'res.users':
raise orm.except_orm(
_('Error'),
_("The 'User' field of record %s (%s) "
"does not refer to res.users")
% (record[line.description_field_id.name],
line.name.model))
if (((line.description_field_id and
record[line.description_field_id.name]) or
line.description_code) and
record[line.date_start_field_id.name]):
duration = False
if (not line.duration_field_id and
line.date_stop_field_id and
record[line.date_start_field_id.name] and
record[line.date_stop_field_id.name]):
date_start = datetime.strptime(
record[line.date_start_field_id.name],
tools.DEFAULT_SERVER_DATETIME_FORMAT
)
date_stop = datetime.strptime(
record[line.date_stop_field_id.name],
tools.DEFAULT_SERVER_DATETIME_FORMAT
)
duration = (date_stop - date_start).total_seconds() / 3600
elif line.duration_field_id:
duration = record[line.duration_field_id.name]
if line.description_type != 'code':
name = record[line.description_field_id.name]
else:
parse_dict = {'o': record}
mytemplate = Template(line.description_code)
name = mytemplate.render(**parse_dict)
super_calendar_values = {
'name': name,
'model_description': line.description,
'date_start': record[line.date_start_field_id.name],
'duration': duration,
'user_id': (
line.user_field_id and
record[line.user_field_id.name] and
record[line.user_field_id.name].id or
False
),
'configurator_id': configurator.id,
'res_id': line.name.model+','+str(record['id']),
'model_id': line.name.id,
}
return super_calendar_values
class super_calendar_configurator_line(orm.Model):
_name = 'super.calendar.configurator.line'
@@ -120,7 +137,8 @@ class super_calendar_configurator_line(orm.Model):
'name': fields.many2one('ir.model', 'Model', required=True),
'description': fields.char('Description', size=128, required=True),
'domain': fields.char('Domain', size=512),
'configurator_id': fields.many2one('super.calendar.configurator', 'Configurator'),
'configurator_id': fields.many2one('super.calendar.configurator',
'Configurator'),
'description_type': fields.selection([
('field', 'Field'),
('code', 'Code'),
@@ -130,11 +148,14 @@ class super_calendar_configurator_line(orm.Model):
domain="[('model_id', '=', name),('ttype', '=', 'char')]"),
'description_code': fields.text(
'Description field',
help="Use '${o}' to refer to the involved object. E.g.: '${o.project_id.name}'"
help="Use '${o}' to refer to the involved object. "
"E.g.: '${o.project_id.name}'"
),
'date_start_field_id': fields.many2one(
'ir.model.fields', 'Start date field',
domain="['&','|',('ttype', '=', 'datetime'),('ttype', '=', 'date'),('model_id', '=', name)]",
domain="['&','|',('ttype', '=', 'datetime'),"
"('ttype', '=', 'date'),"
"('model_id', '=', name)]",
required=True),
'date_stop_field_id': fields.many2one(
'ir.model.fields', 'End date field',
@@ -153,11 +174,16 @@ class super_calendar(orm.Model):
_name = 'super.calendar'
_columns = {
'name': fields.char('Description', size=512, required=True),
'model_description': fields.char('Model Description', size=128, required=True),
'model_description': fields.char('Model Description',
size=128,
required=True),
'date_start': fields.datetime('Start date', required=True),
'duration': fields.float('Duration'),
'user_id': fields.many2one('res.users', 'User'),
'configurator_id': fields.many2one('super.calendar.configurator', 'Configurator'),
'res_id': fields.reference('Resource', selection=_models_get, size=128),
'configurator_id': fields.many2one('super.calendar.configurator',
'Configurator'),
'res_id': fields.reference('Resource',
selection=_models_get,
size=128),
'model_id': fields.many2one('ir.model', 'Model'),
}
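
Much of this file's diff is the mechanical extraction of _generate_record_from_line; behaviour is unchanged. The duration arithmetic it carries over is worth isolating (dates invented, format copied from tools.DEFAULT_SERVER_DATETIME_FORMAT):

    from datetime import datetime

    DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'

    date_start = datetime.strptime('2013-04-17 09:00:00', DATETIME_FORMAT)
    date_stop = datetime.strptime('2013-04-17 10:30:00', DATETIME_FORMAT)
    duration = (date_stop - date_start).total_seconds() / 3600
    assert duration == 1.5  # hours, as stored on the super.calendar record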

users_ldap_groups/__openerp__.py (10)

@@ -29,13 +29,13 @@ Adds user accounts to groups based on rules defined by the administrator.
Usage:
Define mappings in Settings->Companies->[your company]->tab configuration->[your
ldap server].
Define mappings in Settings->Companies->[your company]->tab
configuration->[your ldap server].
Decide whether you want only groups mapped from ldap (Only ldap groups=y) or a
mix of manually set groups and ldap groups (Only ldap groups=n). Setting this to
'no' will result in users never losing privileges when you remove them from a
ldap group, so that's a potential security issue. It is still the default to
mix of manually set groups and ldap groups (Only ldap groups=n). Setting this
to 'no' will result in users never losing privileges when you remove them from
a ldap group, so that's a potential security issue. It is still the default to
prevent losing group information by accident.
For active directory, use LDAP attribute 'memberOf' and operator 'contains'.

47
users_ldap_groups/users_ldap_groups.py

@@ -41,7 +41,8 @@ class CompanyLDAPGroupMapping(orm.Model):
return tuple(operators)
_columns = {
'ldap_id': fields.many2one('res.company.ldap', 'LDAP server', required=True),
'ldap_id': fields.many2one('res.company.ldap', 'LDAP server',
required=True),
'ldap_attribute': fields.char(
'LDAP attribute', size=64,
help='The LDAP attribute to check.\n'
@@ -72,17 +73,23 @@ class CompanyLDAP(orm.Model):
'only_ldap_groups': fields.boolean(
'Only ldap groups',
help='If this is checked, manual changes to group membership are '
'undone on every login (so OpenERP groups are always synchronous '
'with LDAP groups). If not, manually added groups are preserved.')
'undone on every login (so OpenERP groups are always '
'synchronous with LDAP groups). If not, manually added '
'groups are preserved.')
}
_default = {
'only_ldap_groups': False,
}
def get_or_create_user(self, cr, uid, conf, login, ldap_entry, context=None):
user_id = super(CompanyLDAP, self).get_or_create_user(cr, uid, conf, login,
ldap_entry, context)
def get_or_create_user(self, cr, uid,
conf,
login,
ldap_entry,
context=None):
_super = super(CompanyLDAP, self)
user_id = _super.get_or_create_user(cr, uid, conf, login,
ldap_entry, context)
if not user_id:
return user_id
logger = logging.getLogger('users_ldap_groups')
@@ -90,16 +97,24 @@ class CompanyLDAP(orm.Model):
userobj = self.pool.get('res.users')
conf_all = self.read(cr, uid, conf['id'], ['only_ldap_groups'])
if(conf_all['only_ldap_groups']):
logger.debug('deleting all groups from user %d' % user_id)
userobj.write(cr, uid, [user_id], {'groups_id': [(5, )]}, context=context)
for mapping in mappingobj.read(cr, uid, mappingobj.search(
cr, uid, [('ldap_id', '=', conf['id'])]), []):
operator = getattr(users_ldap_groups_operators, mapping['operator'])()
logger.debug('checking mapping %s' % mapping)
if operator.check_value(ldap_entry, mapping['ldap_attribute'],
mapping['value'], conf, self, logger):
logger.debug('adding user %d to group %s' %
logger.debug('deleting all groups from user %d', user_id)
userobj.write(cr, uid,
[user_id],
{'groups_id': [(5, )]},
context=context)
mapping_ids = mappingobj.search(cr, uid,
[('ldap_id', '=', conf['id'])])
for mapping in mappingobj.read(cr, uid, mapping_ids, []):
operator = getattr(users_ldap_groups_operators,
mapping['operator'])()
logger.debug('checking mapping %s', mapping)
if operator.check_value(ldap_entry,
mapping['ldap_attribute'],
mapping['value'],
conf,
self,
logger):
logger.debug('adding user %d to group %s',
(user_id, mapping['group'][1]))
userobj.write(cr, uid, [user_id],
{'groups_id': [(4, mapping['group'][0])]},
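
The tuples written to groups_id in the rewrapped calls above are the ORM's x2many write commands; for reference (group id invented):

    group_id = 42
    commands = [
        (5,),           # unlink every group first (the 'only_ldap_groups' path)
        (4, group_id),  # then link one existing group, leaving others untouched
    ]
    # usage: userobj.write(cr, uid, [user_id], {'groups_id': commands})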

users_ldap_groups/users_ldap_groups_operators.py (31)

@@ -26,22 +26,41 @@ class LDAPOperator:
class contains(LDAPOperator):
def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger):
return (attribute in ldap_entry[1]) and (value in ldap_entry[1][attribute])
def check_value(self,
ldap_entry,
attribute, value,
ldap_config,
company,
logger):
return (attribute in ldap_entry[1] and
value in ldap_entry[1][attribute])
class equals(LDAPOperator):
def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger):
return attribute in ldap_entry[1] and unicode(value) == unicode(ldap_entry[1][attribute])
def check_value(self,
ldap_entry,
attribute, value,
ldap_config,
company,
logger):
return (attribute in ldap_entry[1] and
unicode(value) == unicode(ldap_entry[1][attribute]))
class query(LDAPOperator):
def check_value(self, ldap_entry, attribute, value, ldap_config, company, logger):
def check_value(self,
ldap_entry,
attribute,
value,
ldap_config,
company,
logger):
query_string = Template(value).safe_substitute(dict(
[(attr, ldap_entry[1][attribute][0]) for attr in ldap_entry[1]]
)
)
logger.debug('evaluating query group mapping, filter: %s' % query_string)
logger.debug('evaluating query group mapping, filter: %s',
query_string)
results = company.query(ldap_config, query_string)
logger.debug(results)
return bool(results)
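
The query operator builds its LDAP filter with string.Template.safe_substitute, which leaves unknown $placeholders in place instead of raising. A stand-alone sketch with an invented (dn, attrs) pair shaped like a python-ldap search result:

    from string import Template

    ldap_entry = ('cn=jdoe,dc=example,dc=com',
                  {'cn': ['jdoe'], 'uid': ['jdoe']})
    value = '(&(objectClass=group)(member=$cn))'
    # substitute each attribute's first value into the filter template
    query_string = Template(value).safe_substitute(
        dict((attr, ldap_entry[1][attr][0]) for attr in ldap_entry[1]))
    assert query_string == '(&(objectClass=group)(member=jdoe))'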

users_ldap_mail/__openerp__.py (9)

@@ -25,11 +25,12 @@
'depends': ["auth_ldap"],
'author': "Daniel Reis (https://launchpad.com/~dreis-pt)",
'description': """\
Allows to define the LDAP attributes to use to retrieve user name and e-mail address.
Allows to define the LDAP attributes to use to retrieve user name and e-mail
address.
The default attribute used for the name is "cn".
For Active Directory, you might prefer to use "displayName" instead.
AD also supports the "mail" attribute, so it can be mapped into OpenERP.
The default attribute used for the name is `cn`. For Active Directory, you
might prefer to use `displayName` instead. AD also supports the `mail`
attribute, so it can be mapped into OpenERP.
""",
'category': "Tools",
'data': [

users_ldap_mail/users_ldap_model.py (10)

@@ -60,8 +60,9 @@ class CompanyLDAP(orm.Model):
return cr.dictfetchall()
def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry):
values = super(CompanyLDAP, self).map_ldap_attributes(cr, uid, conf,
login, ldap_entry)
_super = super(CompanyLDAP, self)
values = _super.map_ldap_attributes(cr, uid, conf,
login, ldap_entry)
mapping = [
('name', 'name_attribute'),
('email', 'mail_attribute'),
@@ -71,6 +72,7 @@ class CompanyLDAP(orm.Model):
if conf[conf_name]:
values[value_key] = ldap_entry[1][conf[conf_name]][0]
except KeyError:
_log.warning('No LDAP attribute "%s" found for login "%s"' % (
conf.get(conf_name), values.get('login')))
_log.warning('No LDAP attribute "%s" found for login "%s"',
conf.get(conf_name),
values.get('login'))
return values

users_ldap_populate/__openerp__.py (7)

@@ -24,8 +24,8 @@
"author": "Therp BV",
"category": 'Tools',
"description": """
This module allows to prepopulate the user database with all entries in the LDAP
database.
This module allows to prepopulate the user database with all entries in the
LDAP database.
In order to schedule the population of the user database on a regular basis,
create a new scheduled action with the following properties:
@@ -34,7 +34,8 @@ create a new scheduled action with the following properties:
- Function: action_populate
- Arguments: [res.company.ldap.id]
Substitute res.company.ldap.id with the actual id of the res.company.ldap object you want to query.
Substitute res.company.ldap.id with the actual id of the res.company.ldap
object you want to query.
""",
"depends": [
