Browse Source

Merge pull request #148 from osiell/8.0-auditlog_read

[IMP] Module 'auditlog' - Performing logs on 'read' operations
pull/176/head
Yannick Vaucher 10 years ago
parent
commit
845980403b
  1. 1
      auditlog/README.rst
  2. 151
      auditlog/models/rule.py
  3. 58
      auditlog/tests/test_auditlog.py
  4. 2
      auditlog/views/auditlog_view.xml

1
auditlog/README.rst

@ -21,7 +21,6 @@ For further information, please visit:
Known issues / Roadmap
======================
* log ``read`` operations
* log only operations triggered by some users (currently it logs all users)
* group logs by HTTP query (thanks to Werkzeug)?
* group HTTP query by user session?

151
auditlog/models/rule.py

@ -19,7 +19,7 @@
#
##############################################################################
from openerp import models, fields, api, modules, _, SUPERUSER_ID
from openerp import models, fields, api, modules, _, SUPERUSER_ID, sql_db
FIELDS_BLACKLIST = [
'id', 'create_uid', 'create_date', 'write_uid', 'write_date',
@ -198,15 +198,22 @@ class auditlog_rule(models.Model):
modules.registry.RegistryManager.signal_registry_change(cr.dbname)
return True
@api.multi
def unlink(self):
"""Unsubscribe rules before removing them."""
self.unsubscribe()
return super(auditlog_rule, self).unlink()
def _make_create(self):
"""Instanciate a create method that log its calls."""
@api.model
def create(self, vals, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
new_record = create.origin(self, vals, **kwargs)
new_values = dict(
(d['id'], d) for d in new_record.sudo().read(
list(self._columns)))
list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, new_record.ids,
'create', None, new_values)
@ -215,41 +222,44 @@ class auditlog_rule(models.Model):
def _make_read(self):
"""Instanciate a read method that log its calls."""
# FIXME: read() seems a bit tricky, improve to handle old/new api
# @api.v7
# def read(self, cr, user, ids, fields=None, context=None,
# load='_classic_read', **kwargs):
# print "LOG READ", fields, load, kwargs
# # avoid loops
# if self.env.context.get('auditlog_method_intercepted'):
# return read.origin(
# self, cr, user, ids, fields, context, load, **kwargs)
# # call original method with a modified context
# context = dict(
# self.env.context, auditlog_method_intercepted=True)
# result = read.origin(
# self.with_context(context),
# cr, user, ids, fields, context, load, **kwargs)
# print "RESULT", result
# return result
# @api.v8
# def read(self, fields=None, load='_classic_read', **kwargs):
# print "LOG READ", fields, load, kwargs
# # avoid loops
# if self.env.context.get('auditlog_method_intercepted'):
# return read.origin(self, fields, load, **kwargs)
# # call original method with a modified context
# context = dict(
# self.env.context, auditlog_method_intercepted=True)
# result = read.origin(
# self.with_context(context), fields, load, **kwargs)
# print "RESULT", result
# return result
def read(self, *args, **kwargs):
result = read.origin(self, *args, **kwargs)
# Sometimes the result is not a list but a dictionary
# Also, we can not modify the current result as it will break calls
result2 = result
if not isinstance(result2, list):
result2 = [result]
read_values = dict((d['id'], d) for d in result2)
# Old API
if args and isinstance(args[0], sql_db.Cursor):
cr, uid, ids = args[0], args[1], args[2]
if isinstance(ids, (int, long)):
ids = [ids]
# If the call came from auditlog itself, skip logging:
# avoid logs on `read` produced by auditlog during internal
# processing: read data of relevant records, 'ir.model',
# 'ir.model.fields'... (no interest in logging such operations)
if kwargs.get('context', {}).get('auditlog_disabled'):
return result
env = api.Environment(cr, uid, {'auditlog_disabled': True})
rule_model = env['auditlog.rule']
rule_model.sudo().create_logs(
env.uid, self._name, ids,
'read', read_values)
# New API
else:
# If the call came from auditlog itself, skip logging:
# avoid logs on `read` produced by auditlog during internal
# processing: read data of relevant records, 'ir.model',
# 'ir.model.fields'... (no interest in logging such operations)
if self.env.context.get('auditlog_disabled'):
return result
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids,
'read', read_values)
return result
return read
@ -257,12 +267,13 @@ class auditlog_rule(models.Model):
"""Instanciate a write method that log its calls."""
@api.multi
def write(self, vals, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
old_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._columns)))
(d['id'], d) for d in self.sudo().read(list(self._fields)))
result = write.origin(self, vals, **kwargs)
new_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._columns)))
(d['id'], d) for d in self.sudo().read(list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids,
'write', old_values, new_values)
@ -273,9 +284,10 @@ class auditlog_rule(models.Model):
"""Instanciate an unlink method that log its calls."""
@api.multi
def unlink(self, **kwargs):
self = self.with_context(auditlog_disabled=True)
rule_model = self.env['auditlog.rule']
old_values = dict(
(d['id'], d) for d in self.sudo().read(list(self._columns)))
(d['id'], d) for d in self.sudo().read(list(self._fields)))
rule_model.sudo().create_logs(
self.env.uid, self._name, self.ids, 'unlink', old_values)
return unlink.origin(self, **kwargs)
@ -294,9 +306,10 @@ class auditlog_rule(models.Model):
log_model = self.env['auditlog.log']
for res_id in res_ids:
model_model = self.env[res_model]
res_name = model_model.browse(res_id).name_get()
name = model_model.browse(res_id).name_get()
res_name = name and name[0] and name[0][1]
vals = {
'name': res_name and res_name[0] and res_name[0][1] or False,
'name': res_name,
'model_id': self.pool._auditlog_model_cache[res_model],
'res_id': res_id,
'method': method,
@ -307,23 +320,67 @@ class auditlog_rule(models.Model):
diff = DictDiffer(
new_values.get(res_id, EMPTY_DICT),
old_values.get(res_id, EMPTY_DICT))
self._create_log_line_on_write(
log, diff.changed(), old_values, new_values)
self._create_log_line_on_create(log, diff.added(), new_values)
if method is 'create':
self._create_log_line_on_create(log, diff.added(), new_values)
elif method is 'read':
self._create_log_line_on_read(
log, old_values.get(res_id, EMPTY_DICT).keys(), old_values)
elif method is 'write':
self._create_log_line_on_write(
log, diff.changed(), old_values, new_values)
def _get_field(self, model, field_name):
cache = self.pool._auditlog_field_cache
if field_name not in cache.get(model.model, {}):
cache.setdefault(model.model, {})
# We use 'search()' then 'read()' instead of the 'search_read()'
# to take advantage of the 'classic_write' loading
# - we use 'search()' then 'read()' instead of the 'search_read()'
# to take advantage of the 'classic_write' loading
# - search the field in the current model and those it inherits
field_model = self.env['ir.model.fields']
all_model_ids = [model.id]
all_model_ids.extend(model.inherited_model_ids.ids)
field = field_model.search(
[('model_id', '=', model.id), ('name', '=', field_name)])
field_data = field.read(load='_classic_write')[0]
cache[model.model][field_name] = field_data
[('model_id', 'in', all_model_ids), ('name', '=', field_name)])
# The field can be a dummy one, like 'in_group_X' on 'res.users'
# As such we can't log it (field_id is required to create a log)
if not field:
cache[model.model][field_name] = False
else:
field_data = field.read(load='_classic_write')[0]
cache[model.model][field_name] = field_data
return cache[model.model][field_name]
def _create_log_line_on_read(
self, log, fields_list, read_values):
"""Log field filled on a 'read' operation."""
log_line_model = self.env['auditlog.log.line']
for field_name in fields_list:
if field_name in FIELDS_BLACKLIST:
continue
field = self._get_field(log.model_id, field_name)
if field:
log_vals = self._prepare_log_line_vals_on_read(
log, field, read_values)
log_line_model.create(log_vals)
def _prepare_log_line_vals_on_read(self, log, field, read_values):
"""Prepare the dictionary of values used to create a log line on a
'read' operation.
"""
vals = {
'field_id': field['id'],
'log_id': log.id,
'old_value': read_values[log.res_id][field['name']],
'old_value_text': read_values[log.res_id][field['name']],
'new_value': False,
'new_value_text': False,
}
if field['relation'] and '2many' in field['ttype']:
old_value_text = self.env[field['relation']].browse(
vals['old_value']).name_get()
vals['old_value_text'] = old_value_text
return vals
def _create_log_line_on_write(
self, log, fields_list, old_values, new_values):
"""Log field updated on a 'write' operation."""

58
auditlog/tests/test_auditlog.py

@ -22,44 +22,58 @@ from openerp.tests.common import TransactionCase
class TestAuditlog(TransactionCase):
def test_LogCreation(self):
"""First test, caching some data."""
auditlog_log = self.env['auditlog.log']
groups_model_id = self.env.ref('base.model_res_groups').id
self.env['auditlog.rule'].create({
def setUp(self):
super(TestAuditlog, self).setUp()
self.groups_model_id = self.env.ref('base.model_res_groups').id
self.groups_rule = self.env['auditlog.rule'].create({
'name': 'testrule for groups',
'model_id': groups_model_id,
'model_id': self.groups_model_id,
'log_read': True,
'log_create': True,
'log_write': True,
'log_unlink': True,
'state': 'subscribed',
})
def tearDown(self):
self.groups_rule.unlink()
super(TestAuditlog, self).tearDown()
def test_LogCreation(self):
"""First test, caching some data."""
auditlog_log = self.env['auditlog.log']
group = self.env['res.groups'].create({
'name': 'testgroup1',
})
self.assertTrue(auditlog_log.search([
('model_id', '=', groups_model_id),
('model_id', '=', self.groups_model_id),
('method', '=', 'create'),
('res_id', '=', group.id),
]))
]).ensure_one())
group.write({'name': 'Testgroup1'})
self.assertTrue(auditlog_log.search([
('model_id', '=', groups_model_id),
('model_id', '=', self.groups_model_id),
('method', '=', 'write'),
('res_id', '=', group.id),
]))
]).ensure_one())
group.unlink()
self.assertTrue(auditlog_log.search([
('model_id', '=', groups_model_id),
('model_id', '=', self.groups_model_id),
('method', '=', 'unlink'),
('res_id', '=', group.id),
]))
]).ensure_one())
def test_LogCreation2(self):
"""Second test, using cached data of the first one."""
self.env['res.groups'].create({
auditlog_log = self.env['auditlog.log']
testgroup2 = self.env['res.groups'].create({
'name': 'testgroup2',
})
self.assertTrue(auditlog_log.search([
('model_id', '=', self.groups_model_id),
('method', '=', 'create'),
('res_id', '=', testgroup2.id),
]).ensure_one())
def test_LogCreation3(self):
"""Third test, two groups, the latter being the parent of the former.
@ -67,7 +81,8 @@ class TestAuditlog(TransactionCase):
of a 'write' log with a deleted resource (so with no text
representation).
"""
testgroup3 = self.env['res.groups'].create({
auditlog_log = self.env['auditlog.log']
testgroup3 = testgroup3 = self.env['res.groups'].create({
'name': 'testgroup3',
})
testgroup4 = self.env['res.groups'].create({
@ -75,3 +90,18 @@ class TestAuditlog(TransactionCase):
'implied_ids': [(4, testgroup3.id)],
})
testgroup4.write({'implied_ids': [(2, testgroup3.id)]})
self.assertTrue(auditlog_log.search([
('model_id', '=', self.groups_model_id),
('method', '=', 'create'),
('res_id', '=', testgroup3.id),
]).ensure_one())
self.assertTrue(auditlog_log.search([
('model_id', '=', self.groups_model_id),
('method', '=', 'create'),
('res_id', '=', testgroup4.id),
]).ensure_one())
self.assertTrue(auditlog_log.search([
('model_id', '=', self.groups_model_id),
('method', '=', 'write'),
('res_id', '=', testgroup4.id),
]).ensure_one())

2
auditlog/views/auditlog_view.xml

@ -29,7 +29,7 @@
<field name="action_id" readonly="1" groups="base.group_no_one"/>
</group>
<group colspan="1">
<field name="log_read" invisible="1"/>
<field name="log_read"/>
<field name="log_write"/>
<field name="log_unlink"/>
<field name="log_create"/>

Loading…
Cancel
Save