You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

487 lines
18 KiB

6 years ago
  1. import base64
  2. import logging
  3. import os
  4. import pstats
  5. import re
  6. import subprocess
  7. import sys
  8. from contextlib import contextmanager
  9. from cProfile import Profile
  10. import lxml.html
  11. from psycopg2 import OperationalError, ProgrammingError
  12. from odoo import _, api, exceptions, fields, models, sql_db, tools
  13. if sys.version_info[0] >= 3:
  14. from io import StringIO as IO
  15. else:
  16. from io import BytesIO as IO
# Timestamp format used to build attachment file names (e.g. 20190101_120000).
DATETIME_FORMAT_FILE = "%Y%m%d_%H%M%S"

# First bytes of a cProfile dump that contains no recorded calls; used to
# avoid storing empty statistics as attachments.
CPROFILE_EMPTY_CHARS = b"{0"

# PostgreSQL logging parameters applied per-session while a profile is
# enabled, so every executed statement is written to the server log.
PGOPTIONS = {
    'log_min_duration_statement': '0',
    'client_min_messages': 'notice',
    'log_min_messages': 'warning',
    'log_min_error_statement': 'error',
    'log_duration': 'off',
    'log_error_verbosity': 'verbose',
    'log_lock_waits': 'on',
    'log_statement': 'none',
    'log_temp_files': '0',
}
# Same parameters rendered as a PGOPTIONS environment-variable string
# ("-c param=value ..." pairs) for libpq connections.
PGOPTIONS_ENV = ' '.join(["-c %s=%s" % (param, value)
                          for param, value in PGOPTIONS.items()])

# Column names of a pstats text report line, in the order they appear;
# also used as the named groups of LINE_STATS_RE below.
PY_STATS_FIELDS = [
    'ncalls',
    'tottime', 'tt_percall',
    'cumtime', 'ct_percall',
    'file', 'lineno', 'method',
]
# Parses one data line of `pstats.Stats.print_stats()` output, e.g.
# "12/10  0.001  0.000  0.002  0.000 file.py:42(method)".
# Note `ncalls` may be the "total/primitive" form.
LINE_STATS_RE = re.compile(
    r'(?P<%s>\d+/?\d+|\d+)\s+(?P<%s>\d+\.?\d+)\s+(?P<%s>\d+\.?\d+)\s+'
    r'(?P<%s>\d+\.?\d+)\s+(?P<%s>\d+\.?\d+)\s+(?P<%s>.*):(?P<%s>\d+)'
    r'\((?P<%s>.*)\)' % tuple(PY_STATS_FIELDS))

_logger = logging.getLogger(__name__)
class ProfilerProfilePythonLine(models.Model):
    # One parsed line of a cProfile/pstats report, stored so Python
    # profiling results can be browsed and sorted from the UI.
    _name = 'profiler.profile.python.line'
    _description = 'Profiler Python Line to save cProfiling results'
    _rec_name = 'cprof_fname'
    # Most expensive entries (by cumulative time) first.
    _order = 'cprof_cumtime DESC'

    # Owning profiling session; lines are removed with their profile.
    profile_id = fields.Many2one('profiler.profile', required=True,
                                 ondelete='cascade')
    cprof_tottime = fields.Float("Total time")
    cprof_ncalls = fields.Float("Calls")
    cprof_nrcalls = fields.Float("Recursive Calls")
    cprof_ttpercall = fields.Float("Time per call")
    cprof_cumtime = fields.Float("Cumulative time")
    cprof_ctpercall = fields.Float("CT per call")
    # "filename:lineno(method)" string exactly as printed by pstats.
    cprof_fname = fields.Char("Filename:lineno(method)")
  57. class ProfilerProfile(models.Model):
  58. _name = 'profiler.profile'
  59. _description = 'Profiler Profile'
  60. @api.model
  61. def _find_loggers_path(self):
  62. try:
  63. self.env.cr.execute("SHOW log_directory")
  64. except ProgrammingError:
  65. return
  66. log_directory = self.env.cr.fetchone()[0]
  67. self.env.cr.execute("SHOW log_filename")
  68. log_filename = self.env.cr.fetchone()[0]
  69. log_path = os.path.join(log_directory, log_filename)
  70. if not os.path.isabs(log_path):
  71. # It is relative path then join data_directory
  72. self.env.cr.execute("SHOW data_directory")
  73. data_dir = self.env.cr.fetchone()[0]
  74. log_path = os.path.join(data_dir, log_path)
  75. return log_path
  76. name = fields.Char(required=True)
  77. enable_python = fields.Boolean(default=True)
  78. enable_postgresql = fields.Boolean(
  79. default=False,
  80. help="It requires postgresql server logs seudo-enabled")
  81. use_py_index = fields.Boolean(
  82. name="Get cProfiling report", default=False,
  83. help="Index human-readable cProfile attachment."
  84. "\nTo access this report, you must open the cprofile attachment view "
  85. "using debug mode.\nWarning: Uses more resources.")
  86. date_started = fields.Char(readonly=True)
  87. date_finished = fields.Char(readonly=True)
  88. state = fields.Selection([
  89. ('enabled', 'Enabled'),
  90. ('disabled', 'Disabled'),
  91. ], default='disabled', readonly=True, required=True)
  92. description = fields.Text(readonly=True)
  93. attachment_count = fields.Integer(compute="_compute_attachment_count")
  94. pg_log_path = fields.Char(help="Getting the path to the logger",
  95. default=_find_loggers_path)
  96. pg_remote = fields.Char()
  97. pg_stats_slowest_html = fields.Html(
  98. "PostgreSQL Stats - Slowest", readonly=True)
  99. pg_stats_time_consuming_html = fields.Html(
  100. "PostgreSQL Stats - Time Consuming", readonly=True)
  101. pg_stats_most_frequent_html = fields.Html(
  102. "PostgreSQL Stats - Most Frequent", readonly=True)
  103. py_stats_lines = fields.One2many(
  104. "profiler.profile.python.line", "profile_id", "PY Stats Lines")
  105. @api.multi
  106. def _compute_attachment_count(self):
  107. for record in self:
  108. self.attachment_count = self.env['ir.attachment'].search_count([
  109. ('res_model', '=', self._name), ('res_id', '=', record.id)])
  110. @api.onchange('enable_postgresql')
  111. def onchange_enable_postgresql(self):
  112. if not self.enable_postgresql:
  113. return
  114. try:
  115. self.env.cr.execute("SHOW config_file")
  116. except ProgrammingError:
  117. pg_config_file = None
  118. else:
  119. pg_config_file = self.env.cr.fetchone()[0]
  120. db_host = tools.config.get('db_host')
  121. if db_host == 'localhost' or db_host == '127.0.0.1':
  122. db_host = False
  123. if db_host and pg_config_file:
  124. pg_config_file = 'postgres@%s:%s' % (db_host, pg_config_file)
  125. self.pg_remote = db_host
  126. self.description = (
  127. "You need seudo-enable logs from your "
  128. "postgresql-server configuration file.\n\t- %s\n"
  129. "or your can looking for the service using: "
  130. "'ps aux | grep postgres'\n\n"
  131. ) % pg_config_file
  132. self.description += """Adds the following parameters:
  133. # Pre-enable logs
  134. logging_collector=on
  135. log_destination='stderr'
  136. log_directory='/var/log/postgresql'
  137. log_filename='postgresql.log'
  138. log_rotation_age=0
  139. log_checkpoints=on
  140. log_hostname=on
  141. log_line_prefix='%t [%p]: [%l-1] db=%d,user=%u '
  142. log_connections=on
  143. log_disconnections=on
  144. lc_messages='C'
  145. log_timezone='UTC'
  146. Reload configuration using the following query:
  147. - select pg_reload_conf()
  148. Or restart the postgresql server service.
  149. FYI This module will enable the following parameter from the client
  150. It's not needed added them to configuration file if database user is
  151. superuser or use PGOPTIONS environment variable in the terminal
  152. that you start your odoo server.
  153. If you don't add these parameters or PGOPTIONS this module will try do it.
  154. # Enable logs from postgresql.conf
  155. log_min_duration_statement=0
  156. client_min_messages=notice
  157. log_min_messages=warning
  158. log_min_error_statement=error
  159. log_duration=off
  160. log_error_verbosity=verbose
  161. log_lock_waits=on
  162. log_statement='none'
  163. log_temp_files=0
  164. # Or enable logs from PGOPTIONS environment variable before to start odoo
  165. # server
  166. export PGOPTIONS="-c log_min_duration_statement=0 \\
  167. -c client_min_messages=notice -c log_min_messages=warning \\
  168. -c log_min_error_statement=error -c log_connections=on \\
  169. -c log_disconnections=on -c log_duration=off -c log_error_verbosity=verbose \\
  170. -c log_lock_waits=on -c log_statement='none' -c log_temp_files=0"
  171. ~/odoo_path/odoo-bin ...
  172. """
  173. profile = Profile()
  174. enabled = None
  175. pglogs_enabled = None
  176. # True to activate it False to inactivate None to do nothing
  177. activate_deactivate_pglogs = None
  178. # Params dict with values before to change it.
  179. psql_params_original = {}
  180. @api.model
  181. def now_utc(self):
  182. self.env.cr.execute("SHOW log_timezone")
  183. zone = self.env.cr.fetchone()[0]
  184. self.env.cr.execute("SELECT to_char(current_timestamp AT TIME "
  185. "ZONE %s, 'YYYY-MM-DD HH24:MI:SS')", (zone,))
  186. now = self.env.cr.fetchall()[0][0]
  187. # now = fields.Datetime.to_string(
  188. # fields.Datetime.context_timestamp(self, datetime.now()))
  189. return now
  190. @api.multi
  191. def enable(self):
  192. self.ensure_one()
  193. if tools.config.get('workers'):
  194. raise exceptions.UserError(
  195. _("Start the odoo server using the parameter '--workers=0'"))
  196. _logger.info("Enabling profiler")
  197. self.write(dict(
  198. date_started=self.now_utc(),
  199. state='enabled'
  200. ))
  201. ProfilerProfile.enabled = self.enable_python
  202. self._reset_postgresql()
  203. @api.multi
  204. def _reset_postgresql(self):
  205. if not self.enable_postgresql:
  206. return
  207. if ProfilerProfile.pglogs_enabled:
  208. _logger.info("Using postgresql.conf or PGOPTIONS predefined.")
  209. return
  210. os.environ['PGOPTIONS'] = (
  211. PGOPTIONS_ENV if self.state == 'enabled' else '')
  212. self._reset_connection(self.state == 'enabled')
  213. def _reset_connection(self, enable):
  214. for connection in sql_db._Pool._connections:
  215. with connection[0].cursor() as pool_cr:
  216. params = (PGOPTIONS if enable
  217. else ProfilerProfile.psql_params_original)
  218. for param, value in params.items():
  219. try:
  220. pool_cr.execute('SET %s TO %s' % (param, value))
  221. except (OperationalError, ProgrammingError) as oe:
  222. pool_cr.connection.rollback()
  223. raise exceptions.UserError(
  224. _("It's not possible change parameter.\n%s\n"
  225. "Please, disable postgresql or re-enable it "
  226. "in order to read the instructions") % str(oe))
  227. ProfilerProfile.activate_deactivate_pglogs = enable
  228. def get_stats_string(self, cprofile_path):
  229. pstats_stream = IO()
  230. pstats_obj = pstats.Stats(cprofile_path, stream=pstats_stream)
  231. pstats_obj.sort_stats('cumulative')
  232. pstats_obj.print_stats()
  233. pstats_stream.seek(0)
  234. stats_string = pstats_stream.read()
  235. pstats_stream = None
  236. return stats_string
  237. @api.multi
  238. def dump_postgresql_logs(self, indexed=None):
  239. self.ensure_one()
  240. self.description = ''
  241. pgbadger_cmd = self._get_pgbadger_command()
  242. if pgbadger_cmd is None:
  243. return
  244. pgbadger_cmd_str = subprocess.list2cmdline(pgbadger_cmd)
  245. self.description += (
  246. '\nRunning the command: %s') % pgbadger_cmd_str
  247. result = tools.exec_command_pipe(*pgbadger_cmd)
  248. datas = result[1].read()
  249. if not datas:
  250. self.description += "\nPgbadger output is empty!"
  251. return
  252. fname = self._get_attachment_name("pg_stats", ".html")
  253. self.env['ir.attachment'].create({
  254. 'name': fname,
  255. 'res_id': self.id,
  256. 'res_model': self._name,
  257. 'datas': base64.b64encode(datas),
  258. 'datas_fname': fname,
  259. 'description': 'pgbadger html output',
  260. })
  261. xpaths = [
  262. '//*[@id="slowest-individual-queries"]',
  263. '//*[@id="time-consuming-queries"]',
  264. '//*[@id="most-frequent-queries"]',
  265. ]
  266. # pylint: disable=unbalanced-tuple-unpacking
  267. self.pg_stats_slowest_html, self.pg_stats_time_consuming_html, \
  268. self.pg_stats_most_frequent_html = self._compute_pgbadger_html(
  269. datas, xpaths)
  270. @staticmethod
  271. def _compute_pgbadger_html(html_doc, xpaths):
  272. html = lxml.html.document_fromstring(html_doc)
  273. result = []
  274. for this_xpath in xpaths:
  275. this_result = html.xpath(this_xpath)
  276. result.append(
  277. tools.html_sanitize(lxml.html.tostring(this_result[0])))
  278. return result
  279. @api.multi
  280. def _get_pgbadger_command(self):
  281. self.ensure_one()
  282. # TODO: Catch early the following errors.
  283. try:
  284. pgbadger_bin = tools.find_in_path('pgbadger')
  285. except IOError:
  286. self.description += (
  287. "\nInstall 'apt-get install pgbadger'")
  288. return
  289. try:
  290. if not self.pg_log_path:
  291. raise IOError
  292. with open(self.pg_log_path, "r"):
  293. pass
  294. except IOError:
  295. self.description += (
  296. "\nCheck if exists and has permission to read the log file."
  297. "\nMaybe running: chmod 604 '%s'"
  298. ) % self.pg_log_path
  299. return
  300. pgbadger_cmd = [
  301. pgbadger_bin, '-f', 'stderr', '--sample', '15',
  302. '-o', '-', '-x', 'html', '--quiet',
  303. '-T', self.name,
  304. '-d', self.env.cr.dbname,
  305. '-b', self.date_started,
  306. '-e', self.date_finished,
  307. self.pg_log_path,
  308. ]
  309. return pgbadger_cmd
  310. def _get_attachment_name(self, prefix, suffix):
  311. started = fields.Datetime.from_string(
  312. self.date_started).strftime(DATETIME_FORMAT_FILE)
  313. finished = fields.Datetime.from_string(
  314. self.date_finished).strftime(DATETIME_FORMAT_FILE)
  315. fname = '%s_%d_%s_to_%s%s' % (
  316. prefix, self.id, started, finished, suffix)
  317. return fname
  318. @api.model
  319. def dump_stats(self):
  320. attachment = None
  321. with tools.osutil.tempdir() as dump_dir:
  322. cprofile_fname = self._get_attachment_name("py_stats", ".cprofile")
  323. cprofile_path = os.path.join(dump_dir, cprofile_fname)
  324. _logger.info("Dumping cProfile '%s'", cprofile_path)
  325. ProfilerProfile.profile.dump_stats(cprofile_path)
  326. with open(cprofile_path, "rb") as f_cprofile:
  327. datas = f_cprofile.read()
  328. if datas and datas != CPROFILE_EMPTY_CHARS:
  329. attachment = self.env['ir.attachment'].create({
  330. 'name': cprofile_fname,
  331. 'res_id': self.id,
  332. 'res_model': self._name,
  333. 'datas': base64.b64encode(datas),
  334. 'datas_fname': cprofile_fname,
  335. 'description': 'cProfile dump stats',
  336. })
  337. _logger.info("A datas was saved, here %s", attachment.name)
  338. try:
  339. if self.use_py_index:
  340. py_stats = self.get_stats_string(cprofile_path)
  341. self.env['profiler.profile.python.line'].search([
  342. ('profile_id', '=', self.id)]).unlink()
  343. for py_stat_line in py_stats.splitlines():
  344. py_stat_line = py_stat_line.strip('\r\n ')
  345. py_stat_line_match = LINE_STATS_RE.match(
  346. py_stat_line) if py_stat_line else None
  347. if not py_stat_line_match:
  348. continue
  349. data = dict((
  350. field, py_stat_line_match.group(field))
  351. for field in PY_STATS_FIELDS)
  352. data['rcalls'], data['calls'] = (
  353. "%(ncalls)s/%(ncalls)s" % data).split('/')[:2]
  354. self.env['profiler.profile.python.line'].create({
  355. 'cprof_tottime': data['tottime'],
  356. 'cprof_ncalls': data['calls'],
  357. 'cprof_nrcalls': data['rcalls'],
  358. 'cprof_ttpercall': data['tt_percall'],
  359. 'cprof_cumtime': data['cumtime'],
  360. 'cprof_ctpercall': data['ct_percall'],
  361. 'cprof_fname': (
  362. "%(file)s:%(lineno)s (%(method)s)" % data),
  363. 'profile_id': self.id,
  364. })
  365. attachment.index_content = py_stats
  366. except IOError:
  367. # Fancy feature but not stop process if fails
  368. _logger.info("There was an error while getting the stats"
  369. "from the cprofile_path")
  370. # pylint: disable=unnecessary-pass
  371. pass
  372. self.dump_postgresql_logs()
  373. _logger.info("cProfile stats stored.")
  374. else:
  375. _logger.info("cProfile stats empty.")
  376. return attachment
  377. @api.multi
  378. def clear(self, reset_date=True):
  379. self.ensure_one()
  380. _logger.info("Clear profiler")
  381. if reset_date:
  382. self.date_started = self.now_utc()
  383. ProfilerProfile.profile.clear()
  384. @api.multi
  385. def disable(self):
  386. self.ensure_one()
  387. _logger.info("Disabling profiler")
  388. ProfilerProfile.enabled = False
  389. self.state = 'disabled'
  390. self.date_finished = self.now_utc()
  391. self.dump_stats()
  392. self.clear(reset_date=False)
  393. self._reset_postgresql()
  394. @staticmethod
  395. @contextmanager
  396. def profiling():
  397. """Thread local profile management, according to the shared "enabled"
  398. """
  399. if ProfilerProfile.enabled:
  400. _logger.debug("Catching profiling")
  401. ProfilerProfile.profile.enable()
  402. try:
  403. yield
  404. finally:
  405. if ProfilerProfile.enabled:
  406. ProfilerProfile.profile.disable()
  407. @api.multi
  408. def action_view_attachment(self):
  409. attachments = self.env['ir.attachment'].search([
  410. ('res_model', '=', self._name), ('res_id', '=', self.id)])
  411. action = self.env.ref("base.action_attachment").read()[0]
  412. action['domain'] = [('id', 'in', attachments.ids)]
  413. return action
  414. @api.model
  415. def set_pgoptions_enabled(self):
  416. """Verify if postgresql has configured the parameters for logging"""
  417. ProfilerProfile.pglogs_enabled = True
  418. pgoptions_enabled = bool(os.environ.get('PGOPTIONS'))
  419. _logger.info('Logging enabled from environment '
  420. 'variable PGOPTIONS? %s', pgoptions_enabled)
  421. if pgoptions_enabled:
  422. return
  423. pgparams_required = {
  424. 'log_min_duration_statement': '0',
  425. }
  426. for param, value in pgparams_required.items():
  427. # pylint: disable=sql-injection
  428. self.env.cr.execute("SHOW %s" % param)
  429. db_value = self.env.cr.fetchone()[0].lower()
  430. if value.lower() != db_value:
  431. ProfilerProfile.pglogs_enabled = False
  432. break
  433. ProfilerProfile.psql_params_original = self.get_psql_params(
  434. self.env.cr, PGOPTIONS.keys())
  435. _logger.info('Logging enabled from postgresql.conf? %s',
  436. ProfilerProfile.pglogs_enabled)
  437. @staticmethod
  438. def get_psql_params(cr, params):
  439. result = {}
  440. for param in set(params):
  441. # pylint: disable=sql-injection
  442. cr.execute('SHOW %s' % param)
  443. result.update(cr.dictfetchone())
  444. return result
  445. @api.model
  446. def _setup_complete(self):
  447. self.set_pgoptions_enabled()
  448. return super(ProfilerProfile, self)._setup_complete()