###################################################################################
#
# Copyright (c) 2017-2019 MuK IT GmbH.
#
# This file is part of MuK Utils
# (see https://mukit.at).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################

import math
import base64
import logging
import mimetypes

from odoo import registry, api, models, _
from odoo.tools.mimetypes import guess_mimetype
from odoo.tools.misc import split_every
from odoo.exceptions import AccessError

_logger = logging.getLogger(__name__)

class IrAttachment(models.Model):

    _inherit = 'ir.attachment'

    #----------------------------------------------------------
    # Helper
    #----------------------------------------------------------
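
    # Values that reset both storage backends (filestore reference and
    # database column) before new data is written.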
    @api.model
    def _get_datas_inital_vals(self):
        return {
            'store_fname': False,
            'db_datas': False,
        }
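
    # Extends the given values with the file size, checksum and index
    # content computed from the decoded binary data.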
    @api.model
    def _update_datas_vals(self, vals, attach, bin_data):
        vals.update({
            'file_size': len(bin_data),
            'checksum': self._compute_checksum(bin_data),
            'index_content': self._index(bin_data, attach.datas_fname, attach.mimetype),
        })
        return vals
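
    # Captures the current filestore reference so the old file can be
    # removed once the new values have been written.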
    @api.model
    def _get_datas_clean_vals(self, attach):
        vals = {}
        if attach.store_fname:
            vals['store_fname'] = attach.store_fname
        return vals
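
    # Deletes the previously stored file after the write has succeeded.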
    @api.model
    def _clean_datas_after_write(self, vals):
        if 'store_fname' in vals:
            self._file_delete(vals['store_fname'])

    #----------------------------------------------------------
    # Actions
    #----------------------------------------------------------
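
    # Action hook that migrates the selected attachments to the
    # currently configured storage location.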
    @api.multi
    def action_migrate(self):
        self.migrate()

    #----------------------------------------------------------
    # Functions
    #----------------------------------------------------------
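
    # Returns the supported storage locations.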
    @api.model
    def storage_locations(self):
        return ['db', 'file']
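
    # Migrates every binary attachment that is not yet stored in the
    # configured location. The res_field leaf lifts the default filter
    # that would otherwise hide field-bound attachments.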
    @api.model
    def force_storage(self):
        if not self.env.user._is_admin():
            raise AccessError(_('Only administrators can execute this action.'))
        storage_domain = {
            'db': ('db_datas', '=', False),
            'file': ('store_fname', '=', False),
        }
        record_domain = [
            '&', ('type', '=', 'binary'),
            '&', storage_domain[self._storage()],
            '|', ('res_field', '=', False), ('res_field', '!=', False)
        ]
        self.search(record_domain).migrate(batch_size=100)
        return True
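
    # Rewrites 'datas' on each attachment, which triggers _inverse_datas
    # and moves the content to the configured storage. With a batch size
    # the transaction is committed after every completed batch.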
    @api.multi
    def migrate(self, batch_size=None):
        commit_on_batch = bool(batch_size)
        batch_size = batch_size or len(self)
        storage_location = self._storage().upper()
        batches = math.ceil(len(self) / batch_size)
        for index, attachment in enumerate(self, start=1):
            _logger.info("Migrate Attachment %s of %s to %s [Batch %s of %s]",
                index % batch_size or batch_size, batch_size, storage_location,
                math.ceil(index / batch_size), batches
            )
            attachment.with_context(migration=True).write({
                'datas': attachment.datas
            })
            if commit_on_batch and not index % batch_size:
                self.env.cr.commit()

    #----------------------------------------------------------
    # Read
    #----------------------------------------------------------
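
    # During a migration the existing mimetype is kept instead of being
    # recomputed from the written values.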
    @api.multi
    def _compute_mimetype(self, values):
        if self.env.context.get('migration') and len(self) == 1:
            return self.mimetype or 'application/octet-stream'
        else:
            return super(IrAttachment, self)._compute_mimetype(values)

    #----------------------------------------------------------
    # Create, Write, Delete
    #----------------------------------------------------------
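
    # Stores the binary content in the filestore or in the database,
    # depending on the configured location, and removes the file left
    # behind in the previous location.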
    @api.multi
    def _inverse_datas(self):
        location = self._storage()
        for attach in self:
            value = attach.datas
            bin_data = base64.b64decode(value) if value else b''
            vals = self._get_datas_inital_vals()
            vals = self._update_datas_vals(vals, attach, bin_data)
            if value and location != 'db':
                vals['store_fname'] = self._file_write(value, vals['checksum'])
            else:
                vals['db_datas'] = value
            clean_vals = self._get_datas_clean_vals(attach)
            models.Model.write(attach.sudo(), vals)
            self._clean_datas_after_write(clean_vals)