###################################################################################
#
# Copyright (c) 2017-2019 MuK IT GmbH.
#
# This file is part of MuK Filestore Field
# (see https://mukit.at).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################
import base64
import hashlib
import logging
import os
import re
import shutil
import tempfile

from collections import defaultdict

from odoo import fields, tools
from odoo.addons.muk_utils.tools.file import ensure_path_directories
from odoo.tools import config, human_size

_logger = logging.getLogger(__name__)

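
# Root location of the dedicated file store used by file fields, placed next
# to the regular Odoo filestore inside the configured data_dir.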
def get_store_path(dbname):
    return os.path.join(config.get("data_dir"), "files", dbname)

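
# Remove files that are on the checklist but are no longer referenced by any
# file field column in the database (a simple garbage collection pass).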
def clean_store(dbname, env):
    tables = defaultdict(set)
    for model_name in env.registry.models:
        model = env[model_name]
        if not model._abstract:
            for name, field in model._fields.items():
                if field.type == "file":
                    tables[model._table].add(name)
    checklist = set()
    filestore = get_store_path(dbname)
    path = os.path.join(filestore, "checklist")
    for root, dirs, files in os.walk(path):
        for file in files:
            checkpath = os.path.join(root, file)
            relpath = os.path.relpath(checkpath, path)
            checklist.add(os.path.join(filestore, relpath))
    env.cr.commit()
    whitelist = set()
    for table, table_fields in tables.items():
        select_fields = list(table_fields)
        env.cr.execute("LOCK {} IN SHARE MODE".format(table))
        select_query = "SELECT {}".format(", ".join(select_fields))
        where_query = "WHERE {} IN %(paths)s".format(select_fields[0])
        if len(select_fields) > 1:
            for field in select_fields[1:]:
                where_query += " OR {} IN %(paths)s".format(field)
        sql_query = "{} FROM {} {};".format(select_query, table, where_query)
        for paths in env.cr.split_for_in_conditions(checklist):
            env.cr.execute(sql_query, {"paths": paths})
            for row in env.cr.fetchall():
                for column in row:
                    whitelist.add(column)
    remove = checklist - whitelist
    for file in remove:
        try:
            os.unlink(file)
        except (OSError, IOError):
            _logger.warning("Deleting file from %s failed!", file, exc_info=True)
    with tools.ignore(OSError):
        shutil.rmtree(path)
    env.cr.commit()
    _logger.info(
        "Cleaned files [ %d checked | %d removed ]", len(checklist), len(remove)
    )

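
# Field that stores its value as a file on disk and keeps only the file path
# in the database column. The representation returned on read depends on the
# context flags used when accessing the field (see convert_to_record below).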
class File(fields.Field):
    type = "file"
    column_type = ("varchar", "varchar")

    _slots = {
        "prefetch": False,
        "depends_context": (
            "bin_size", "human_size", "path", "bytes", "stream", "checksum", "base64",
        ),
    }

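    # Build the storage path for a given checksum (sharded by its first two
    # characters) and make sure the parent directories exist.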
    def _get_file_path(self, checksum, dbname):
        name = os.path.join(checksum[:2], checksum)
        name = re.sub("[.]", "", name).strip("/\\")
        filestore = get_store_path(dbname)
        path = os.path.join(filestore, name)
        ensure_path_directories(path)
        return path

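    # Register a stored file on the checklist so that clean_store() can later
    # verify whether it is still referenced and remove it if not.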
    def _add_to_checklist(self, path, dbname):
        filestore = get_store_path(dbname)
        relpath = os.path.relpath(path, filestore)
        checklist = os.path.join(filestore, "checklist", relpath)
        if not os.path.exists(checklist):
            ensure_path_directories(checklist)
            open(checklist, "ab").close()

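    # Compute the SHA1 hex digest of either a bytes value or a readable,
    # file-like object (consumed in 4096 byte chunks).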
    def _get_checksum(self, value):
        if isinstance(value, bytes):
            return hashlib.sha1(value).hexdigest()
        else:
            checksum = hashlib.sha1()
            while True:
                chunk = value.read(4096)
                if not chunk:
                    return checksum.hexdigest()
                checksum.update(chunk)

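    # Write the value (bytes, base64 string or file-like object) to the file
    # store and return the resulting path, which is what gets stored in the
    # database column. The previously stored file, if any, is put on the
    # checklist for later cleanup.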
    def convert_to_column(self, value, record, values=None, validate=True):
        path = None
        try:
            current_path = record.with_context({"path": True})[self.name]
            if current_path:
                self._add_to_checklist(current_path, record.env.cr.dbname)
            if not value:
                return None
            binary = None
            if isinstance(value, bytes):
                binary = value
            elif isinstance(value, str):
                binary = base64.b64decode(value)
            if binary:
                checksum = self._get_checksum(binary)
                path = self._get_file_path(checksum, record.env.cr.dbname)
                with open(path, "wb") as file:
                    file.write(binary)
                self._add_to_checklist(path, record.env.cr.dbname)
            else:
                checksum = self._get_checksum(value)
                path = self._get_file_path(checksum, record.env.cr.dbname)
                value.seek(0, 0)
                with open(path, "wb") as file:
                    while True:
                        chunk = value.read(4096)
                        if not chunk:
                            break
                        file.write(chunk)
                self._add_to_checklist(path, record.env.cr.dbname)
        except (IOError, OSError):
            _logger.warning("Writing file to %s failed!", path, exc_info=True)
        return path

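    # Read the stored file back in the representation requested through the
    # record context: "human_size", "bin_size", "path", "bytes", "stream",
    # "checksum", or (by default) a base64 encoded value.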
    def convert_to_record(self, value, record):
        if value and isinstance(value, str) and os.path.exists(value):
            try:
                with open(value, "rb") as file:
                    if record._context.get("human_size"):
                        return human_size(file.seek(0, 2))
                    elif record._context.get("bin_size"):
                        return file.seek(0, 2)
                    elif record._context.get("path"):
                        return value
                    elif record._context.get("bytes"):
                        return file.read()
                    elif record._context.get("stream"):
                        temp = tempfile.TemporaryFile()
                        while True:
                            chunk = file.read(4096)
                            if not chunk:
                                temp.seek(0)
                                return temp
                            temp.write(chunk)
                    elif record._context.get("checksum"):
                        checksum = hashlib.sha1()
                        while True:
                            chunk = file.read(4096)
                            if not chunk:
                                return checksum.hexdigest()
                            checksum.update(chunk)
                    else:
                        return base64.b64encode(file.read())
            except (IOError, OSError):
                _logger.warning("Reading file from %s failed!", value, exc_info=True)
        return None if value is False else value

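    # Export the file content, either as raw bytes ("export_raw_data" in the
    # context) or base64 encoded; unreadable files export as an empty string.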
    def convert_to_export(self, value, record):
        if value:
            try:
                with open(value, "rb") as file:
                    if record._context.get("export_raw_data"):
                        return file.read()
                    return base64.b64encode(file.read())
            except (IOError, OSError):
                _logger.warning("Reading file from %s failed!", value, exc_info=True)
        return ""
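

# Illustrative usage sketch (not part of the original module; model and field
# names here are hypothetical): a model would declare the field like any other
# Odoo field and read it back through the context flags handled above, e.g.
#
#     class Document(models.Model):
#         _name = "my.document"
#
#         content = File(string="Content")
#
#     record = env["my.document"].create({"content": base64.b64encode(b"data")})
#     record.with_context(path=True).content      # path inside the file store
#     record.with_context(checksum=True).content  # SHA1 digest of the file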