Compare commits

...

No commits in common. '14.0' and 'master' have entirely different histories.
14.0 ... master

  1. .dockerignore (3)
  2. .gitignore (6)
  3. CHANGELOG (2)
  4. CloudronManifest.json (40)
  5. DESCRIPTION.md (9)
  6. Dockerfile (79)
  7. LICENSE (21)
  8. POSTINSTALL.md (7)
  9. README.md (26)
  10. bin/addons (144)
  11. bin/autoaggregate (165)
  12. bin/config-generate (52)
  13. bin/direxec (75)
  14. bin/log (15)
  15. bin/pot (11)
  16. bin/preparedb (18)
  17. custom/build.d/100-repos-aggregate (18)
  18. custom/build.d/110-addons-link (18)
  19. custom/build.d/200-dependencies (30)
  20. custom/build.d/300-fontconfig (31)
  21. custom/build.d/400-clean (51)
  22. custom/build.d/500-compile (10)
  23. custom/build.d/900-dependencies-cleanup (21)
  24. custom/dependencies/apt.txt (8)
  25. custom/dependencies/apt_build.txt (3)
  26. custom/dependencies/gem.txt (0)
  27. custom/dependencies/npm.txt (0)
  28. custom/dependencies/pip.txt (57)
  29. custom/src/addons.yaml (168)
  30. custom/src/private/.editorconfig (20)
  31. custom/src/repos.yaml (899)
  32. custom/ssh/cloudron_git.rsa (38)
  33. custom/ssh/cloudron_git.rsa.pub (1)
  34. custom/ssh/config (7)
  35. custom/ssh/known_hosts (14)
  36. dev-scripts/README.md (1)
  37. dev-scripts/build-push-install.sh (11)
  38. dev-scripts/docker-run-openldap.sh (11)
  39. dev-scripts/docker-run-postgres.sh (25)
  40. dev-scripts/docker-run.sh (11)
  41. dev-scripts/simulate-cloudron.sh (30)
  42. lib/doodbalib/__init__.py (173)
  43. lib/doodbalib/installer.py (119)
  44. logo.png (BIN)
  45. manifest/CHANGELOG.md (2)
  46. manifest/DESCRIPTION.md (12)
  47. manifest/POSTINSTALL.md (6)
  48. manifest/logo.png (BIN)
  49. nginx.conf (93)
  50. odoo.conf.sample (3)
  51. odoo12CE_install.sh (271)
  52. sql_db.py (693)
  53. start.sh (188)

.dockerignore (3)

@@ -1,3 +0,0 @@
.dockerignore
.docker
dev-scripts/

.gitignore (6)

@@ -1,6 +0,0 @@
*.tar.gz
.DS_Store
.env
.docker/
.idea/
*.iml

CHANGELOG (2)

@@ -0,0 +1,2 @@
[0.1.0]
* Initial version

CloudronManifest.json (40)

@@ -1,32 +1,26 @@
 {
-"manifestVersion": 2,
-"id": "com.odoo.cloudronapp",
-"website": "https://www.odoo.com",
-"contactEmail": "support@odoo.com",
-"title": "Odoo",
-"author": "Odoo authors",
-"tagline": "Open Source ERP and CRM",
-"version": "14.0.0",
-"icon": "manifest/logo.png",
-"description": "file://manifest/DESCRIPTION.md",
-"changelog": "file://manifest/CHANGELOG.md",
-"postInstallMessage": "file://manifest/POSTINSTALL.md",
+"id": "com.odoo12ce.community.cloudronapp",
+"title": "Odoo Community",
+"author": "Odoo",
+"description": "file://DESCRIPTION.md",
+"changelog": "file://CHANGELOG",
+"postInstallMessage": "file://POSTINSTALL.md",
+"tagline": "Manage your organization",
+"version": "0.1.0",
 "healthCheckPath": "/",
-"httpPort": 8000,
-"memoryLimit": 2684354560,
+"httpPort": 8069,
 "addons": {
 "localstorage": {},
-"postgresql": {},
 "sendmail": {},
-"recvmail": {},
-"ldap": {}
+"ldap" : {},
+"postgresql": {}
 },
+"manifestVersion": 2,
+"website": "https://odoo.com",
+"contactEmail": "",
+"icon": "file://logo.png",
 "tags": [
-"auth"
+"crm"
 ],
-"optionalSso": true,
-"multiDomain": true,
-"mediaLinks": [],
-"forumUrl": "https://forum.cloudron.io/",
-"documentationUrl": "https://docs.cloudron.io/"
+"mediaLinks": [ ]
 }

DESCRIPTION.md (9)

@@ -0,0 +1,9 @@
Odoo, formerly known as OpenERP, is a suite of open-source business apps
written in Python and released under the LGPLv3 license. This suite of
applications covers all business needs, from Website/Ecommerce down to
manufacturing, inventory and accounting, all seamlessly integrated.
Odoo's technical features include a distributed server, flexible
workflows, an object database, a dynamic GUI, customizable reports, and
an XML-RPC interface. Odoo is the most installed business software in
the world. It is used by 2,000,000 users worldwide, ranging from very
small companies (1 user) to very large ones (300,000 users).

Dockerfile (79)

@@ -1,73 +1,20 @@
FROM cloudron/base:3.2.0@sha256:ba1d566164a67c266782545ea9809dc611c4152e27686fd14060332dd88263ea
# Reference: https://github.com/odoo/docker/blob/master/15.0/Dockerfile
FROM cloudron/base:0.12.0
MAINTAINER Samir Saidani <samir.saidani@babel.coop>
RUN mkdir -p /app/code /app/pkg /app/data /app/code/auto/addons
RUN mkdir -p /app/code /app/data
WORKDIR /app/code
RUN apt-get update && \
apt-get install -y \
python3-dev libxml2-dev libxslt1-dev libldap2-dev libsasl2-dev \
libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev libfreetype6-dev \
liblcms2-dev libwebp-dev libharfbuzz-dev libfribidi-dev libxcb1-dev libpq-dev
COPY ./odoo12CE_install.sh /app/code/
RUN curl -o wkhtmltox.deb -sSL https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.focal_amd64.deb && \
echo 'ae4e85641f004a2097621787bf4381e962fb91e1 wkhtmltox.deb' | sha1sum -c - && \
apt-get install -y --no-install-recommends ./wkhtmltox.deb && \
rm -f ./wkhtmltox.deb && \
rm -rf /var/lib/apt/lists/* /var/cache/apt
RUN /app/code/odoo12CE_install.sh
RUN wget -O - https://nightly.odoo.com/odoo.key | apt-key add -
RUN echo "deb http://nightly.odoo.com/12.0/nightly/deb/ ./" >> /etc/apt/sources.list.d/odoo.list
RUN apt-get update && apt-get -y install wkhtmltopdf && rm -r /var/cache/apt /var/lib/apt/lists
RUN npm install -g rtlcss
# patch to accept a db name
COPY sql_db.py /app/code/odoo-server/odoo/sql_db.py
# COPY sql_db.py /app/code/
COPY bin/* /usr/local/bin/
COPY start.sh /app/data/
COPY lib/doodbalib /usr/local/lib/python3.8/dist-packages/doodbalib
COPY custom /app/code/custom
RUN chmod -R a+rx /usr/local/bin \
&& chmod -R a+rX /usr/local/lib/python3.8/dist-packages/doodbalib \
&& sync
# Install Odoo
# sync extra addons
ENV ODOO_VERSION=14.0
ENV ODOO_SOURCE=OCA/OCB
ENV DEPTH_DEFAULT=100
ENV DEPTH_MERGE=500
RUN git config --global user.email "$CLOUDRON_MAIL_SMTP_USERNAME"
RUN git config --global user.name "Cloudron service"
# RUN curl -L https://github.com/odoo/odoo/archive/${ODOO_COMMIT_HASH}.tar.gz | tar zx --strip-components 1 -C /app/code && \
RUN git clone https://github.com/odoo/odoo.git --depth 1 -b $ODOO_VERSION /app/code/odoo
WORKDIR /app/code/odoo
RUN git pull -r
WORKDIR /app/code
RUN pip3 install -e /app/code/odoo
RUN pip3 install wheel && \
pip3 install -r https://raw.githubusercontent.com/$ODOO_SOURCE/$ODOO_VERSION/requirements.txt && \
pip3 install psycopg2==2.8.6 \
&& pip3 install git-aggregator \
&& (python3 -m compileall -q /usr/local/lib/python3.8/ || true)
# Patch Odoo to prevent connecting to the default database named 'postgres' every now and then.
RUN sed -i.bak "720i\ to = tools.config['db_name']" /app/code/odoo/odoo/sql_db.py
# Properly map the LDAP attribute 'displayname' instead of 'cn' to the display name of the logged in user.
RUN sed -i.bak "181s/'cn'/'displayname'/" /app/code/odoo/addons/auth_ldap/models/res_company_ldap.py
RUN rm -rf /var/log/nginx && mkdir /run/nginx && ln -s /run/nginx /var/log/nginx
# Copy entrypoint script and Odoo configuration file
ADD start.sh odoo.conf.sample nginx.conf /app/pkg/
WORKDIR /app/code/custom/src
RUN gitaggregate -c /app/code/custom/src/repos.yaml --expand-env
RUN /app/code/custom/build.d/110-addons-link
RUN /app/code/custom/build.d/200-dependencies
RUN /app/code/custom/build.d/400-clean
RUN /app/code/custom/build.d/900-dependencies-cleanup
RUN mkdir -p /app/data/odoo/filestore /app/data/odoo/addons && \
chown -R cloudron:cloudron /app/data
CMD [ "/app/pkg/start.sh" ]
CMD [ "/app/data/start.sh" ]
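The two `sed` patches in the 14.0 Dockerfile above (the forced db_name and the LDAP display-name mapping) can be sanity-checked after a build. A hedged sketch, with `cloudron-odoo:14.0` standing in for whatever tag the image was actually given:

#!/bin/bash
# Confirm the db_name patch landed in the cloned Odoo source
docker run --rm cloudron-odoo:14.0 \
  grep -n "tools.config\['db_name'\]" /app/code/odoo/odoo/sql_db.py
# Confirm auth_ldap now maps 'displayname' instead of 'cn'
docker run --rm cloudron-odoo:14.0 \
  grep -n "'displayname'" /app/code/odoo/addons/auth_ldap/models/res_company_ldap.py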

LICENSE (21)

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2022 Nj Subedi
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

POSTINSTALL.md (7)

@@ -0,0 +1,7 @@
Use the following credentials for initial setup:
`username`: admin
`password`: admin
**Please change the admin password and email on first login**

README.md (26)

@@ -1,26 +0,0 @@
## What
Run [Odoo](https://www.odoo.com/) on [Cloudron](https://cloudron.io). For more information see DESCRIPTION.md
## Why
Because Odoo works almost out of the box in any system that has Postgres and some disk space for data storage.
## Build and Install
- Install Cloudron CLI on your machine: `npm install -g cloudron-cli`.
- Install Docker, and make sure you can push to docker hub, or install the docker registry app in your own Cloudron.
- Log in to your Cloudron using cloudron cli: `cloudron login <my.yourdomain.tld>`.
- Build and publish the docker image: `cloudron build`.
- If you're using your own docker registry, name the image properly,
like `docker.example-cloudron.tld/john_doe/cloudron-odoo`.
- Log in to Docker Hub and mark the image as public, if necessary.
- Install the app `cloudron install -l <auth.yourdomain.tld>`
- Look at the logs to see if everything is going as planned.
Refer to the [Cloudron Docs](https://docs.cloudron.io/packaging/cli) for more information.
## Third-party Intellectual Properties
All third-party product names, company names, and their logos belong to their respective owners, and may be their
trademarks or registered trademarks.
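A condensed sketch of the build-and-install flow described in the README above; the domains and registry path are placeholders, and exact flags may differ between cloudron-cli releases:

#!/bin/bash
# Log in to your Cloudron with the CLI installed via `npm install -g cloudron-cli`
cloudron login my.yourdomain.tld
# Build and publish the Docker image (supply a repository you can push to,
# e.g. docker.example-cloudron.tld/john_doe/cloudron-odoo)
cloudron build
# Install the app at the chosen location, then watch the logs
cloudron install -l odoo.yourdomain.tld
cloudron logs --app odoo.yourdomain.tld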

bin/addons (144)

@@ -1,144 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import ast
import os
import sys
from argparse import ArgumentParser
from subprocess import check_call
from doodbalib import (
CORE,
ENTERPRISE,
MANIFESTS,
PRIVATE,
SRC_DIR,
AddonsConfigError,
addons_config,
logger,
)
# Exit codes
EXIT_NO_ADDONS = 0x4
# Define CLI options
parser = ArgumentParser(description="Install addons in current environment")
parser.add_argument(
"action",
choices=("init", "update", "list"),
help="What to do with the matched addons.",
)
parser.add_argument(
"-c", "--core", action="store_true", help="Use all Odoo core addons"
)
parser.add_argument(
"-d",
"--dependencies",
action="store_true",
help="Use only dependencies of selected addons",
)
parser.add_argument("-e", "--extra", action="store_true", help="Use all extra addons")
parser.add_argument(
"-f",
"--fullpath",
action="store_true",
help="Print addon's full path, only useful with list mode",
)
parser.add_argument(
"-i", "--installable", action="store_true", help="Include only installable addons"
)
parser.add_argument(
"-n", "--enterprise", action="store_true", help="Use all enterprise addons"
)
parser.add_argument(
"-p", "--private", action="store_true", help="Use all private addons"
)
parser.add_argument(
"-s",
"--separator",
type=str,
default=",",
help="String that separates addons only useful with list mode",
)
parser.add_argument(
"-t",
"--test",
action="store_true",
help="Run unit tests for these addons, usually combined with update",
)
parser.add_argument(
"-x",
"--explicit",
action="store_true",
help="Fail if any addon is explicitly declared but not found",
)
parser.add_argument(
"-w",
"--with",
action="append",
dest="with_",
default=[],
help="Addons to include always.",
)
parser.add_argument(
"-W", "--without", action="append", default=[], help="Addons to exclude always."
)
# Generate the matching addons set
args = parser.parse_args()
dependencies = {"base"}
addons = set(args.with_)
without = set(args.without)
if addons & without:
sys.exit("Cannot include and exclude the same addon!")
if args.dependencies and args.fullpath:
sys.exit("Unsupported combination of --dependencies and --fullpath")
try:
for addon, repo in addons_config(strict=args.explicit):
if addon in without:
continue
core_ok = args.core and repo == CORE
enterprise_ok = args.enterprise and repo == ENTERPRISE
extra_ok = args.extra and repo not in {CORE, ENTERPRISE, PRIVATE}
private_ok = args.private and repo == PRIVATE
manual_ok = addon in addons
if private_ok or core_ok or extra_ok or enterprise_ok or manual_ok:
addon_path = os.path.join(SRC_DIR, repo, addon)
manifest = {}
for manifest_name in MANIFESTS:
try:
manifest_path = os.path.join(addon_path, manifest_name)
with open(manifest_path, "r") as code:
manifest = ast.literal_eval(code.read())
break
except IOError:
continue
if args.installable and not manifest.get("installable", True):
continue
dependencies.update(manifest.get("depends", []))
if args.fullpath and args.action == "list":
addon = addon_path
addons.add(addon)
except AddonsConfigError as error:
sys.exit(error.message)
# Use dependencies instead, if requested
if args.dependencies:
addons = dependencies - addons
addons -= without
# Do the required action
if not addons:
print("No addons found", file=sys.stderr)
sys.exit(EXIT_NO_ADDONS)
addons = args.separator.join(sorted(addons))
if args.action == "list":
print(addons)
else:
command = ["odoo", "--stop-after-init", "--{}".format(args.action), addons]
if args.test:
command += ["--test-enable", "--workers", "0"]
if os.environ.get("PGDATABASE"):
command += ["--db-filter", u"^{}$".format(os.environ.get("PGDATABASE"))]
logger.info("Executing %s", " ".join(command))
check_call(command)
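The `addons` helper above is copied to /usr/local/bin by the 14.0 Dockerfile, so inside the image it can be invoked by name. A hedged usage sketch based on its own argparse definitions; the addon names are placeholders:

#!/bin/bash
# List every extra (non-core, non-enterprise, non-private) addon, with full paths
addons list --extra --fullpath
# Initialize all private addons plus two explicitly named ones (placeholder names)
addons init --private --with sale_management --with crm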

bin/autoaggregate (165)

@@ -1,165 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
from multiprocessing import cpu_count
from subprocess import check_call
import yaml
from doodbalib import (
ADDONS_YAML,
AUTO_REPOS_YAML,
CORE,
ODOO_DIR,
PRIVATE,
REPOS_YAML,
SRC_DIR,
logger,
)
# If the umask matches the `chmod -R u+rwX,g+rX-w,o= /opt/odoo` command, the build is faster, as we
# don't need to fix as many permissions after auto-aggregation
UMASK = os.environ.get("UMASK") or "0027"
UID = int(os.environ.get("UID") or -1)
GID = int(os.environ.get("GID") or -1)
DEFAULT_REPO_PATTERN = os.environ.get("DEFAULT_REPO_PATTERN")
DEFAULT_REPO_PATTERN_ODOO = os.environ.get("DEFAULT_REPO_PATTERN_ODOO")
log_level = os.environ.get("LOG_LEVEL", "INFO")
def aggregate(config):
"""Execute git aggregator to pull git code.
:param str config:
Path where to find the ``repos.yaml`` file.
"""
logger.info("Running gitaggregate with %s", config)
def pre_exec_umask():
# Download git code with the specified umask, if set, otherwise use "0027"
os.umask(int(UMASK, 8))
pre_execs = [pre_exec_umask]
def pre_exec():
for _exec in pre_execs:
try:
_exec()
except Exception as e:
logger.error("Error in %s: %s" % (_exec, e))
logger.exception(e)
raise
if ~GID:
def pre_exec_gid():
# execute git with GID
os.setgid(GID)
pre_execs.append(pre_exec_gid)
if ~UID:
def pre_exec_uid():
# execute git with UID
os.setuid(UID)
# set odoo home directory
# (git checks if user has a config in $HOME, and we cannot read /root as odoo user)
os.environ["HOME"] = "/home/odoo"
pre_execs.append(pre_exec_uid)
check_call(
[
"gitaggregate",
"--expand-env",
"--config",
config,
"--log-level",
log_level,
"--jobs",
str(cpu_count() or 1),
"aggregate",
],
cwd=SRC_DIR,
stderr=sys.stderr,
stdout=sys.stdout,
preexec_fn=pre_exec,
)
def origin_for(
folder,
default_repo_pattern=DEFAULT_REPO_PATTERN,
odoo_repo_pattern=DEFAULT_REPO_PATTERN_ODOO,
):
"""Guess the default git origin for that folder.
:param str folder:
Normally an absolute path to an expected git repo, whose name should
match the git repository where it comes from, using the env-supplied
pattern.
"""
base = os.path.basename(folder)
pattern = default_repo_pattern
if base == "odoo":
pattern = odoo_repo_pattern
return pattern.format(base)
def missing_repos_config():
"""Find the undefined repositories and return their default configuration.
:return dict:
git-aggregator-ready configuration dict for undefined repositories.
"""
defined, expected = set(), {ODOO_DIR}
# Find the repositories defined by hand
try:
with open(REPOS_YAML) as yaml_file:
for doc in yaml.safe_load_all(yaml_file):
for repo in doc:
defined.add(os.path.abspath(os.path.join(SRC_DIR, repo)))
except (IOError, AttributeError):
logger.debug("No repositories defined by hand")
addons_env = {}
# Find the repositories that should be present
try:
with open(ADDONS_YAML) as yaml_file:
for doc in yaml.safe_load_all(yaml_file):
env = dict(os.environ, **doc.get("ENV", {}))
for repo in doc:
if repo in {PRIVATE, "ONLY", "ENV"}:
continue
if repo == CORE:
repo_path = ODOO_DIR
else:
repo_path = os.path.abspath(os.path.join(SRC_DIR, repo))
if not os.path.exists(repo_path) or os.path.isdir(
os.path.join(repo_path, ".git")
):
expected.add(repo_path)
addons_env[repo_path] = env
except (IOError, AttributeError):
logger.debug("No addons are expected to be present")
# Find the undefined repositories and generate a config for them
missing = expected - defined
config = {}
for repo_path in missing:
env = addons_env.get(repo_path, os.environ)
depth = env["DEPTH_DEFAULT"]
origin_version = "origin %s" % env["ODOO_VERSION"]
config[repo_path] = {
"defaults": {"depth": depth},
"merges": [origin_version],
"remotes": {
"origin": origin_for(
repo_path,
env["DEFAULT_REPO_PATTERN"],
env["DEFAULT_REPO_PATTERN_ODOO"],
)
},
"target": origin_version,
}
logger.debug("Generated missing repos config %r", config)
return config

bin/config-generate (52)

@@ -1,52 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate Odoo server configuration from templates"""
import os
from contextlib import closing
from string import Template
from doodbalib import logger
try:
# Python 2, where io.StringIO fails because it is unicode-only
from StringIO import StringIO
except ImportError:
from io import StringIO
try:
from configparser import RawConfigParser
parser = RawConfigParser(strict=False)
except ImportError:
# Python 2, where strict=True doesn't exist
from ConfigParser import RawConfigParser
parser = RawConfigParser()
ODOO_VERSION = os.environ.get("ODOO_VERSION")
TARGET_FILE = os.environ.get("ODOO_RC", "/opt/odoo/auto/odoo.conf")
CONFIG_DIRS = ("/opt/odoo/common/conf.d", "/opt/odoo/custom/conf.d")
CONFIG_FILES = []
# Read all configuration files found in those folders
logger.info("Merging found configuration files in %s", TARGET_FILE)
for dir_ in CONFIG_DIRS:
try:
for file_ in sorted(os.listdir(dir_)):
parser.read(os.path.join(dir_, file_))
except OSError: # TODO Use FileNotFoundError when we drop python 2
continue
# Write it to a memory string object
with closing(StringIO()) as resultfp:
parser.write(resultfp)
resultfp.seek(0)
# Obtain the config string
result = resultfp.read()
# Expand environment variables found within
result = Template(result).substitute(os.environ)
logger.debug("Resulting configuration:\n%s", result)
# Write it to destination
with open(TARGET_FILE, "w") as targetfp:
targetfp.write(result)

bin/direxec (75)

@@ -1,75 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess
import sys
from logging import DEBUG, INFO, WARNING
from doodbalib import logger, which
# This file is meant to be called through a symlink named `build` or `entrypoint`
mode = os.path.basename(__file__)
dir_odoo = "/opt/odoo"
dir_common = os.path.join(dir_odoo, "common", "%s.d" % mode)
dir_custom = os.path.join(dir_odoo, "custom", "%s.d" % mode)
# Find scripts
files = [(d, dir_common) for d in os.listdir(dir_common)]
try:
files += [(d, dir_custom) for d in os.listdir(dir_custom)]
except OSError:
pass
# Run scripts
for executable, folder in sorted(files):
command = os.path.join(folder, executable)
if os.access(command, os.X_OK):
logger.debug("Executing %s", command)
subprocess.check_call(command)
# Allow omitting the 1st command and default to `odoo`
extra_command = sys.argv[1:]
if extra_command:
if extra_command[0] == "shell" or extra_command[0].startswith("-"):
extra_command.insert(0, "odoo")
# Set the DB creation language, if needed
if extra_command[0] in {"odoo", "/usr/local/bin/odoo"}:
if os.environ.get("INITIAL_LANG"):
from psycopg2 import OperationalError, connect
try:
connection = connect(dbname=os.environ.get("PGDATABASE"))
connection.close()
except OperationalError:
# No DB exists, set initial language
extra_command += ["--load-language", os.environ["INITIAL_LANG"]]
if os.environ.get("PTVSD_ENABLE") == "1":
# Warn deprecation
logger.log(
WARNING,
"ptvsd has been deprecated for python debugging. "
"Please use debugpy (see https://github.com/Tecnativa/doodba#debugpy)",
)
# See `python -m ptvsd -h` to understand this
extra_command[0] = os.path.realpath(which(extra_command[0]))
extra_command = (
["python", "-m", "ptvsd"]
+ os.environ.get("PTVSD_ARGS", "").split()
+ extra_command
)
elif os.environ["DEBUGPY_ENABLE"] == "1":
# See `python -m debugpy -h` to understand this
extra_command[0] = os.path.realpath(which(extra_command[0]))
extra_command = (
["python", "-m", "debugpy"]
+ os.environ["DEBUGPY_ARGS"].split()
+ extra_command
)
logger.log(
DEBUG if extra_command[0] == "/qa/insider" else INFO,
"Executing %s",
" ".join(extra_command),
)
os.execvp(extra_command[0], extra_command)

bin/log (15)

@@ -1,15 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import argparse
import logging
from doodbalib import LOG_LEVELS, logger
parser = argparse.ArgumentParser(description="Easy logging for scripts")
parser.add_argument("level", choices=LOG_LEVELS)
parser.add_argument("message", nargs="+")
arguments = parser.parse_args()
logger.log(getattr(logging, arguments.level), " ".join(arguments.message))

bin/pot (11)

@@ -1,11 +0,0 @@
#!/bin/bash
# Shortcut to run Odoo in i18n (.pot) export mode
set -e
addons=$1
shift
log INFO Executing Odoo in i18n export mode for addons $addons
# HACK Odoo needs a *.po file to guess the output format
ln -sf /dev/stdout /tmp/stdout.po
set -x
exec odoo --stop-after-init -d "$PGDATABASE" --i18n-export /tmp/stdout.po \
--modules "$addons" --update "$addons" --workers 0 "$@"

bin/preparedb (18)

@@ -1,18 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Script to prepare the database with initial data
import click
import click_odoo
@click.command()
@click_odoo.env_options(default_log_level="info", database_must_exist=True)
def main(env):
"""Set report.url in the database to be pointing at localhost."""
env["ir.config_parameter"].set_param("report.url", "http://localhost:8069")
env.cr.commit()
if __name__ == "__main__":
main()

custom/build.d/100-repos-aggregate (18)

@@ -1,18 +0,0 @@
#!/bin/bash
set -e
# make sure odoo has a user.name configured, as merges would not succeed otherwise
# (even if GIT_AUTHOR_NAME and EMAIL are set and should be used, it seems gitaggregate is not passing them to git)
su --shell="$SHELL" odoo -c 'git config user.name 1>/dev/null || git config --global user.name "'"$GIT_AUTHOR_NAME"'"'
# copy ssh directory to odoo user as well (gitaggregate may also be run as odoo user)
if [[ ! -e ~odoo/.ssh ]] ; then
cp -a /opt/odoo/custom/ssh ~odoo/.ssh
fi
if [ "$AGGREGATE" != true ]; then
log WARNING Not aggregating code repositories
exit 0
fi
exec autoaggregate

custom/build.d/110-addons-link (18)

@@ -1,18 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
from glob import iglob
from doodbalib import ADDONS_DIR, ADDONS_YAML, SRC_DIR, addons_config, logger
logger.info("Linking all addons from %s in %s", ADDONS_YAML, ADDONS_DIR)
# Remove all links in addons dir
for link in iglob(os.path.join(ADDONS_DIR, "*")):
os.remove(link)
# Add new links
for addon, repo in addons_config():
src = os.path.relpath(os.path.join(SRC_DIR, repo, addon), ADDONS_DIR)
dst = os.path.join(ADDONS_DIR, addon)
os.symlink(src, dst)
logger.debug("Linked %s in %s", src, dst)

custom/build.d/200-dependencies (30)

@@ -1,30 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
from glob import glob
from os.path import basename, join, splitext
from doodbalib import CUSTOM_DIR, FILE_APT_BUILD, SRC_DIR
from doodbalib.installer import INSTALLERS, install, logger
# Build dependencies installed before any others
install("apt", FILE_APT_BUILD)
for name in INSTALLERS:
req_files = []
# Normal dependency installation
req_files.append(join(CUSTOM_DIR, "dependencies", "%s.txt" % name))
for req_file in req_files:
install(name, req_file)
# Sorted dependencies installation
dep_files = sorted(glob(join(CUSTOM_DIR, "dependencies", "[0-9]*-*")))
for dep_file in dep_files:
root, ext = splitext(basename(dep_file))
# Get the installer (xxx-installer[-description][.ext])
installer = root.split("-", 2)[1]
if installer not in INSTALLERS:
logger.error("Unknown installer: %s", installer)
raise Exception
install(installer, dep_file)

custom/build.d/300-fontconfig (31)

@@ -1,31 +0,0 @@
#!/bin/bash
set -e
mkdir -p ~cloudron/.config/fontconfig/conf.d
cat <<END > ~cloudron/.config/fontconfig/conf.d/100-doodba.conf
<?xml version="1.0"?>
<!DOCTYPE fontconfig SYSTEM "fonts.dtd">
<!-- Odoo default fonts, generated by Doodba -->
<fontconfig>
<alias>
<family>monospace</family>
<prefer>
<family>$FONT_MONO</family>
</prefer>
</alias>
<alias>
<family>sans-serif</family>
<prefer>
<family>$FONT_SANS</family>
</prefer>
</alias>
<alias>
<family>serif</family>
<prefer>
<family>$FONT_SERIF</family>
</prefer>
</alias>
</fontconfig>
END
chown cloudron:cloudron ~cloudron/.config

custom/build.d/400-clean (51)

@@ -1,51 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import shutil
import sys
from doodbalib import CLEAN, ODOO_DIR, PRIVATE_DIR, SRC_DIR, addons_config, logger
if not CLEAN:
logger.warning("Not cleaning garbage")
sys.exit()
# Get the enabled paths
repos_addons = {}
for addon, repo in addons_config(filtered=False):
repo_path = os.path.realpath(os.path.join(SRC_DIR, repo))
repos_addons.setdefault(repo_path, set())
repos_addons[repo_path].add(addon)
logger.debug("Addon paths enabled: %s", repos_addons)
# Traverse src dir and remove anything not explicitly enabled
for directory, subdirectories, subfiles in os.walk(SRC_DIR):
logger.debug("Checking for cleanup directory %s", directory)
# Skip main src directory
if directory == SRC_DIR:
continue
# Always skip private/*
if directory == PRIVATE_DIR:
subdirectories[:] = []
continue
# Inside the odoo dir, skip all but addons dir
if directory == ODOO_DIR:
subdirectories[:] = ["addons"]
continue
try:
# Get addons enabled in the current directory
enabled_addons = repos_addons[directory]
except KeyError:
# This isn't a repo; is there anything inside to preserve?
directory += os.path.sep
if any(repo.startswith(directory) for repo in repos_addons):
# Then, let's walk in; we'll remove later if needed
continue
else:
# This is an addons repo; do not walk into the enabled ones
for addon in enabled_addons:
subdirectories.remove(addon)
continue
# Remove every other directory
logger.info("Removing directory %s", directory)
shutil.rmtree(directory)

custom/build.d/500-compile (10)

@@ -1,10 +0,0 @@
#!/bin/bash
set -e
if [ "$COMPILE" != true ]; then
log WARNING Not compiling Python code
exit 0
fi
log INFO Compiling all Python code in /opt/odoo
python -m compileall -q /opt/odoo

custom/build.d/900-dependencies-cleanup (21)

@@ -1,21 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2017 LasLabs Inc.
import os
from doodbalib import CUSTOM_DIR, FILE_APT_BUILD
from doodbalib.installer import INSTALLERS
# Build-time dependencies must be removed when finishing build
if os.path.isfile(FILE_APT_BUILD):
installer = INSTALLERS["apt"](FILE_APT_BUILD)
installer.remove()
installer.cleanup()
# Clean up garbage generated by respective package managers
for name, class_ in INSTALLERS.items():
req_file = os.path.join(CUSTOM_DIR, "dependencies", "%s.txt" % name)
if os.path.isfile(req_file):
class_(req_file).cleanup()

custom/dependencies/apt.txt (8)

@@ -1,8 +0,0 @@
python3-pandas
cups
libcups2-dev
python3-dev
build-essential
libcairo2-dev
pkg-config
xmlsec1

custom/dependencies/apt_build.txt (3)

@@ -1,3 +0,0 @@
libldap2-dev
libssl-dev
libsasl2-dev

custom/dependencies/gem.txt (0)

custom/dependencies/npm.txt (0)

custom/dependencies/pip.txt (57)

@@ -1,57 +0,0 @@
git+https://github.com/OCA/openupgradelib.git@master
unicodecsv
unidecode
py3o.template
PyPDF2
py3o.formats
zeep
parse-accept-language
pyquerystring
cerberus==1.3.2
apispec>=4.0.0
marshmallow
marshmallow-objects>=2.0.0
cachetools
boto
pycups
bravado_core
facebook_business
python-telegram-bot
swagger_spec_validator
viberbot
PyMuPDF==1.16.14
factur-x
regex
dateparser==1.1.1
pycairo
rocketchat_API
ovh
weboob
payplug
qrcode
markdownify
requests_oauthlib
pyocclient
cryptography>=2.3
jwcrypto==0.5.0
freezegun
pysaml2
formio-data
pysaml2
odoo_test_helper
python-jose
click-odoo-contrib
python-jose
pdfplumber
openpyxl
PyGithub
caldav
python-stdnum==1.17
vatnumber
pydrive
weboob
dropbox
pysftp
icalendar
jsondiff
extendable-pydantic

custom/src/addons.yaml (168)

@@ -1,168 +0,0 @@
sale-workflow:
- "*"
openupgrade:
- "openupgrade_framework"
- "openupgrade_scripts"
partner-contact:
- "*"
server-ux:
- "*"
bank-payment:
- "*"
account-financial-tools:
- "account_*"
- "base_*"
community-data-files:
- "*"
hr:
- "*"
l10n-france:
- "*"
l10n-switzerland:
- "*"
product-attribute:
- "*"
project:
- "*"
project-reporting:
- "*"
web:
- "*"
website:
- "*"
mis-builder:
- "*"
operating-unit:
- "*"
connector:
- "*"
purchase-workflow:
- "*"
server-env:
- "*"
odoo-theme:
- "*"
report-engine:
- "*"
formio:
- "*"
# learning_addons:
# - "*"
muk-web:
- "*"
rest-framework:
- "*"
server-auth:
- "*"
server-tools:
- "*"
wms:
- "*"
stock-logistics-warehouse:
- "*"
contract:
- "*"
field-service:
- "*"
queue:
- "*"
commission:
- "*"
pms:
- "*"
dms:
- "*"
social:
- "*"
pos:
- "*"
product-configurator:
- "*"
stock-logistics-workflow:
- "*"
account-financial-reporting:
- "*"
bank-statement-import:
- "*"
account-reconcile:
- "*"
manufacture:
- "*"
multi-company:
- "*"
account-analytic:
- "*"
stock-logistics-reporting:
- "*"
account-invoice-reporting:
- "*"
sale-reporting:
- "*"
account-closing:
- "*"
account-payment:
- "*"
edi:
- "*"
timesheet:
- "*"
odoo-pim:
- "*"
delivery-carrier:
- "*"
storage:
- "*"
product-variant:
- "*"
e-commerce:
- "*"
hr-expense:
- "*"
crm:
- "*"
maintenance:
- "*"
connector-telephony:
- "*"
server-backend:
- "*"
intrastat-extrastat:
- "*"
brand:
- "*"
hr-holidays:
- "*"
server-brand:
- "*"
report-print-send:
- "*"
calendar:
- "*"
credit-control:
- "*"
# myc-extra-addons:
# - "*"
account-invoicing:
- "*"
sync-addons:
- "*"
vertical-cooperative:
- "*"
# nj-addons:
# - "*"
vertical-association:
- "*"
account-move-import:
- "*"
galicea-addons:
- "*"
straga-main:
- "*"
odoo-usability:
- "*"
odoo-py3o-report-templates:
- "*"
project-tools:
- "*"
pad-tools:
- "*"

custom/src/private/.editorconfig (20)

@@ -1,20 +0,0 @@
# Configuration for known file extensions
[*.{css,js,json,less,md,py,rst,sass,scss,xml,yaml,yml}]
charset = utf-8
end_of_line = lf
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.{json,yml,yaml,rst,md}]
indent_size = 2
# Do not configure editor for libs and autogenerated content
[{*/static/{lib,src/lib}/**,*/static/description/index.html,*/readme/../README.rst}]
charset = unset
end_of_line = unset
indent_size = unset
indent_style = unset
insert_final_newline = false
trim_trailing_whitespace = false

custom/src/repos.yaml (899)

@@ -1,899 +0,0 @@
./sale-workflow:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/sale-workflow.git
target: origin 14.0
merges:
- origin 14.0
./partner-contact:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/partner-contact.git
# myc: ssh://git@git.myceliandre.fr:5022/OCA/partner-contact.git
target: oca 14.0
merges:
#- myc 14.0_partner_favorite
- oca 14.0
./server-ux:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/server-ux.git
target: oca 14.0
merges:
- oca 14.0
./bank-payment:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/bank-payment.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/820/head
- oca refs/pull/822/head
#- oca refs/pull/831/head
#- oca refs/pull/858/head
./account-financial-tools:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/account-financial-tools.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/1087/head
- oca refs/pull/1236/head
./community-data-files:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/community-data-files.git
target: oca 14.0
merges:
- oca 14.0
./hr:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/hr.git
target: oca 14.0
merges:
- oca 14.0
./l10n-switzerland:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/l10n-switzerland.git
target: oca 12.0
merges:
- oca 12.0
./l10n-france:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/l10n-france.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/257/head
- oca refs/pull/321/head
./product-attribute:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/product-attribute.git
target: oca 14.0
merges:
- oca 14.0
./project:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/project.git
target: oca 14.0
merges:
- oca 14.0
./project-reporting:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/project-reporting.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/44/head
./website:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/website.git
target: oca 14.0
merges:
- oca 14.0
./web:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/web.git
target: oca 14.0
merges:
- oca 14.0
./mis-builder:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/mis-builder.git
target: oca 14.0
merges:
- oca 14.0
./operating-unit:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/operating-unit.git
target: oca 14.0
merges:
- oca 14.0
./connector:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/connector.git
target: oca 14.0
merges:
- oca 14.0
./delivery-carrier:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/delivery-carrier.git
target: origin 14.0
merges:
- origin 14.0
./purchase-workflow:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/purchase-workflow.git
target: origin 14.0
merges:
- origin 14.0
./server-env:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/server-env.git
target: origin 14.0
merges:
- origin 14.0
./odoo-theme:
defaults:
depth: $DEPTH_DEFAULT
remotes:
myc: https://git.myceliandre.fr/Myceliandre/odoo_theme.git
target: myc 14.0
merges:
- myc 14.0
./report-engine:
defaults:
depth: 200
remotes:
oca: https://github.com/OCA/reporting-engine.git
target: oca 14.0
merges:
#- oca refs/pull/445/head
#- oca refs/pull/506/head
#- oca refs/pull/526/head
#- oca refs/pull/502/head
# report_py3o PR
#- oca refs/pull/445/head
- oca 14.0
./formio:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/novacode-nl/odoo-formio.git
target: origin 14.0
merges:
- origin 14.0
# ./learning_addons:
# defaults:
# # Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# # You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# # for a sane value of 100 commits)
# depth: $DEPTH_DEFAULT
# remotes:
# origin: ssh://git@git.myceliandre.fr:5022/Myceliandre/odoo_learning_addons.git
# target: origin 14.0
# merges:
# - origin 14.0
./muk-web:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/muk-it/muk_web.git
target: origin 14.0
merges:
- origin 14.0
./rest-framework:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/rest-framework.git
target: oca 14.0
merges:
- oca 14.0
./server-auth:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/server-auth.git
target: oca 14.0
merges:
- oca 14.0
./server-tools:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/server-tools.git
target: oca 14.0
merges:
- oca 14.0
./wms:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/wms.git
target: oca 14.0
merges:
- oca 14.0
./stock-logistics-warehouse:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/stock-logistics-warehouse.git
target: oca 14.0
merges:
- oca 14.0
./contract:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/contract.git
target: oca 14.0
merges:
- oca 14.0
./field-service:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/field-service.git
target: oca 14.0
merges:
- oca 14.0
./queue:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/queue.git
target: oca 14.0
merges:
- oca 14.0
./commission:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/commission.git
target: oca 14.0
merges:
- oca 14.0
./pms:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/pms.git
target: oca 14.0
merges:
- oca 14.0
./dms:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/dms.git
target: oca 14.0
merges:
- oca 14.0
./social:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/social.git
target: oca 14.0
merges:
- oca 14.0
./pos:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/pos.git
target: oca 14.0
merges:
- oca 14.0
./product-configurator:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/product-configurator.git
target: oca 14.0
merges:
- oca 14.0
./stock-logistics-workflow:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/stock-logistics-workflow.git
target: oca 14.0
merges:
- oca 14.0
./account-financial-reporting:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/account-financial-reporting.git
target: oca 14.0
merges:
- oca 14.0
./bank-statement-import:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/bank-statement-import.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/396/head
- oca refs/pull/417/head
- oca refs/pull/423/head
./account-reconcile:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/account-reconcile.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/361/head
- oca refs/pull/382/head
- oca refs/pull/402/head
- oca refs/pull/412/head
./manufacture:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/manufacture.git
target: oca 14.0
merges:
- oca 14.0
./multi-company:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/multi-company.git
target: oca 14.0
merges:
- oca 14.0
./account-analytic:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/account-analytic.git
target: oca 14.0
merges:
- oca 14.0
./stock-logistics-reporting:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/stock-logistics-reporting.git
target: oca 14.0
merges:
- oca 14.0
./account-invoice-reporting:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/account-invoice-reporting.git
target: oca 14.0
merges:
- oca 14.0
./account-invoicing:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/account-invoicing.git
target: oca 14.0
merges:
- oca 14.0
./sale-reporting:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/sale-reporting.git
target: oca 14.0
merges:
- oca 14.0
./account-closing:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/account-closing.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/155/head
- oca refs/pull/174/head
./account-payment:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/account-payment.git
target: oca 14.0
merges:
- oca 14.0
./edi:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: 200
remotes:
oca: https://github.com/OCA/edi.git
target: oca 14.0
merges:
- oca 14.0
- oca refs/pull/326/head
#- oca refs/pull/334/head
./timesheet:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/timesheet.git
target: oca 14.0
merges:
- oca 14.0
./odoo-pim:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/odoo-pim.git
target: oca 14.0
merges:
- oca 14.0
./storage:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/storage.git
target: oca 14.0
merges:
- oca 14.0
./product-variant:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/product-variant.git
target: oca 14.0
merges:
- oca 14.0
./e-commerce:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/e-commerce.git
target: oca 14.0
merges:
- oca 14.0
./hr-expense:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/hr-expense.git
target: oca 14.0
merges:
- oca 14.0
./crm:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/crm.git
target: oca 14.0
merges:
- oca 14.0
./maintenance:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/maintenance.git
target: oca 14.0
merges:
- oca 14.0
./connector-telephony:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/connector-telephony.git
target: oca 14.0
merges:
- oca 14.0
./server-backend:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/server-backend.git
target: oca 14.0
merges:
- oca 14.0
./intrastat-extrastat:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/intrastat-extrastat.git
target: oca 14.0
merges:
- oca 14.0
./brand:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/brand.git
target: oca 14.0
merges:
- oca 14.0
./hr-holidays:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/hr-holidays.git
target: oca 14.0
merges:
- oca 14.0
./server-brand:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/server-brand.git
target: oca 14.0
merges:
- oca 14.0
./report-print-send:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/report-print-send.git
target: oca 14.0
merges:
- oca 14.0
./calendar:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/calendar.git
target: oca 14.0
merges:
- oca 14.0
./credit-control:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
oca: https://github.com/OCA/credit-control.git
target: oca 14.0
merges:
- oca 14.0
# ./myc-extra-addons:
# defaults:
# depth: $DEPTH_DEFAULT
# remotes:
# myc: ssh://git@git.myceliandre.fr:5022/njeudy/myc-extra-addons.git
# target: myc 14.0
# merges:
# - myc 14.0
./sync-addons:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/itpp-labs/sync-addons.git
target: origin 14.0
merges:
- origin 14.0
./vertical-cooperative:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://git.myceliandre.fr/njeudy/vertical-cooperative.git
target: origin 14.0-MIG-INITIAL
merges:
- origin 14.0-MIG-INITIAL
# ./nj-addons:
# defaults:
# depth: $DEPTH_DEFAULT
# remotes:
# origin: ssh://git@git.myceliandre.fr:5022/nj.0k.io/nj-addons.git
# target: origin 14.0
# merges:
# - origin 14.0
./vertical-association:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/vertical-association.git
target: origin 14.0
merges:
- origin 14.0
./account-move-import:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/akretion/account-move-import.git
target: origin 14.0
merges:
- origin 14.0
./galicea-addons:
defaults:
# Shallow repositories ($DEPTH_DEFAULT=1) are faster & thinner
# You may need a bigger depth when merging PRs (use $DEPTH_MERGE
# for a sane value of 100 commits)
depth: $DEPTH_DEFAULT
remotes:
origin: https://git.myceliandre.fr/Myceliandre/galicea-odoo-addons-ecosystem.git
target: origin 14.0
merges:
- origin 14.0
./openupgrade:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/OCA/OpenUpgrade.git
target: origin 14.0
merges:
- origin 14.0
./straga-main:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/straga/odoo_vik_main.git
target: origin 14.0
merges:
- origin 14.0
./odoo-usability:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/akretion/odoo-usability.git
target: origin 14.0
merges:
- origin 14.0
./odoo-py3o-report-templates:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/akretion/odoo-py3o-report-templates.git
target: origin 14.0
merges:
- origin 14.0
./project-tools:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/elabore-coop/project-tools.git
target: origin 14.0
merges:
- origin 14.0
./pad-tools:
defaults:
depth: $DEPTH_DEFAULT
remotes:
origin: https://github.com/elabore-coop/pad-tools.git
target: origin 14.0
merges:
- origin 14.0

custom/ssh/cloudron_git.rsa (38)

@@ -1,38 +0,0 @@
-----BEGIN OPENSSH PRIVATE KEY-----
b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABlwAAAAdzc2gtcn
NhAAAAAwEAAQAAAYEAxTkiUEpg2rhVAbvImkk2BAKIB13kCg2IyNuiUtPwKWBt4gyDzQwS
r9+lhDcTb41UmQFn0++dBCveRkTTafY+d23aWSHf+vfM470CSzdwothHIukNnPyRvWAwhO
Z7X51evA2hYuUvdeZex0Rqrwdxu1vrpCsPwHxiEAMm5Adc8ybqEiKBkoRv20PKas1WEl/m
RpSTadGUZVh0fJFp9gRFKmZMqXWm81hB5MpKL4OBd/APhRZfqNT0WOTIBHFKySjAqNi4H8
eUW3voi/ivSMTCv1MyybzSEHRLS8fzDa9zJ6uXp6/SOVSNyIT8oNqBBOG0Bk0w2y9E32lR
tnqmugVU40CSoIwf9LyCy3pSdqM1mM+sXTsd/tqY4Vo/H1m6U+zjEX1/9pLYbS0uDbdCAv
ChoxQg+HtCZ74wX2c+yFrVcDqqHNOqCbbPdjSNrdCFVdZtx2A2AuqALFnXskc1lJ4VTOsJ
Fr5QoAmLGRYN0lvqzNVUwI/BklmIYOgi8cvXf4PxAAAFkItx43GLceNxAAAAB3NzaC1yc2
EAAAGBAMU5IlBKYNq4VQG7yJpJNgQCiAdd5AoNiMjbolLT8ClgbeIMg80MEq/fpYQ3E2+N
VJkBZ9PvnQQr3kZE02n2Pndt2lkh3/r3zOO9Aks3cKLYRyLpDZz8kb1gMITme1+dXrwNoW
LlL3XmXsdEaq8Hcbtb66QrD8B8YhADJuQHXPMm6hIigZKEb9tDymrNVhJf5kaUk2nRlGVY
dHyRafYERSpmTKl1pvNYQeTKSi+DgXfwD4UWX6jU9FjkyARxSskowKjYuB/HlFt76Iv4r0
jEwr9TMsm80hB0S0vH8w2vcyerl6ev0jlUjciE/KDagQThtAZNMNsvRN9pUbZ6proFVONA
kqCMH/S8gst6UnajNZjPrF07Hf7amOFaPx9ZulPs4xF9f/aS2G0tLg23QgLwoaMUIPh7Qm
e+MF9nPsha1XA6qhzTqgm2z3Y0ja3QhVXWbcdgNgLqgCxZ17JHNZSeFUzrCRa+UKAJixkW
DdJb6szVVMCPwZJZiGDoIvHL13+D8QAAAAMBAAEAAAGAAxi7jjEsxiJgy08sfieqHnP/uM
Xjn7jIrgaszMohGjU2ZHc31o9a98H2MlY/CuBYNLLN84jumTMrIUVRYHeKUYu7Au1CPAmK
AQVltNKhBR2KOGUaXp2kmCmbeWq5Ay5QX3mDUC8zCJHeaRiM6ESgp4Vw9LnsXGRXkdLK2I
e5EORKhpBeInPL4dB1rCmfMViqH++TRPUSdGjoI1CRLliw0VKb34lGXsnC9xmqAob5EG4H
gFpylA8L6x1kepVgzDnEjYf9DEHwapmBrqFzamItaVX0/tCbz9Z+pJKPwbQUNiI685vpto
y+1N3ebPlQWIYVMe8nNJk5sHU1fHSvwaUy7LHC7rQS5M8+rPk9uJ6Gn4IzrMT+krUXhLQ7
ty8MeA2MJfkZPh2hewp+oBInocNrQY/YoEIG+GymWl5bWd14Oq1aD3c6kiQx4wFbS+UQ8v
K24PwXQMp44Js8tu8VzW8PkJDS0tWIBbj6vb3VLnLzPSi96RXcUwGNtB5nIkgKbdt1AAAA
wQDarcaxa7NIz5MkRgLhWNUPSZRh9upUnck6Ul7JnMYeVkNTm6u17V5sl1qj6Tk+EhZvHL
5pwMra8XtCDq5rtQuG6d7SQp0aB1uQEXioIkSzazaMO2KRxSJsLjXpSwPQn6dcrwEpDafX
1bgTocYA+fg54qv0F1luBOojv5XaUYY/6Qh0avdVohP00uS5vnNCSrFR4K/VB2Xgi3F0Io
dm7gdGwN/VCXpkydW3o8x/ka1vWQhAWdhfcd4ZNu10tya3m1wAAADBAPveoaaPPK0k4v5B
5TMXlnz939H7i+iwBx3cDgZNRX8c1nnhb+rjy/JQV0N01v6CUmAR4NgRe4SrCb7HgRcvAd
Ac2uYzT3f5/F3Gj/zETGM1cCrMox64BPqIPkMrQVtq58AdclJqqiqvbYYl5oycbkKd2CcF
dhMh5GAI2RTDMcxQzcM5EBGh9vWxUtCosNBsGMRm8jvUXg8fpNIduf6B+qU3pNen6otPPt
ydGZStR+iAkf4p8ny0OJQ+lTPMfimzvwAAAMEAyHUXp/60l7g6A1s5lOgvnUwJYlVk5MRl
QEfdAHVbIhqM+Vig4po1nk2zVf+VKtZe6JIalcrelHydohMgFIsMsfFOn/lhjuL+yUaeb9
ud0aJmP7MPOcf2uFv5iqN87Q893OHdkoZSak2SHWQm/Sho3tHKaM7OdQwOiwJqnzyPc8Dg
YD/JJWsqzNRCQ9BL7zuaf1+0gb5lBJGw95kBDg7rOuKQXdk7uWxQCZPXj3/xO2kk0t/cTa
cgHT4D/mOucfRPAAAAFm5qZXVkeUBERVNLVE9QLU1MUk5HOEQBAgME
-----END OPENSSH PRIVATE KEY-----

1
custom/ssh/cloudron_git.rsa.pub

@ -1 +0,0 @@
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDFOSJQSmDauFUBu8iaSTYEAogHXeQKDYjI26JS0/ApYG3iDIPNDBKv36WENxNvjVSZAWfT750EK95GRNNp9j53bdpZId/698zjvQJLN3Ci2Eci6Q2c/JG9YDCE5ntfnV68DaFi5S915l7HRGqvB3G7W+ukKw/AfGIQAybkB1zzJuoSIoGShG/bQ8pqzVYSX+ZGlJNp0ZRlWHR8kWn2BEUqZkypdabzWEHkykovg4F38A+FFl+o1PRY5MgEcUrJKMCo2Lgfx5Rbe+iL+K9IxMK/UzLJvNIQdEtLx/MNr3Mnq5enr9I5VI3IhPyg2oEE4bQGTTDbL0TfaVG2eqa6BVTjQJKgjB/0vILLelJ2ozWYz6xdOx3+2pjhWj8fWbpT7OMRfX/2kthtLS4Nt0IC8KGjFCD4e0JnvjBfZz7IWtVwOqoc06oJts92NI2t0IVV1m3HYDYC6oAsWdeyRzWUnhVM6wkWvlCgCYsZFg3SW+rM1VTAj8GSWYhg6CLxy9d/g/E= njeudy@DESKTOP-MLRNG8D

7
custom/ssh/config

@ -1,7 +0,0 @@
# See syntax in https://www.ssh.com/ssh/config/ and `man ssh_config`
Host *
IgnoreUnknown UseKeychain
UseKeychain yes
IdentityFile /root/.ssh/cloudron_git.rsa
PubkeyAcceptedKeyTypes=+ssh-dss
AddKeysToAgent yes

14
custom/ssh/known_hosts

@ -1,14 +0,0 @@
# Use `ssh-keyscan` to fill this file and ensure remote git hosts ssh keys
# bitbucket.org:22 SSH-2.0-conker_1.0.298-8c5a6f7 app-126
bitbucket.org ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAubiN81eDcafrgMeLzaFPsw2kNvEcqTKl/VqLat/MaB33pZy0y3rJZtnqwR2qOOvbwKZYKiEO1O6VqNEBxKvJJelCq0dTXWT5pbO2gDXC6h6QDXCaHo6pOHGPUy+YBaGQRGuSusMEASYiWunYN0vCAI8QaXnWMXNMdFP3jHAJH0eDsoiGnLPBlBp4TNm6rYI74nMzgz3B9IikW4WVK+dc8KZJZWYjAuORU3jc1c/NPskD2ASinf8v3xnfXeukU0sJ5N6m5E8VLjObPEO+mN2t/FZTMZLiFqPWc/ALSqnMnnhwrNi2rbfg/rd/IpL8Le3pSBne8+seeFVBoGqzHM9yXw==
# github.com:22 SSH-2.0-libssh-0.7.0
github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
# gitlab.com:22 SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.2
gitlab.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCsj2bNKTBSpIYDEGk9KxsGh3mySTRgMtXL583qmBpzeQ+jqCMRgBqB98u3z++J1sKlXHWfM9dyhSevkMwSbhoR8XIq/U0tCNyokEi/ueaBMCvbcTHhO7FcwzY92WK4Yt0aGROY5qX2UKSeOvuP4D6TPqKF1onrSzH9bx9XUf2lEdWT/ia1NEKjunUqu1xOB/StKDHMoX4/OKyIzuS0q/T1zOATthvasJFoPrAjkohTyaDUz2LN5JoH839hViyEG82yB+MjcFV5MU3N1l1QL3cVUCh93xSaua1N85qivl+siMkPGbO5xR/En4iEY6K2XPASUEMaieWVNTRCtJ4S8H+9
# gitlab.com:22 SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.2
gitlab.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFSMqzJeV9rUzU4kWitGjeR4PWSa29SPqJ1fVkhtj3Hw9xjLVXVYrU9QlYWrOLXBpQ6KWjbjTDTdDkoohFzgbEY=
# gitlab.com:22 SSH-2.0-OpenSSH_7.2p2 Ubuntu-4ubuntu2.2
gitlab.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAfuCHKVTjquxvt6CM6tdG4SLp1Btn/nOeHHE5UOzRdf

1
dev-scripts/README.md

@ -1 +0,0 @@
Scripts inside this folder are only used as a reference during development and have no effect on the Docker image or containers.

11
dev-scripts/build-push-install.sh

@ -1,11 +0,0 @@
#!/bin/sh
VERSION=15.0
DOMAIN='<domain in cloudron to install this app>'
AUTHOR='<your name>'
docker build -t $AUTHOR/cloudron-odoo:$VERSION ./ && docker push $AUTHOR/cloudron-odoo:$VERSION
cloudron install --image $AUTHOR/cloudron-odoo:$VERSION -l $DOMAIN
cloudron logs -f --app $DOMAIN

11
dev-scripts/docker-run-openldap.sh

@ -1,11 +0,0 @@
#!/bin/bash -e
# Run OpenLDAP & phpLDAPadmin on the network named localnet
# Create a network, if not exists: `docker network create localnet`
docker run --name ldap-service --hostname ldap-service --network localnet --detach osixia/openldap:1.1.8
docker run -p 8091:443 --name phpldapadmin-service --hostname phpldapadmin-service --network localnet --env PHPLDAPADMIN_LDAP_HOSTS=ldap-service --detach osixia/phpldapadmin:0.9.0
echo "Go to: https://localhost:8091"
echo "Login DN: cn=admin,dc=example,dc=org"
echo "Password: admin"

25
dev-scripts/docker-run-postgres.sh

@ -1,25 +0,0 @@
#!/bin/sh
# Run `postgres` container named `postgres` in docker network `localnet`
# Create a network, if not exists: `docker network create localnet`
docker run --name postgres -d -p 5432:5432 --network localnet \
-e POSTGRES_USER=odoo_user \
-e POSTGRES_PASSWORD=odoo_password \
-e POSTGRES_DB=odoo \
postgres:latest
# Login to the psql CLI (the container above was created with user odoo_user / password odoo_password)
PGPASSWORD=odoo_password psql -h 127.0.0.1 -p 5432 -U odoo_user -d odoo
# Inside psql, create a user called 'odoo_user' if it does not exist yet:
#   CREATE ROLE odoo_user WITH LOGIN;
#   \password odoo_user
#   (enter a password such as odoo_password; use a stronger one outside local testing)
# Recreate the database quickly from inside psql:
#   DROP DATABASE odoo;
#   CREATE DATABASE odoo WITH ENCODING 'utf-8' OWNER odoo_user;
# Try logging in as odoo_user
PGPASSWORD=odoo_password psql -h 172.17.0.1 -p 5432 -U odoo_user -d odoo

11
dev-scripts/docker-run.sh

@ -1,11 +0,0 @@
#!/bin/sh
docker rm -f odoo_container && rm -rf ./.docker/* && mkdir -p ./.docker/run/nginx ./.docker/run/odoo ./.docker/app/data ./.docker/tmp && \
BUILDKIT_PROGRESS=plain docker build -t odoo_custom . && docker run --read-only \
-v "$(pwd)"/.docker/app/data:/app/data:rw \
-v "$(pwd)"/.docker/tmp:/tmp:rw \
-v "$(pwd)"/.docker/run:/run:rw \
-p 8000:8000 \
--network localnet \
--name odoo_container \
odoo_custom

30
dev-scripts/simulate-cloudron.sh

@ -1,30 +0,0 @@
if [[ -z "${CLOUDRON+x}" ]]; then
echo "Not Cloudron. Setting testing vars..."
export CLOUDRON_POSTGRESQL_PORT=5432
export CLOUDRON_POSTGRESQL_HOST=172.17.0.1
export CLOUDRON_POSTGRESQL_DATABASE=odootest
export CLOUDRON_POSTGRESQL_USERNAME=odoo_user
export CLOUDRON_POSTGRESQL_PASSWORD=odoo_password
export CLOUDRON_APP_DOMAIN=odoo.localhost
export CLOUDRON_APP_ORIGIN=https://odoo.localhost
export CLOUDRON_MAIL_SMTP_SERVER='localhost'
export CLOUDRON_MAIL_SMTP_PORT='25'
export CLOUDRON_MAIL_SMTP_USERNAME='username'
export CLOUDRON_MAIL_SMTP_PASSWORD='password'
export CLOUDRON_MAIL_FROM='from@localhost'
export CLOUDRON_MAIL_IMAP_SERVER='localhost'
export CLOUDRON_MAIL_IMAP_PORT='25'
export CLOUDRON_MAIL_IMAP_USERNAME='username'
export CLOUDRON_MAIL_IMAP_PASSWORD='password'
export CLOUDRON_LDAP_SERVER='172.18.0.1'
export CLOUDRON_LDAP_PORT='3002'
export CLOUDRON_LDAP_URL='ldap://172.18.0.1:3002'
export CLOUDRON_LDAP_USERS_BASE_DN='ou=users,dc=cloudron'
export CLOUDRON_LDAP_GROUPS_BASE_DN='ou=groups,dc=cloudron'
export CLOUDRON_LDAP_BIND_DN='cn=app_id,ou=apps,dc=cloudron'
export CLOUDRON_LDAP_BIND_PASSWORD='example_bind_password'
fi
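When these simulated values are in place, the database settings can be verified outside Odoo with a short connection check. The sketch below assumes psycopg2 is installed locally and a PostgreSQL instance is reachable at the values exported above; it is not part of the packaged scripts.

# illustrative sketch, not part of the packaged scripts: verify the simulated DB settings
import os

import psycopg2

conn = psycopg2.connect(
    host=os.environ["CLOUDRON_POSTGRESQL_HOST"],
    port=os.environ["CLOUDRON_POSTGRESQL_PORT"],
    user=os.environ["CLOUDRON_POSTGRESQL_USERNAME"],
    password=os.environ["CLOUDRON_POSTGRESQL_PASSWORD"],
    dbname=os.environ["CLOUDRON_POSTGRESQL_DATABASE"],
)
print(conn.get_dsn_parameters())
conn.close()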

173
lib/doodbalib/__init__.py

@ -1,173 +0,0 @@
# -*- coding: utf-8 -*-
import logging
import os
from glob import glob
from pprint import pformat
from subprocess import check_output
import yaml
# Constants needed in scripts
CUSTOM_DIR = "/app/code/custom"
AUTO_DIR = "/app/code/auto"
ADDONS_DIR = os.path.join(AUTO_DIR, "addons")
SRC_DIR = os.path.join(CUSTOM_DIR, "src")
ADDONS_YAML = os.path.join(SRC_DIR, "addons")
if os.path.isfile("%s.yaml" % ADDONS_YAML):
ADDONS_YAML = "%s.yaml" % ADDONS_YAML
else:
ADDONS_YAML = "%s.yml" % ADDONS_YAML
REPOS_YAML = os.path.join(SRC_DIR, "repos")
if os.path.isfile("%s.yaml" % REPOS_YAML):
REPOS_YAML = "%s.yaml" % REPOS_YAML
else:
REPOS_YAML = "%s.yml" % REPOS_YAML
AUTO_REPOS_YAML = os.path.join(AUTO_DIR, "repos")
if os.path.isfile("%s.yml" % AUTO_REPOS_YAML):
AUTO_REPOS_YAML = "%s.yml" % AUTO_REPOS_YAML
else:
AUTO_REPOS_YAML = "%s.yaml" % AUTO_REPOS_YAML
CLEAN = os.environ.get("CLEAN") == "true"
LOG_LEVELS = frozenset({"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"})
FILE_APT_BUILD = os.path.join(CUSTOM_DIR, "dependencies", "apt_build.txt")
PRIVATE = "private"
CORE = "odoo/addons"
ENTERPRISE = "enterprise"
PRIVATE_DIR = os.path.join(SRC_DIR, PRIVATE)
CORE_DIR = os.path.join(SRC_DIR, CORE)
ODOO_DIR = os.path.join(SRC_DIR, "odoo")
ODOO_VERSION = os.environ["ODOO_VERSION"]
MANIFESTS = ("__manifest__.py", "__openerp__.py")
# Customize logging for build
logger = logging.getLogger("doodba")
log_handler = logging.StreamHandler()
log_formatter = logging.Formatter("%(name)s %(levelname)s: %(message)s")
log_handler.setFormatter(log_formatter)
logger.addHandler(log_handler)
_log_level = os.environ.get("LOG_LEVEL", "")
if _log_level.isdigit():
_log_level = int(_log_level)
elif _log_level in LOG_LEVELS:
_log_level = getattr(logging, _log_level)
else:
if _log_level:
logger.warning("Wrong value in $LOG_LEVEL, falling back to INFO")
_log_level = logging.INFO
logger.setLevel(_log_level)
class AddonsConfigError(Exception):
def __init__(self, message, *args):
super(AddonsConfigError, self).__init__(message, *args)
self.message = message
def addons_config(filtered=True, strict=False):
"""Yield addon name and path from ``ADDONS_YAML``.
:param bool filtered:
Use ``False`` to include all addon definitions. Use ``True`` (default)
to include only those matched by ``ONLY`` clauses, if any.
:param bool strict:
Use ``True`` to raise an exception if any declared addon is not found.
:return Iterator[str, str]:
A generator that yields ``(addon, repo)`` pairs.
"""
config = dict()
missing_glob = set()
missing_manifest = set()
all_globs = {}
try:
with open(ADDONS_YAML) as addons_file:
for doc in yaml.safe_load_all(addons_file):
# Skip sections with ONLY and that don't match
only = doc.pop("ONLY", {})
if not filtered:
doc.setdefault(CORE, ["*"])
doc.setdefault(PRIVATE, ["*"])
elif any(
os.environ.get(key) not in values for key, values in only.items()
):
logger.debug("Skipping section with ONLY %s", only)
continue
# Flatten all sections in a single dict
for repo, partial_globs in doc.items():
if repo == "ENV":
continue
logger.debug("Processing %s repo", repo)
all_globs.setdefault(repo, set())
all_globs[repo].update(partial_globs)
except IOError:
logger.debug("Could not find addons configuration yaml.")
# Add default values for special sections
for repo in (CORE, PRIVATE):
all_globs.setdefault(repo, {"*"})
logger.debug("Merged addons definition before expanding: %r", all_globs)
# Expand all globs and store config
for repo, partial_globs in all_globs.items():
for partial_glob in partial_globs:
logger.debug("Expanding in repo %s glob %s", repo, partial_glob)
full_glob = os.path.join(SRC_DIR, repo, partial_glob)
found = glob(full_glob)
if not found:
# Projects without private addons should never fail
if (repo, partial_glob) != (PRIVATE, "*"):
missing_glob.add(full_glob)
logger.debug("Skipping unexpandable glob '%s'", full_glob)
continue
for addon in found:
if not os.path.isdir(addon):
continue
manifests = (os.path.join(addon, m) for m in MANIFESTS)
if not any(os.path.isfile(m) for m in manifests):
missing_manifest.add(addon)
logger.debug(
"Skipping '%s' as it is not a valid Odoo " "module", addon
)
continue
logger.debug("Registering addon %s", addon)
addon = os.path.basename(addon)
config.setdefault(addon, set())
config[addon].add(repo)
# Fail now if running in strict mode
if strict:
error = []
if missing_glob:
error += ["Addons not found:", pformat(missing_glob)]
if missing_manifest:
error += ["Addons without manifest:", pformat(missing_manifest)]
if error:
raise AddonsConfigError("\n".join(error), missing_glob, missing_manifest)
logger.debug("Resulting configuration after expanding: %r", config)
for addon, repos in config.items():
# Private addons are most important
if PRIVATE in repos:
yield addon, PRIVATE
continue
# Odoo core addons are least important
if repos == {CORE}:
yield addon, CORE
continue
repos.discard(CORE)
# Other addons fall in between
if filtered and len(repos) != 1:
raise AddonsConfigError(
u"Addon {} defined in several repos {}".format(addon, repos)
)
for repo in repos:
yield addon, repo
try:
from shutil import which
except ImportError:
# Custom which implementation for Python 2
def which(binary):
return check_output(["which", binary]).strip()

119
lib/doodbalib/installer.py

@ -1,119 +0,0 @@
# -*- coding: utf-8 -*-
from collections import OrderedDict
from os.path import exists
from subprocess import check_call
from doodbalib import logger
class Installer(object):
"""Base class to install packages with some package system."""
_cleanup_commands = []
_install_command = None
_remove_command = None
def __init__(self, file_path):
self.file_path = file_path
self._requirements = self.requirements()
def _run_command(self, command):
logger.info("Executing: %s", command)
return check_call(command, shell=isinstance(command, str))
def cleanup(self):
"""Remove cache and other garbage produced by the installer engine."""
for command in self._cleanup_commands:
self._run_command(command)
def install(self):
"""Install the requirements from the given file."""
if self._requirements:
return not self._run_command(self._install_command + self._requirements)
else:
logger.info("No installable requirements found in %s", self.file_path)
return False
def remove(self):
"""Uninstall the requirements from the given file."""
if not self._remove_command:
return
if self._requirements:
self._run_command(self._remove_command + self._requirements)
else:
logger.info("No removable requirements found in %s", self.file_path)
def requirements(self):
"""Get a list of requirements from the given file."""
requirements = []
try:
with open(self.file_path, "r") as fh:
for line in fh:
line = line.strip()
if not line or line.startswith("#"):
continue
requirements += line.split()
except IOError:
# No requirements file
pass
return requirements
class AptInstaller(Installer):
_cleanup_commands = [["apt-get", "-y", "autoremove"], "rm -Rf /var/lib/apt/lists/*"]
_install_command = [
"apt-get",
"-o",
"Dpkg::Options::=--force-confdef",
"-o",
"Dpkg::Options::=--force-confold",
"-y",
"--no-install-recommends",
"install",
]
_remove_command = ["apt-get", "purge", "-y"]
def _dirty(self):
return exists("/var/lib/apt/lists/lock")
def cleanup(self):
if self._dirty():
super(AptInstaller, self).cleanup()
def install(self):
if not self._dirty() and self._requirements:
self._run_command(["apt-get", "update"])
return super(AptInstaller, self).install()
class GemInstaller(Installer):
_cleanup_commands = ["rm -Rf ~/.gem /var/lib/gems/*/cache/"]
_install_command = ["gem", "install", "--no-document", "--no-update-sources"]
class NpmInstaller(Installer):
_cleanup_commands = ["rm -Rf ~/.npm /tmp/*"]
_install_command = ["npm", "install", "-g"]
class PipInstaller(Installer):
_install_command = ["pip", "install", "--no-cache-dir", "-r"]
def requirements(self):
"""Pip will use its ``--requirements`` feature."""
return [self.file_path] if exists(self.file_path) else []
INSTALLERS = OrderedDict(
[
("apt", AptInstaller),
("gem", GemInstaller),
("npm", NpmInstaller),
("pip", PipInstaller),
]
)
def install(installer, file_path):
"""Perform a given type of installation from a given file."""
return INSTALLERS[installer](file_path).install()
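A minimal sketch of driving these installers the way the build scripts do, assuming the dependency files under /app/code/custom/dependencies/ shown elsewhere in this diff (not part of this file):

# illustrative sketch, not part of this file: run every installer, then clean up
from doodbalib.installer import INSTALLERS

for name, klass in INSTALLERS.items():          # apt, gem, npm, pip - in this order
    installer = klass("/app/code/custom/dependencies/%s.txt" % name)
    installer.install()
    installer.cleanup()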

BIN
logo.png

After

Width: 500  |  Height: 500  |  Size: 41 KiB

2
manifest/CHANGELOG.md

@ -1,2 +0,0 @@
v15.0
First release of Odoo 15.0.

12
manifest/DESCRIPTION.md

@ -1,12 +0,0 @@
Run Odoo on Cloudron.
Features
---
- Uses Cloudron LDAP for user federation
- Uses Cloudron SMTP for sending emails
- Uses Cloudron IMAP for incoming emails
- Hardened to disable database selection, debug mode, etc.
- Supports custom addons installed at `/app/data/extra-addons`
- Supports customization using `/app/data/odoo.conf`
- Supports long-polling features such as chat via a custom `/run/nginx/nginx.conf` file

6
manifest/POSTINSTALL.md

@ -1,6 +0,0 @@
Log in with the default credentials and change your password immediately by clicking on the profile
(**Administrator**) button in the top right corner of the screen.
Email: `admin`
Password: `admin`

BIN
manifest/logo.png

Before

Width: 256  |  Height: 256  |  Size: 7.8 KiB

93
nginx.conf

@ -1,93 +0,0 @@
# Based on https://git.cloudron.io/cloudron/taiga-app/-/raw/master/nginx.conf
daemon off;
worker_processes auto;
pid /run/nginx.pid;
error_log stderr;
events {
worker_connections 768;
}
http {
sendfile on;
tcp_nopush on;
tcp_nodelay on;
keepalive_timeout 65;
types_hash_max_size 2048;
include /etc/nginx/mime.types;
default_type application/octet-stream;
client_body_temp_path /run/nginx/client_body;
proxy_temp_path /run/nginx/proxy_temp;
fastcgi_temp_path /run/nginx/fastcgi_temp;
scgi_temp_path /run/nginx/scgi_temp;
uwsgi_temp_path /run/nginx/uwsgi_temp;
##
# Logging Settings
##
access_log /dev/stdout;
##
# Gzip Settings
##
gzip on;
gzip_disable "msie6";
##
# Virtual Host Configs
##
#odoo server
upstream odoo {
server 127.0.0.1:8069;
}
upstream odoo-lp {
server 127.0.0.1:8072;
}
server {
listen 8000 default_server;
include /app/data/nginx-custom-locations.conf;
proxy_read_timeout 720s;
proxy_connect_timeout 720s;
proxy_send_timeout 720s;
large_client_header_buffers 4 32k;
client_max_body_size 50M;
charset utf-8;
# Get real IP from Cloudron nginx
set_real_ip_from 172.18.0.0/16;
real_ip_header X-Forwarded-For;
real_ip_recursive on;
# Add Headers for odoo proxy mode
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $realip_remote_addr;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Host $host;
# Redirect longpoll requests to odoo longpolling port
location /longpolling {
proxy_pass http://odoo-lp;
}
# Redirect requests to odoo backend server
location / {
proxy_redirect off;
proxy_pass http://odoo;
}
# common gzip
gzip_types text/css text/scss text/plain text/xml application/xml application/json application/javascript;
gzip on;
}
}

3
odoo.conf.sample

@ -1,3 +0,0 @@
[options]
addons_path = /app/code/addons,/app/data/extra-addons
data_dir = /app/data/odoo

271
odoo12CE_install.sh

@ -0,0 +1,271 @@
#!/bin/bash
# modified for cloudron - Samir Saidani
################################################################################
# Script for installing Odoo on Ubuntu 14.04, 15.04, 16.04 and 18.04 (could be used for other versions too)
# Author: Yenthe Van Ginneken
#-------------------------------------------------------------------------------
# This script will install Odoo on your Ubuntu 16.04 server. It can install multiple Odoo instances
# on one Ubuntu server because each instance uses a different xmlrpc_port
#-------------------------------------------------------------------------------
# Make a new file:
# sudo nano odoo-install.sh
# Place this content in it and then make the file executable:
# sudo chmod +x odoo-install.sh
# Execute the script to install Odoo:
# ./odoo-install.sh
################################################################################
OE_USER="odoo"
OE_HOME="/app/code"
OE_HOME_EXT="$OE_HOME/${OE_USER}-server"
# Set to true if you want to install it, false if you don't need it or have it already installed.
INSTALL_WKHTMLTOPDF="False"
# Set the default Odoo port (you still have to use -c /etc/odoo-server.conf for example to use this.)
OE_PORT="8069"
# Choose the Odoo version which you want to install. For example: 12.0, 11.0, 10.0 or saas-18. When using 'master' the master version will be installed.
# IMPORTANT! This script contains extra libraries that are specifically needed for Odoo 12.0
OE_VERSION="12.0"
# Set this to True if you want to install the Odoo enterprise version!
IS_ENTERPRISE="False"
# set the superadmin password
OE_SUPERADMIN="admin"
OE_CONFIG="${OE_USER}"
##
### WKHTMLTOPDF download links
## === Ubuntu Trusty x64 & x32 === (for other distributions please replace these two links,
## in order to have correct version of wkhtmltopdf installed, for a danger note refer to
## https://github.com/odoo/odoo/wiki/Wkhtmltopdf ):
WKHTMLTOX_X64=https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.trusty_amd64.deb
WKHTMLTOX_X32=https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox_0.12.5-1.trusty_i386.deb
#--------------------------------------------------
# Update Server
#--------------------------------------------------
echo -e "\n---- Update Server ----"
# software-properties-common provides add-apt-repository on Ubuntu 18.x
sudo apt-get install software-properties-common -y
# the universe repository is needed on Ubuntu 18.x
sudo add-apt-repository universe
# libpng12-0 dependency for wkhtmltopdf
sudo add-apt-repository "deb http://mirrors.kernel.org/ubuntu/ xenial main"
sudo apt-get update
sudo apt-mark hold postfix phpmyadmin
sudo apt-get upgrade -y
#--------------------------------------------------
# Install PostgreSQL Server
#--------------------------------------------------
#echo -e "\n---- Install PostgreSQL Server ----"
#sudo apt-get install postgresql -y
echo -e "\n---- Creating the ODOO PostgreSQL User ----"
sudo su - postgres -c "createuser -s $OE_USER" 2> /dev/null || true
#--------------------------------------------------
# Install Dependencies
#--------------------------------------------------
echo -e "\n--- Installing Python 3 + pip3 --"
sudo apt-get install git python3 python3-pip build-essential wget python3-dev python3-venv python3-wheel libxslt-dev libzip-dev libldap2-dev libsasl2-dev python3-setuptools node-less libpng12-0 gdebi -y
sudo apt-get install python3-pypdf2 -y
echo -e "\n---- Upgrade pip ----"
pip install --upgrade pip
echo -e "\n---- Install python packages/requirements ----"
sudo pip3 install -r https://github.com/odoo/odoo/raw/${OE_VERSION}/requirements.txt
echo -e "\n---- Installing nodeJS NPM and rtlcss for LTR support ----"
sudo apt-get install nodejs npm -y
#sudo npm install -g rtlcss
echo -e "\n---- Installing modules necessary for mysqldb ----"
sudo pip3 install pyserial
#--------------------------------------------------
# Install Wkhtmltopdf if needed
#--------------------------------------------------
if [ $INSTALL_WKHTMLTOPDF = "True" ]; then
echo -e "\n---- Install wkhtml and place shortcuts on correct place for ODOO 12 ----"
#pick up correct one from x64 & x32 versions:
if [ "`getconf LONG_BIT`" == "64" ];then
_url=$WKHTMLTOX_X64
else
_url=$WKHTMLTOX_X32
fi
sudo wget $_url
sudo apt-get install gdebi-core -y
sudo gdebi --n `basename $_url`
sudo ln -s /usr/local/bin/wkhtmltopdf /usr/bin
sudo ln -s /usr/local/bin/wkhtmltoimage /usr/bin
else
echo "Wkhtmltopdf isn't installed due to the choice of the user!"
fi
echo -e "\n---- Create ODOO system user ----"
sudo adduser --system --quiet --shell=/bin/bash --home=$OE_HOME --gecos 'ODOO' --group $OE_USER
# The user should also be added to the sudoers group.
sudo adduser $OE_USER sudo
sudo chown -R $OE_USER:$OE_USER /app/code
sudo chown -R $OE_USER:$OE_USER /app/data
echo -e "\n---- Create Log directory ----"
sudo mkdir /var/log/$OE_USER
sudo chown $OE_USER:$OE_USER /var/log/$OE_USER
#--------------------------------------------------
# Install ODOO
#--------------------------------------------------
echo -e "\n==== Installing ODOO Server ===="
sudo git clone --depth 1 --branch $OE_VERSION https://www.github.com/odoo/odoo $OE_HOME_EXT/
if [ $IS_ENTERPRISE = "True" ]; then
# Odoo Enterprise install!
echo -e "\n--- Create symlink for node"
sudo ln -s /usr/bin/nodejs /usr/bin/node
sudo su $OE_USER -c "mkdir $OE_HOME/enterprise"
sudo su $OE_USER -c "mkdir $OE_HOME/enterprise/addons"
GITHUB_RESPONSE=$(sudo git clone --depth 1 --branch $OE_VERSION https://www.github.com/odoo/enterprise "$OE_HOME/enterprise/addons" 2>&1)
while [[ $GITHUB_RESPONSE == *"Authentication"* ]]; do
echo "------------------------WARNING------------------------------"
echo "Your authentication with Github has failed! Please try again."
printf "In order to clone and install the Odoo enterprise version you \nneed to be an offical Odoo partner and you need access to\nhttp://github.com/odoo/enterprise.\n"
echo "TIP: Press ctrl+c to stop this script."
echo "-------------------------------------------------------------"
echo " "
GITHUB_RESPONSE=$(sudo git clone --depth 1 --branch $OE_VERSION https://www.github.com/odoo/enterprise "$OE_HOME/enterprise/addons" 2>&1)
done
echo -e "\n---- Added Enterprise code under $OE_HOME/enterprise/addons ----"
echo -e "\n---- Installing Enterprise specific libraries ----"
sudo pip3 install num2words ofxparse
sudo npm install -g less
sudo npm install -g less-plugin-clean-css
fi
echo -e "\n---- Create custom module directory ----"
# sudo su $OE_USER -c "mkdir $OE_HOME/custom"
# sudo su $OE_USER -c "mkdir $OE_HOME/custom/addons"
sudo su $OE_USER -c "mkdir $OE_HOME/extra-addons"
echo -e "\n---- Setting permissions on home folder ----"
sudo chown -R $OE_USER:$OE_USER $OE_HOME/*
echo -e "* Create server config file"
sudo touch /app/data/${OE_CONFIG}.conf
sudo chown $OE_USER:$OE_USER /app/data/${OE_CONFIG}.conf
sudo chmod 640 /app/data/${OE_CONFIG}.conf
echo -e "* Creating server config file"
sudo su root -c "printf '[options] \n; This is the password that allows database operations:\n' >> /app/data/${OE_CONFIG}.conf"
sudo su root -c "printf 'admin_passwd = ${OE_SUPERADMIN}\n' >> /app/data/${OE_CONFIG}.conf"
sudo su root -c "printf 'xmlrpc_port = ${OE_PORT}\n' >> /app/data/${OE_CONFIG}.conf"
sudo su root -c "printf 'logfile =/var/log/${OE_USER}/${OE_CONFIG}.log\n' >> /app/data/${OE_CONFIG}.conf"
if [ $IS_ENTERPRISE = "True" ]; then
sudo su root -c "printf 'addons_path=${OE_HOME}/enterprise/addons,${OE_HOME_EXT}/addons\n' >> /app/data/${OE_CONFIG}.conf"
else
sudo su root -c "printf 'addons_path=${OE_HOME_EXT}/addons,${OE_HOME}/extra-addons\n' >> /app/data/${OE_CONFIG}.conf"
fi
echo -e "* Create startup file"
sudo su root -c "echo '#!/bin/sh' >> $OE_HOME_EXT/start-odoo.sh"
sudo su root -c "echo 'sudo -u $OE_USER $OE_HOME_EXT/odoo-bin --config=/app/data/${OE_CONFIG}.conf' >> $OE_HOME_EXT/start-odoo.sh"
sudo chmod 755 $OE_HOME_EXT/start-odoo.sh
#--------------------------------------------------
# Adding ODOO as a daemon (initscript)
#--------------------------------------------------
echo -e "* Create init file"
cat <<EOF > ~/$OE_CONFIG
#!/bin/sh
### BEGIN INIT INFO
# Provides: $OE_CONFIG
# Required-Start: \$remote_fs \$syslog
# Required-Stop: \$remote_fs \$syslog
# Should-Start: \$network
# Should-Stop: \$network
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Enterprise Business Applications
# Description: ODOO Business Applications
### END INIT INFO
PATH=/bin:/sbin:/usr/bin
DAEMON=$OE_HOME_EXT/odoo-bin
NAME=$OE_CONFIG
DESC=$OE_CONFIG
# Specify the user name (Default: odoo).
USER=$OE_USER
# Specify an alternate config file (Default: /etc/openerp-server.conf).
CONFIGFILE="/app/data/${OE_CONFIG}.conf"
# pidfile
PIDFILE=/var/run/\${NAME}.pid
# Additional options that are passed to the Daemon.
DAEMON_OPTS="-c \$CONFIGFILE"
[ -x \$DAEMON ] || exit 0
[ -f \$CONFIGFILE ] || exit 0
checkpid() {
[ -f \$PIDFILE ] || return 1
pid=\`cat \$PIDFILE\`
[ -d /proc/\$pid ] && return 0
return 1
}
case "\${1}" in
start)
echo -n "Starting \${DESC}: "
start-stop-daemon --start --quiet --pidfile \$PIDFILE \
--chuid \$USER --background --make-pidfile \
--exec \$DAEMON -- \$DAEMON_OPTS
echo "\${NAME}."
;;
stop)
echo -n "Stopping \${DESC}: "
start-stop-daemon --stop --quiet --pidfile \$PIDFILE \
--oknodo
echo "\${NAME}."
;;
restart|force-reload)
echo -n "Restarting \${DESC}: "
start-stop-daemon --stop --quiet --pidfile \$PIDFILE \
--oknodo
sleep 1
start-stop-daemon --start --quiet --pidfile \$PIDFILE \
--chuid \$USER --background --make-pidfile \
--exec \$DAEMON -- \$DAEMON_OPTS
echo "\${NAME}."
;;
*)
N=/etc/init.d/\$NAME
echo "Usage: \$NAME {start|stop|restart|force-reload}" >&2
exit 1
;;
esac
exit 0
EOF
echo -e "* Security Init File"
sudo mv ~/$OE_CONFIG /etc/init.d/$OE_CONFIG
sudo chmod 755 /etc/init.d/$OE_CONFIG
sudo chown root: /etc/init.d/$OE_CONFIG
echo -e "* Start ODOO on Startup"
sudo update-rc.d $OE_CONFIG defaults
echo -e "* Starting Odoo Service"
sudo su root -c "/etc/init.d/$OE_CONFIG start"
echo "-----------------------------------------------------------"
echo "Done! The Odoo server is up and running. Specifications:"
echo "Port: $OE_PORT"
echo "User service: $OE_USER"
echo "User PostgreSQL: $OE_USER"
echo "Code location: $OE_USER"
echo "Addons folder: $OE_HOME/addons/"
echo "Start Odoo service: sudo service $OE_CONFIG start"
echo "Stop Odoo service: sudo service $OE_CONFIG stop"
echo "Restart Odoo service: sudo service $OE_CONFIG restart"
echo "-----------------------------------------------------------"

693
sql_db.py

@ -0,0 +1,693 @@
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
"""
The PostgreSQL connector is a connectivity layer between the OpenERP code and
the database, *not* a database abstraction toolkit. Database abstraction is what
the ORM does, in fact.
"""
from contextlib import contextmanager
from functools import wraps
import logging
import time
import urllib.parse
import uuid
import psycopg2
import psycopg2.extras
import psycopg2.extensions
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.pool import PoolError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
_logger = logging.getLogger(__name__)
types_mapping = {
'date': (1082,),
'time': (1083,),
'datetime': (1114,),
}
def unbuffer(symb, cr):
if symb is None:
return None
return str(symb)
def undecimalize(symb, cr):
if symb is None:
return None
return float(symb)
for name, typeoid in types_mapping.items():
psycopg2.extensions.register_type(psycopg2.extensions.new_type(typeoid, name, lambda x, cr: x))
psycopg2.extensions.register_type(psycopg2.extensions.new_type((700, 701, 1700,), 'float', undecimalize))
from odoo import tools
from odoo.tools.func import frame_codeinfo
from datetime import timedelta
import threading
from inspect import currentframe
import re
re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$')
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$')
sql_counter = 0
class Cursor(object):
"""Represents an open transaction to the PostgreSQL DB backend,
acting as a lightweight wrapper around psycopg2's
``cursor`` objects.
``Cursor`` is the object behind the ``cr`` variable used all
over the OpenERP code.
.. rubric:: Transaction Isolation
One very important property of database transactions is the
level of isolation between concurrent transactions.
The SQL standard defines four levels of transaction isolation,
ranging from the most strict *Serializable* level, to the least
strict *Read Uncommitted* level. These levels are defined in
terms of the phenomena that must not occur between concurrent
transactions, such as *dirty read*, etc.
In the context of a generic business data management software
such as OpenERP, we need the best guarantees that no data
corruption can ever be caused by simply running multiple
transactions in parallel. Therefore, the preferred level would
be the *serializable* level, which ensures that a set of
transactions is guaranteed to produce the same effect as
running them one at a time in some order.
However, most database management systems implement a limited
serializable isolation in the form of
`snapshot isolation <http://en.wikipedia.org/wiki/Snapshot_isolation>`_,
providing most of the same advantages as True Serializability,
with a fraction of the performance cost.
With PostgreSQL up to version 9.0, this snapshot isolation was
the implementation of both the ``REPEATABLE READ`` and
``SERIALIZABLE`` levels of the SQL standard.
As of PostgreSQL 9.1, the previous snapshot isolation implementation
was kept for ``REPEATABLE READ``, while a new ``SERIALIZABLE``
level was introduced, providing some additional heuristics to
detect a concurrent update by parallel transactions, and forcing
one of them to rollback.
OpenERP implements its own level of locking protection
for transactions that are highly likely to provoke concurrent
updates, such as stock reservations or document sequences updates.
Therefore we mostly care about the properties of snapshot isolation,
but we don't really need additional heuristics to trigger transaction
rollbacks, as we are taking care of triggering instant rollbacks
ourselves when it matters (and we can save the additional performance
hit of these heuristics).
As a result of the above, we have selected ``REPEATABLE READ`` as
the default transaction isolation level for OpenERP cursors, as
it will be mapped to the desired ``snapshot isolation`` level for
all supported PostgreSQL versions (8.3 - 9.x).
Note: up to psycopg2 v.2.4.2, psycopg2 itself remapped the repeatable
read level to serializable before sending it to the database, so it would
actually select the new serializable mode on PostgreSQL 9.1. Make
sure you use psycopg2 v2.4.2 or newer if you use PostgreSQL 9.1 and
the performance hit is a concern for you.
.. attribute:: cache
Cache dictionary with a "request" (-ish) lifecycle, only lives as
long as the cursor itself does and is proactively cleared when the
cursor is closed.
This cache should *only* be used to store repeatable reads as it
ignores rollbacks and savepoints, it should not be used to store
*any* data which may be modified during the life of the cursor.
"""
IN_MAX = 1000 # decent limit on size of IN queries - guideline = Oracle limit
def check(f):
@wraps(f)
def wrapper(self, *args, **kwargs):
if self._closed:
msg = 'Unable to use a closed cursor.'
if self.__closer:
msg += ' It was closed at %s, line %s' % self.__closer
raise psycopg2.OperationalError(msg)
return f(self, *args, **kwargs)
return wrapper
def __init__(self, pool, dbname, dsn, serialized=True):
self.sql_from_log = {}
self.sql_into_log = {}
# default log level determined at cursor creation, could be
# overridden later for debugging purposes
self.sql_log = _logger.isEnabledFor(logging.DEBUG)
self.sql_log_count = 0
# avoid the call of close() (by __del__) if an exception
# is raised by any of the following initialisations
self._closed = True
self.__pool = pool
self.dbname = dbname
# Whether to enable snapshot isolation level for this cursor.
# see also the docstring of Cursor.
self._serialized = serialized
self._cnx = pool.borrow(dsn)
self._obj = self._cnx.cursor()
if self.sql_log:
self.__caller = frame_codeinfo(currentframe(), 2)
else:
self.__caller = False
self._closed = False # real initialisation value
self.autocommit(False)
self.__closer = False
self._default_log_exceptions = True
self.cache = {}
# event handlers, see method after() below
self._event_handlers = {'commit': [], 'rollback': []}
def __build_dict(self, row):
return {d.name: row[i] for i, d in enumerate(self._obj.description)}
def dictfetchone(self):
row = self._obj.fetchone()
return row and self.__build_dict(row)
def dictfetchmany(self, size):
return map(self.__build_dict, self._obj.fetchmany(size))
def dictfetchall(self):
return map(self.__build_dict, self._obj.fetchall())
def __del__(self):
if not self._closed and not self._cnx.closed:
# Oops. 'self' has not been closed explicitly.
# The cursor will be deleted by the garbage collector,
# but the database connection is not put back into the connection
# pool, preventing some operation on the database like dropping it.
# This can also lead to a server overload.
msg = "Cursor not closed explicitly\n"
if self.__caller:
msg += "Cursor was created at %s:%s" % self.__caller
else:
msg += "Please enable sql debugging to trace the caller."
_logger.warning(msg)
self._close(True)
@check
def execute(self, query, params=None, log_exceptions=None):
if params and not isinstance(params, (tuple, list, dict)):
# psycopg2's TypeError is not clear if you mess up the params
raise ValueError("SQL query parameters should be a tuple, list or dict; got %r" % (params,))
if self.sql_log:
now = time.time()
_logger.debug("query: %s", query)
try:
params = params or None
res = self._obj.execute(query, params)
except Exception:
if self._default_log_exceptions if log_exceptions is None else log_exceptions:
_logger.info("bad query: %s", self._obj.query or query)
raise
# simple query count is always computed
self.sql_log_count += 1
# advanced stats only if sql_log is enabled
if self.sql_log:
delay = (time.time() - now) * 1E6
res_from = re_from.match(query.lower())
if res_from:
self.sql_from_log.setdefault(res_from.group(1), [0, 0])
self.sql_from_log[res_from.group(1)][0] += 1
self.sql_from_log[res_from.group(1)][1] += delay
res_into = re_into.match(query.lower())
if res_into:
self.sql_into_log.setdefault(res_into.group(1), [0, 0])
self.sql_into_log[res_into.group(1)][0] += 1
self.sql_into_log[res_into.group(1)][1] += delay
return res
def split_for_in_conditions(self, ids, size=None):
"""Split a list of identifiers into one or more smaller tuples
safe for IN conditions, after uniquifying them."""
return tools.misc.split_every(size or self.IN_MAX, ids)
def print_log(self):
global sql_counter
if not self.sql_log:
return
def process(type):
sqllogs = {'from': self.sql_from_log, 'into': self.sql_into_log}
sum = 0
if sqllogs[type]:
sqllogitems = sorted(sqllogs[type].items(), key=lambda k: k[1][0])
_logger.debug("SQL LOG %s:", type)
for r in sqllogitems:
delay = timedelta(microseconds=r[1][1])
_logger.debug("table: %s: %s/%s", r[0], delay, r[1][0])
sum += r[1][1]
sqllogs[type].clear()
sum = timedelta(microseconds=sum)
_logger.debug("SUM %s:%s/%d [%d]", type, sum, self.sql_log_count, sql_counter)
sqllogs[type].clear()
process('from')
process('into')
self.sql_log_count = 0
self.sql_log = False
@check
def close(self):
return self._close(False)
def _close(self, leak=False):
global sql_counter
if not self._obj:
return
del self.cache
if self.sql_log:
self.__closer = frame_codeinfo(currentframe(), 3)
# simple query count is always computed
sql_counter += self.sql_log_count
# advanced stats only if sql_log is enabled
self.print_log()
self._obj.close()
# This forces the cursor to be freed, and thus, available again. It is
# important because otherwise we can overload the server very easily
# because of a cursor shortage (because cursors are not garbage
# collected as fast as they should). The problem is probably due in
# part because browse records keep a reference to the cursor.
del self._obj
self._closed = True
# Clean the underlying connection.
self._cnx.rollback()
if leak:
self._cnx.leaked = True
else:
chosen_template = tools.config['db_template']
templates_list = tuple(set(['template0', 'template1', 'postgres', chosen_template]))
keep_in_pool = self.dbname not in templates_list
self.__pool.give_back(self._cnx, keep_in_pool=keep_in_pool)
@check
def autocommit(self, on):
if on:
isolation_level = ISOLATION_LEVEL_AUTOCOMMIT
else:
# If a serializable cursor was requested, we
# use the appropriate PostgreSQL isolation level
# that maps to snapshot isolation.
# For all supported PostgreSQL versions (8.3-9.x),
# this is currently the ISOLATION_REPEATABLE_READ.
# See also the docstring of this class.
# NOTE: up to psycopg 2.4.2, repeatable read
# is remapped to serializable before being
# sent to the database, so it is in fact
# unavailable for use with pg 9.1.
isolation_level = \
ISOLATION_LEVEL_REPEATABLE_READ \
if self._serialized \
else ISOLATION_LEVEL_READ_COMMITTED
self._cnx.set_isolation_level(isolation_level)
@check
def after(self, event, func):
""" Register an event handler.
:param event: the event, either `'commit'` or `'rollback'`
:param func: a callable object, called with no argument after the
event occurs
Be careful when coding an event handler, since any operation on the
cursor that was just committed/rolled back will take place in the
next transaction that has already begun, and may still be rolled
back or committed independently. You may consider the use of a
dedicated temporary cursor to do some database operation.
"""
self._event_handlers[event].append(func)
def _pop_event_handlers(self):
# return the current handlers, and reset them on self
result = self._event_handlers
self._event_handlers = {'commit': [], 'rollback': []}
return result
@check
def commit(self):
""" Perform an SQL `COMMIT`
"""
result = self._cnx.commit()
for func in self._pop_event_handlers()['commit']:
func()
return result
@check
def rollback(self):
""" Perform an SQL `ROLLBACK`
"""
result = self._cnx.rollback()
for func in self._pop_event_handlers()['rollback']:
func()
return result
def __enter__(self):
""" Using the cursor as a contextmanager automatically commits and
closes it::
with cr:
cr.execute(...)
# cr is committed if no failure occurred
# cr is closed in any case
"""
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.commit()
self.close()
@contextmanager
@check
def savepoint(self):
"""context manager entering in a new savepoint"""
name = uuid.uuid1().hex
self.execute('SAVEPOINT "%s"' % name)
try:
yield
except Exception:
self.execute('ROLLBACK TO SAVEPOINT "%s"' % name)
raise
else:
self.execute('RELEASE SAVEPOINT "%s"' % name)
@check
def __getattr__(self, name):
return getattr(self._obj, name)
@property
def closed(self):
return self._closed
class TestCursor(Cursor):
""" A cursor to be used for tests. It keeps the transaction open across
several requests, and simulates committing, rolling back, and closing.
"""
def __init__(self, *args, **kwargs):
super(TestCursor, self).__init__(*args, **kwargs)
# in order to simulate commit and rollback, the cursor maintains a
# savepoint at its last commit
self.execute("SAVEPOINT test_cursor")
# we use a lock to serialize concurrent requests
self._lock = threading.RLock()
def acquire(self):
self._lock.acquire()
def release(self):
self._lock.release()
def force_close(self):
super(TestCursor, self).close()
def close(self):
if not self._closed:
self.rollback() # for stuff that has not been committed
self.release()
def autocommit(self, on):
_logger.debug("TestCursor.autocommit(%r) does nothing", on)
def commit(self):
self.execute("RELEASE SAVEPOINT test_cursor")
self.execute("SAVEPOINT test_cursor")
def rollback(self):
self.execute("ROLLBACK TO SAVEPOINT test_cursor")
self.execute("SAVEPOINT test_cursor")
class LazyCursor(object):
""" A proxy object to a cursor. The cursor itself is allocated only if it is
needed. This class is useful for cached methods, that use the cursor
only in the case of a cache miss.
"""
def __init__(self, dbname=None):
self._dbname = dbname
self._cursor = None
self._depth = 0
@property
def dbname(self):
return self._dbname or threading.currentThread().dbname
def __getattr__(self, name):
cr = self._cursor
if cr is None:
from odoo import registry
cr = self._cursor = registry(self.dbname).cursor()
for _ in range(self._depth):
cr.__enter__()
return getattr(cr, name)
def __enter__(self):
self._depth += 1
if self._cursor is not None:
self._cursor.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._depth -= 1
if self._cursor is not None:
self._cursor.__exit__(exc_type, exc_value, traceback)
class PsycoConnection(psycopg2.extensions.connection):
pass
class ConnectionPool(object):
""" The pool of connections to database(s)
Keep a set of connections to pg databases open, and reuse them
to open cursors for all transactions.
The connections are *not* automatically closed. Only a close_db()
can trigger that.
"""
def locked(fun):
@wraps(fun)
def _locked(self, *args, **kwargs):
self._lock.acquire()
try:
return fun(self, *args, **kwargs)
finally:
self._lock.release()
return _locked
def __init__(self, maxconn=64):
self._connections = []
self._maxconn = max(maxconn, 1)
self._lock = threading.Lock()
def __repr__(self):
used = len([1 for c, u in self._connections[:] if u])
count = len(self._connections)
return "ConnectionPool(used=%d/count=%d/max=%d)" % (used, count, self._maxconn)
def _debug(self, msg, *args):
_logger.debug(('%r ' + msg), self, *args)
@locked
def borrow(self, connection_info):
"""
:param dict connection_info: dict of psql connection keywords
:rtype: PsycoConnection
"""
# free dead and leaked connections
for i, (cnx, _) in tools.reverse_enumerate(self._connections):
if cnx.closed:
self._connections.pop(i)
self._debug('Removing closed connection at index %d: %r', i, cnx.dsn)
continue
if getattr(cnx, 'leaked', False):
delattr(cnx, 'leaked')
self._connections.pop(i)
self._connections.append((cnx, False))
_logger.info('%r: Free leaked connection to %r', self, cnx.dsn)
for i, (cnx, used) in enumerate(self._connections):
if not used and cnx._original_dsn == connection_info:
try:
cnx.reset()
except psycopg2.OperationalError:
self._debug('Cannot reset connection at index %d: %r', i, cnx.dsn)
# psycopg2 2.4.4 and earlier do not allow closing a closed connection
if not cnx.closed:
cnx.close()
continue
self._connections.pop(i)
self._connections.append((cnx, True))
self._debug('Borrow existing connection to %r at index %d', cnx.dsn, i)
return cnx
if len(self._connections) >= self._maxconn:
# try to remove the oldest connection not used
for i, (cnx, used) in enumerate(self._connections):
if not used:
self._connections.pop(i)
if not cnx.closed:
cnx.close()
self._debug('Removing old connection at index %d: %r', i, cnx.dsn)
break
else:
# note: this code is called only if the for loop has completed (no break)
raise PoolError('The Connection Pool Is Full')
try:
result = psycopg2.connect(
connection_factory=PsycoConnection,
**connection_info)
except psycopg2.Error:
_logger.info('Connection to the database failed')
raise
result._original_dsn = connection_info
self._connections.append((result, True))
self._debug('Create new connection')
return result
@locked
def give_back(self, connection, keep_in_pool=True):
self._debug('Give back connection to %r', connection.dsn)
for i, (cnx, used) in enumerate(self._connections):
if cnx is connection:
self._connections.pop(i)
if keep_in_pool:
self._connections.append((cnx, False))
self._debug('Put connection to %r in pool', cnx.dsn)
else:
self._debug('Forgot connection to %r', cnx.dsn)
cnx.close()
break
else:
raise PoolError('This connection does not belong to the pool')
@locked
def close_all(self, dsn=None):
count = 0
last = None
for i, (cnx, used) in tools.reverse_enumerate(self._connections):
if dsn is None or cnx._original_dsn == dsn:
cnx.close()
last = self._connections.pop(i)[0]
count += 1
_logger.info('%r: Closed %d connections %s', self, count,
(dsn and last and 'to %r' % last.dsn) or '')
class Connection(object):
""" A lightweight instance of a connection to postgres
"""
def __init__(self, pool, dbname, dsn):
self.dbname = dbname
self.dsn = dsn
self.__pool = pool
def cursor(self, serialized=True):
cursor_type = serialized and 'serialized ' or ''
_logger.debug('create %scursor to %r', cursor_type, self.dsn)
return Cursor(self.__pool, self.dbname, self.dsn, serialized=serialized)
def test_cursor(self, serialized=True):
cursor_type = serialized and 'serialized ' or ''
_logger.debug('create test %scursor to %r', cursor_type, self.dsn)
return TestCursor(self.__pool, self.dbname, self.dsn, serialized=serialized)
# serialized_cursor is deprecated - cursors are serialized by default
serialized_cursor = cursor
def __nonzero__(self):
"""Check if connection is possible"""
try:
_logger.info("__nonzero__() is deprecated. (It is too expensive to test a connection.)")
cr = self.cursor()
cr.close()
return True
except Exception:
return False
def connection_info_for(db_or_uri):
""" parse the given `db_or_uri` and return a 2-tuple (dbname, connection_params)
Connection params are either a dictionary with a single key ``dsn``
containing a connection URI, or a dictionary containing connection
parameter keywords which psycopg2 can build a key/value connection string
(dsn) from
:param str db_or_uri: database name or postgres dsn
:rtype: (str, dict)
"""
if db_or_uri.startswith(('postgresql://', 'postgres://')):
# extract db from uri
us = urllib.parse.urlsplit(db_or_uri)
if len(us.path) > 1:
db_name = us.path[1:]
elif us.username:
db_name = us.username
else:
db_name = us.hostname
return db_name, {'dsn': db_or_uri}
connection_info = {'database': db_or_uri}
for p in ('host', 'port', 'user', 'password', 'dbname'):
cfg = tools.config['db_' + p]
if cfg:
connection_info[p] = cfg
return db_or_uri, connection_info
_Pool = None
def db_connect(to, allow_uri=False):
global _Pool
if _Pool is None:
_Pool = ConnectionPool(int(tools.config['db_maxconn']))
db, info = connection_info_for(to)
if not allow_uri and db != to:
raise ValueError('URI connections not allowed')
return Connection(_Pool, db, info)
def close_db(db_name):
""" You might want to call odoo.modules.registry.Registry.delete(db_name) along this function."""
global _Pool
if _Pool:
_Pool.close_all(connection_info_for(db_name)[1])
def close_all():
global _Pool
if _Pool:
_Pool.close_all()
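To make the cursor and pool API above concrete, a usage sketch (assuming this module is importable as sql_db in an environment where odoo and its tools.config are available; not part of this file):

# illustrative sketch, not part of this file: borrow a pooled connection
import sql_db

connection = sql_db.db_connect('odoo')               # plain db name; URIs need allow_uri=True
with connection.cursor() as cr:                      # commits on success, always closes
    cr.after('commit', lambda: print("committed"))   # event handler, see Cursor.after()
    with cr.savepoint():                             # rolled back alone if the block raises
        cr.execute("SELECT datname FROM pg_database WHERE datname = %s", ('odoo',))
        print(list(cr.dictfetchall()))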

188
start.sh

@ -1,158 +1,36 @@
#!/bin/bash
set -euo pipefail
export LANG="C.UTF-8"
export ODOO_RC="/app/data/odoo.conf"
set -eu
echo "=> Ensure directories"
mkdir -p /app/data/addons /app/data/data
#if [[ ! -f "/app/data/odoo.conf" ]]; then
# echo "=> First run, create config file"
# cp /etc/odoo-server.conf /app/data/odoo.conf
#fi
echo "=> Patch config file"
# https://github.com/odoo/docker/blob/master/10.0/odoo.conf
crudini --set /app/data/odoo.conf options addons_path /app/data/addons,/app/code/odoo-server/addons,/app/code/extra-addons
crudini --set /app/data/odoo.conf options data_dir /app/data/data
crudini --set /app/data/odoo.conf options db_host ${CLOUDRON_POSTGRESQL_HOST}
crudini --set /app/data/odoo.conf options db_port ${CLOUDRON_POSTGRESQL_PORT}
crudini --set /app/data/odoo.conf options db_user ${CLOUDRON_POSTGRESQL_USERNAME}
crudini --set /app/data/odoo.conf options db_password ${CLOUDRON_POSTGRESQL_PASSWORD}
crudini --set /app/data/odoo.conf options db_dbname ${CLOUDRON_POSTGRESQL_DATABASE}
crudini --set /app/data/odoo.conf options smtp_password ${CLOUDRON_MAIL_SMTP_PASSWORD}
crudini --set /app/data/odoo.conf options smtp_port ${CLOUDRON_MAIL_SMTP_PORT}
crudini --set /app/data/odoo.conf options smtp_server ${CLOUDRON_MAIL_SMTP_SERVER}
crudini --set /app/data/odoo.conf options smtp_user ${CLOUDRON_MAIL_SMTP_USERNAME}
crudini --set /app/data/odoo.conf options smtp_ssl False
crudini --set /app/data/odoo.conf options email_from ${CLOUDRON_MAIL_FROM}
crudini --set /app/data/odoo.conf options list_db False
crudini --set /app/data/odoo.conf options without_demo WITHOUT_DEMO
echo "=> Ensure data ownership"
chown -R odoo:odoo /app/data/
echo "=> Starting odoo"
exec /usr/local/bin/gosu odoo:odoo /app/code/odoo-server/odoo-bin --config=/app/data/odoo.conf
pg_cli() {
PGPASSWORD=$CLOUDRON_POSTGRESQL_PASSWORD psql \
-h $CLOUDRON_POSTGRESQL_HOST \
-p $CLOUDRON_POSTGRESQL_PORT \
-U $CLOUDRON_POSTGRESQL_USERNAME \
-d $CLOUDRON_POSTGRESQL_DATABASE -c "$1"
}
# Create required directories if they don't exist
mkdir -p /app/data/extra-addons /app/data/odoo /run/odoo /run/nginx
chown -R cloudron:cloudron /run
# Check for First Run
if [[ ! -f /app/data/odoo.conf ]]; then
echo "First run. Initializing DB..."
# Initialize the database, and exit.
/usr/local/bin/gosu cloudron:cloudron /app/code/odoo/odoo-bin -i base,auth_ldap,fetchmail --without-demo all --data-dir /app/data/odoo --logfile /run/odoo/runtime.log -d $CLOUDRON_POSTGRESQL_DATABASE --db_host $CLOUDRON_POSTGRESQL_HOST --db_port $CLOUDRON_POSTGRESQL_PORT --db_user $CLOUDRON_POSTGRESQL_USERNAME --db_pass $CLOUDRON_POSTGRESQL_PASSWORD --stop-after-init
echo "Initialized successfully."
# echo "Adding required tables/relations for mail settings."
# pg_cli "INSERT INTO public.res_config_settings (create_uid, create_date, write_uid, write_date, company_id, user_default_rights, external_email_server_default, module_base_import, module_google_calendar, module_microsoft_calendar, module_google_drive, module_google_spreadsheet, module_auth_oauth, module_auth_ldap, module_base_gengo, module_account_inter_company_rules, module_pad, module_voip, module_web_unsplash, module_partner_autocomplete, module_base_geolocalize, module_google_recaptcha, group_multi_currency, show_effect, module_product_images, unsplash_access_key, fail_counter, alias_domain, restrict_template_rendering, use_twilio_rtc_servers, twilio_account_sid, twilio_account_token, auth_signup_reset_password, auth_signup_uninvited, auth_signup_template_user_id) VALUES (2, 'NOW()', 2, 'NOW()', 1, false, true, true, false, false, false, false, false, true, false, false, false, false, true, true, false, false, false, true, false, NULL, 0, '$CLOUDRON_APP_DOMAIN', false, false, NULL, NULL, false, 'b2b', 5) ON CONFLICT (id) DO NOTHING;"
pg_cli "INSERT INTO public.ir_config_parameter (key, value, create_uid, create_date, write_uid, write_date) VALUES ('base_setup.default_external_email_server', 'True', 2, 'NOW()', 2, 'NOW()');"
pg_cli "INSERT INTO public.ir_config_parameter (key, value, create_uid, create_date, write_uid, write_date) VALUES ('mail.catchall.domain', '$CLOUDRON_APP_DOMAIN', 2, 'NOW()', 2, 'NOW()');"
echo "Disabling public sign-up..."
pg_cli "UPDATE public.ir_config_parameter SET value='b2b' WHERE key='auth_signup.invitation_scope';"
echo "Copying default configuration file to /app/data/odoo.conf..."
cp /app/pkg/odoo.conf.sample /app/data/odoo.conf
crudini --set /app/data/odoo.conf 'options' list_db "False"
crudini --set /app/data/odoo.conf 'options' admin_password "$CLOUDRON_MAIL_SMTP_PASSWORD"
echo "First run complete."
fi
# These values should be re-set to make Odoo work as expected.
echo "Ensuring proper [options] in /app/data/odoo.conf ..."
/usr/local/bin/gosu cloudron:cloudron /app/code/odoo/odoo-bin -i auth_ldap,fetchmail -d $CLOUDRON_POSTGRESQL_DATABASE -c /app/data/odoo.conf --without-demo all --stop-after-init
# Check if asking update
if [[ -f /app/data/update ]]; then
/usr/local/bin/gosu cloudron:cloudron /app/code/odoo/odoo-bin -u all -d $CLOUDRON_POSTGRESQL_DATABASE -c /app/data/odoo.conf --without-demo all --stop-after-init
fi
# Custom paths
crudini --set /app/data/odoo.conf 'options' addons_path "/app/data/extra-addons,/app/code/auto/addons,/app/code/odoo/addons"
crudini --set /app/data/odoo.conf 'options' data_dir "/app/data/odoo"
# Logging
crudini --set /app/data/odoo.conf 'options' logfile "/run/logs/odoo.log"
crudini --set /app/data/odoo.conf 'options' logrotate 'False'
crudini --set /app/data/odoo.conf 'options' log_db 'False'
crudini --set /app/data/odoo.conf 'options' syslog 'False'
# Http Server
crudini --set /app/data/odoo.conf 'options' proxy_mode "True"
crudini --set /app/data/odoo.conf 'options' secure 'False'
crudini --set /app/data/odoo.conf 'options' interface '127.0.0.1'
crudini --set /app/data/odoo.conf 'options' port '8069'
crudini --set /app/data/odoo.conf 'options' longpolling_port '8072'
# Securing Odoo
crudini --set /app/data/odoo.conf 'options' test_enable "False"
crudini --set /app/data/odoo.conf 'options' test_file "False"
crudini --set /app/data/odoo.conf 'options' test_report_directory "False"
crudini --set /app/data/odoo.conf 'options' without_demo "all"
crudini --set /app/data/odoo.conf 'options' debug_mode "False"
#TODO Disable debug mode
# DB
crudini --set /app/data/odoo.conf 'options' db_host "$CLOUDRON_POSTGRESQL_HOST"
crudini --set /app/data/odoo.conf 'options' db_port "$CLOUDRON_POSTGRESQL_PORT"
crudini --set /app/data/odoo.conf 'options' db_user "$CLOUDRON_POSTGRESQL_USERNAME"
crudini --set /app/data/odoo.conf 'options' db_password "$CLOUDRON_POSTGRESQL_PASSWORD"
crudini --set /app/data/odoo.conf 'options' db_name "$CLOUDRON_POSTGRESQL_DATABASE"
crudini --set /app/data/odoo.conf 'options' db_filter "^$CLOUDRON_POSTGRESQL_DATABASE.*$"
crudini --set /app/data/odoo.conf 'options' db_sslmode 'False'
# IMAP Configuration
if [[ -z "${CLOUDRON_MAIL_IMAP_SERVER+x}" ]]; then
echo "IMAP is disabled. Removing values from config."
pg_cli "UPDATE public.fetchmail_server SET active='f' WHERE name LIKE 'Cloudron%';"
else
echo "IMAP is enabled. Adding values to config."
pg_cli "INSERT INTO public.fetchmail_server (id, name, active, state, server, port, server_type, is_ssl, attach, original, date, \"user\", password, object_id, priority, configuration, script, create_uid, create_date, write_uid, write_date) VALUES (1, 'Cloudron IMAP Service', true, 'done', '$CLOUDRON_MAIL_IMAP_SERVER', $CLOUDRON_MAIL_IMAP_PORT, 'imap', false, true, false, NULL, '$CLOUDRON_MAIL_IMAP_USERNAME', '$CLOUDRON_MAIL_IMAP_PASSWORD', 151, 5, NULL, '/mail/static/scripts/odoo-mailgate.py', 2, 'NOW()', 2, 'NOW()') ON CONFLICT (id) DO NOTHING;"
fi
# SMTP Configuration
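# Outgoing mail: seed an ir_mail_server record pointing at the Cloudron SMTP addon; smtp_encryption is
# 'none', presumably because the relay is reached over Cloudron's internal network.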
if [[ -z "${CLOUDRON_MAIL_SMTP_SERVER+x}" ]]; then
echo "SMTP is disabled. Removing values from config."
pg_cli "UPDATE public.ir_mail_server SET active='f' WHERE name LIKE 'Cloudron%';"
else
echo "SMTP is enabled. Adding values to config."
pg_cli "INSERT INTO public.ir_mail_server (id, name, smtp_host, smtp_port, smtp_user, smtp_pass, smtp_encryption, smtp_debug, sequence, active, create_uid, create_date, write_uid, write_date) VALUES (1, 'Cloudron SMTP Service', '$CLOUDRON_MAIL_SMTP_SERVER', $CLOUDRON_MAIL_SMTP_PORT, '$CLOUDRON_MAIL_SMTP_USERNAME', '$CLOUDRON_MAIL_SMTP_PASSWORD', 'none', false, 10, true, 2, 'NOW()', 2, 'NOW()') ON CONFLICT (id) DO NOTHING;"
fi
# LDAP Configuration
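# Seed res_company_ldap with the Cloudron LDAP addon credentials: logins are matched against the mail
# attribute, and create_user=true lets Odoo create accounts automatically on first LDAP sign-in.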
if [[ -z "${CLOUDRON_LDAP_SERVER+x}" ]]; then
echo "LDAP is disabled. Removing values from config."
pg_cli "DELETE FROM public.res_company_ldap WHERE id = 1 AND company = 1"
else
echo "LDAP is enabled. Adding values to config."
pg_cli "INSERT INTO public.res_company_ldap (id, sequence, company, ldap_server, ldap_server_port, ldap_binddn, ldap_password, ldap_filter, ldap_base, \"user\", create_user, ldap_tls, create_uid, create_date, write_uid, write_date) VALUES (1, 10, 1, '$CLOUDRON_LDAP_SERVER', $CLOUDRON_LDAP_PORT, '$CLOUDRON_LDAP_BIND_DN', '$CLOUDRON_LDAP_BIND_PASSWORD', '(&(objectclass=user)(mail=%s))', '$CLOUDRON_LDAP_USERS_BASE_DN', NULL, true, false, 2, 'NOW()', 2, 'NOW()') ON CONFLICT (id) DO NOTHING;;"
fi
# Start nginx process
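# Render the packaged nginx template into the writable /run directory, substituting the app domain placeholder.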
sed -e "s,__REPLACE_WITH_CLOUDRON_APP_DOMAIN__,${CLOUDRON_APP_DOMAIN}," /app/pkg/nginx.conf >/run/nginx/nginx.conf
if [[ ! -f /app/data/nginx-custom-locations.conf ]]; then
cat >/app/data/nginx-custom-locations.conf <<EOF
# Content of this file is included inside the server { } block.
# Add custom locations except "/" and "/longpolling" as they are reserved for Odoo.
# Or add custom directives. See https://nginx.org/en/docs/http/ngx_http_core_module.html#server
EOF
fi
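# Example (hypothetical) content an admin might add to /app/data/nginx-custom-locations.conf,
# e.g. to serve a static directory alongside Odoo:
#
#   location /custom-static/ {
#       alias /app/data/static/;   # hypothetical path, not created by this package
#   }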
chown -R cloudron:cloudron /app/data
echo "=> Start nginx"
rm -f /run/nginx.pid
nginx -c /run/nginx/nginx.conf &
# Done nginx
echo "Resource allocation (hard limit: 100% of available memory; soft limit: 80%)"
if [[ -f /sys/fs/cgroup/memory/memory.memsw.limit_in_bytes ]]; then
memory_limit_hard=$(($(cat /sys/fs/cgroup/memory/memory.memsw.limit_in_bytes)))
memory_limit_soft=$((memory_limit_hard * 4 / 5))
else
memory_limit_hard=2684354560
memory_limit_soft=2147483648 # (memory_limit_hard * 4 / 5)
fi
worker_count=$((memory_limit_hard / 1024 / 1024 / 150)) # 1 worker for 150M
worker_count=$((worker_count > 8 ? 8 : worker_count)) # max of 8
worker_count=$((worker_count < 1 ? 1 : worker_count)) # min of 1
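# Worked example with the 2.5 GiB fallback: 2684354560 B = 2560 MiB, 2560 / 150 ≈ 17 workers, clamped to the maximum of 8.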
echo "Memory limits - hard limit: $memory_limit_hard bytes, soft limit: $memory_limit_soft bytes"
crudini --set /app/data/odoo.conf 'options' limit_memory_hard $memory_limit_hard
crudini --set /app/data/odoo.conf 'options' limit_memory_soft $memory_limit_soft
crudini --set /app/data/odoo.conf 'options' workers $worker_count
echo "Done. Starting server with $worker_count workers.."
chown -R cloudron:cloudron /app/data/
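# Drop privileges from root to the cloudron user and run Odoo in the foreground with the generated config.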
/usr/local/bin/gosu cloudron:cloudron /app/code/odoo/odoo-bin -c /app/data/odoo.conf